From 0c57b45f0c07a069e2ea8ef4406aec8522f2085d Mon Sep 17 00:00:00 2001 From: BrandonRomano Date: Wed, 14 Jun 2023 15:40:44 -0700 Subject: [PATCH 01/12] Upgrade plugin for integrations library --- .github/workflows/ensure-docs-compiled.yaml | 22 ++ ...notify-integration-release-via-manual.yaml | 46 +++ .../notify-integration-release-via-tag.yaml | 40 +++ .web-docs/README.md | 60 ++++ .../components/builder/digitalocean/README.md | 310 ++++++++++++++++++ .../data-source/digitalocen-image/README.md | 78 +++++ .../digitalocean-import/README.md | 136 ++++++++ .web-docs/metadata.hcl | 22 ++ .web-docs/scripts/compile-to-webdocs.sh | 129 ++++++++ GNUmakefile | 8 +- docs/builders/digitalocean.mdx | 10 +- docs/datasources/digitalocen-image.mdx | 10 +- docs/post-processors/digitalocean-import.mdx | 8 +- 13 files changed, 860 insertions(+), 19 deletions(-) create mode 100644 .github/workflows/ensure-docs-compiled.yaml create mode 100644 .github/workflows/notify-integration-release-via-manual.yaml create mode 100644 .github/workflows/notify-integration-release-via-tag.yaml create mode 100644 .web-docs/README.md create mode 100644 .web-docs/components/builder/digitalocean/README.md create mode 100644 .web-docs/components/data-source/digitalocen-image/README.md create mode 100644 .web-docs/components/post-processor/digitalocean-import/README.md create mode 100644 .web-docs/metadata.hcl create mode 100755 .web-docs/scripts/compile-to-webdocs.sh diff --git a/.github/workflows/ensure-docs-compiled.yaml b/.github/workflows/ensure-docs-compiled.yaml new file mode 100644 index 0000000..8d10ac7 --- /dev/null +++ b/.github/workflows/ensure-docs-compiled.yaml @@ -0,0 +1,22 @@ +name: Ensure Docs are Compiled +on: + push: +jobs: + ensure-docs-compiled: + runs-on: ubuntu-latest + steps: + - name: Checkout 🛎 + uses: actions/checkout@v2 + - uses: actions/setup-go@v4 + - shell: bash + run: make build-docs + - shell: bash + run: | + if [[ -z "$(git status -s)" ]]; then + echo "OK" + else + echo "Docs have been updated, but the compiled docs have not been committed." + echo "Run 'make build-docs', and commit the result to resolve this error." + exit 1 + fi + diff --git a/.github/workflows/notify-integration-release-via-manual.yaml b/.github/workflows/notify-integration-release-via-manual.yaml new file mode 100644 index 0000000..7126bfc --- /dev/null +++ b/.github/workflows/notify-integration-release-via-manual.yaml @@ -0,0 +1,46 @@ +name: Notify Integration Release (Manual) +on: + workflow_dispatch: + inputs: + version: + description: "The release version (semver)" + default: 0.0.1 + required: false + branch: + description: "A branch or SHA" + default: 'main' + required: false +jobs: + notify-release: + runs-on: ubuntu-latest + steps: + - name: Checkout this repo + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0 + with: + ref: ${{ github.event.inputs.branch }} + # Ensure that Docs are Compiled + - uses: actions/setup-go@v4 + - shell: bash + run: make build-docs + - shell: bash + run: | + if [[ -z "$(git status -s)" ]]; then + echo "OK" + else + echo "Docs have been updated, but the compiled docs have not been committed." + echo "Run 'make build-docs', and commit the result to resolve this error." 
+ exit 1 + fi + # Perform the Release + - name: Checkout integration-release-action + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0 + with: + repository: hashicorp/integration-release-action + path: ./integration-release-action + - name: Notify Release + uses: ./integration-release-action + with: + integration_identifier: 'packer/BrandonRomano/digitalocean' + release_version: ${{ github.event.inputs.version }} + release_sha: ${{ github.event.inputs.branch }} + github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/notify-integration-release-via-tag.yaml b/.github/workflows/notify-integration-release-via-tag.yaml new file mode 100644 index 0000000..4ac5df4 --- /dev/null +++ b/.github/workflows/notify-integration-release-via-tag.yaml @@ -0,0 +1,40 @@ +name: Notify Integration Release (Tag) +on: + push: + tags: + - '*.*.*' # Proper releases + - '*.*.*-*' # Pre releases +jobs: + notify-release: + runs-on: ubuntu-latest + steps: + - name: Checkout this repo + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0 + with: + ref: ${{ github.ref }} + # Ensure that Docs are Compiled + - uses: actions/setup-go@v4 + - shell: bash + run: make build-docs + - shell: bash + run: | + if [[ -z "$(git status -s)" ]]; then + echo "OK" + else + echo "Docs have been updated, but the compiled docs have not been committed." + echo "Run 'make build-docs', and commit the result to resolve this error." + exit 1 + fi + # Perform the Release + - name: Checkout integration-release-action + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0 + with: + repository: hashicorp/integration-release-action + path: ./integration-release-action + - name: Notify Release + uses: ./integration-release-action + with: + integration_identifier: 'packer/BrandonRomano/digitalocean' + release_version: ${{ github.ref_name }} + release_sha: ${{ github.ref }} + github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.web-docs/README.md b/.web-docs/README.md new file mode 100644 index 0000000..e1a7290 --- /dev/null +++ b/.web-docs/README.md @@ -0,0 +1,60 @@ +# DigitalOcean Plugins + +The [DigitalOcean](https://www.digitalocean.com/) Packer plugin provides a builder for building images in +DigitalOcean, and a post-processor for importing already-existing images into +DigitalOcean. + +## Installation + +### Using pre-built releases + +#### Using the `packer init` command + +Starting from version 1.7, Packer supports a new `packer init` command allowing +automatic installation of Packer plugins. Read the +[Packer documentation](https://www.packer.io/docs/commands/init) for more information. + +To install this plugin, copy and paste this code into your Packer configuration . +Then, run [`packer init`](https://www.packer.io/docs/commands/init). + +```hcl +packer { + required_plugins { + digitalocean = { + version = ">= 1.0.4" + source = "github.com/digitalocean/digitalocean" + } + } +} +``` + +#### Manual installation + +You can find pre-built binary releases of the plugin [here](https://github.com/digitalocean/packer-plugin-digitalocean/releases). +Once you have downloaded the latest archive corresponding to your target OS, +uncompress it to retrieve the plugin binary file corresponding to your platform. +To install the plugin, please follow the Packer documentation on +[installing a plugin](https://www.packer.io/docs/extending/plugins/#installing-plugins). 
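+
+As a rough sketch (assuming a Packer release new enough to support
+`packer plugins install --path`, and that the extracted binary is named
+`packer-plugin-digitalocean`), the manual install can look like this:
+
+```sh
+# Unpack the downloaded release archive, then register the local binary with Packer.
+packer plugins install --path ./packer-plugin-digitalocean github.com/digitalocean/digitalocean
+```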
+ + +#### From Source + +If you prefer to build the plugin from its source code, clone the GitHub +repository locally and run the command `go build` from the root +directory. Upon successful compilation, a `packer-plugin-digitalocean` plugin +binary file can be found in the root directory. +To install the compiled plugin, please follow the official Packer documentation +on [installing a plugin](https://www.packer.io/docs/extending/plugins/#installing-plugins). + + +## Plugin Contents + +The DigitalOcean plugin is intended as a starting point for creating Packer plugins, containing: + +### Builders + +- [builder](/docs/builders/digitalocean.mdx) - The builder takes a source image, runs any provisioning necessary on the image after launching it, then snapshots it into a reusable image. This reusable image can then be used as the foundation of new servers that are launched within DigitalOcean. + +### Post-processors + +- [post-processor](/docs/post-processors/digitalocean-import.mdx) - The digitalocean-import post-processor is used to import images to DigitalOcean diff --git a/.web-docs/components/builder/digitalocean/README.md b/.web-docs/components/builder/digitalocean/README.md new file mode 100644 index 0000000..8d2d186 --- /dev/null +++ b/.web-docs/components/builder/digitalocean/README.md @@ -0,0 +1,310 @@ +Type: `digitalocean` +Artifact BuilderId: `pearkes.digitalocean` + +The `digitalocean` Packer builder is able to create new images for use with +[DigitalOcean](https://www.digitalocean.com). The builder takes a source image, +runs any provisioning necessary on the image after launching it, then snapshots +it into a reusable image. This reusable image can then be used as the +foundation of new servers that are launched within DigitalOcean. + +The builder does _not_ manage images. Once it creates an image, it is up to you +to use it or delete it. + +## Installation + +To install this plugin using Packer v1.7.0 or later, copy and paste this code +into your Packer configuration. + +Then, run [`packer init`](https://www.packer.io/docs/commands/init). + + +```hcl +packer { + required_plugins { + digitalocean = { + version = ">= 1.0.4" + source = "github.com/digitalocean/digitalocean" + } + } +} +``` + +## Configuration Reference + +There are many configuration options available for the builder. They are +segmented below into two categories: required and optional parameters. Within +each category, the available configuration keys are alphabetized. + +### Required: + + + +- `api_token` (string) - The client TOKEN to use to access your account. It + can also be specified via environment variable DIGITALOCEAN_TOKEN, DIGITALOCEAN_ACCESS_TOKEN, or DIGITALOCEAN_API_TOKEN if + set. DIGITALOCEAN_API_TOKEN will be deprecated in a future release in favor of DIGITALOCEAN_TOKEN or DIGITALOCEAN_ACCESS_TOKEN. + +- `region` (string) - The name (or slug) of the region to launch the droplet + in. Consequently, this is the region where the snapshot will be available. + See + https://docs.digitalocean.com/reference/api/api-reference/#operation/list_all_regions + for the accepted region names/slugs. + +- `size` (string) - The name (or slug) of the droplet size to use. See + https://docs.digitalocean.com/reference/api/api-reference/#operation/list_all_sizes + for the accepted size names/slugs. + +- `image` (string) - The name (or slug) of the base image to use. This is the + image that will be used to launch a new droplet and provision it. 
See + https://docs.digitalocean.com/reference/api/api-reference/#operation/get_images_list + for details on how to get a list of the accepted image names/slugs. + + + + +### Optional: + + + +- `api_url` (string) - Non standard api endpoint URL. Set this if you are + using a DigitalOcean API compatible service. It can also be specified via + environment variable DIGITALOCEAN_API_URL. + +- `private_networking` (bool) - Set to true to enable private networking + for the droplet being created. This defaults to false, or not enabled. + +- `monitoring` (bool) - Set to true to enable monitoring for the droplet + being created. This defaults to false, or not enabled. + +- `droplet_agent` (\*bool) - A boolean indicating whether to install the DigitalOcean agent used for + providing access to the Droplet web console in the control panel. By + default, the agent is installed on new Droplets but installation errors + (i.e. OS not supported) are ignored. To prevent it from being installed, + set to false. To make installation errors fatal, explicitly set it to true. + +- `ipv6` (bool) - Set to true to enable ipv6 for the droplet being + created. This defaults to false, or not enabled. + +- `snapshot_name` (string) - The name of the resulting snapshot that will + appear in your account. Defaults to `packer-{{timestamp}}` (see + configuration templates for more info). + +- `snapshot_regions` ([]string) - The regions of the resulting + snapshot that will appear in your account. + +- `state_timeout` (duration string | ex: "1h5m2s") - The time to wait, as a duration string, for a + droplet to enter a desired state (such as "active") before timing out. The + default state timeout is "6m". + +- `snapshot_timeout` (duration string | ex: "1h5m2s") - How long to wait for an image to be published to the shared image + gallery before timing out. If your Packer build is failing on the + Publishing to Shared Image Gallery step with the error `Original Error: + context deadline exceeded`, but the image is present when you check your + Azure dashboard, then you probably need to increase this timeout from + its default of "60m" (valid time units include `s` for seconds, `m` for + minutes, and `h` for hours.) + +- `droplet_name` (string) - The name assigned to the droplet. DigitalOcean + sets the hostname of the machine to this value. + +- `user_data` (string) - User data to launch with the Droplet. Packer will + not automatically wait for a user script to finish before shutting down the + instance this must be handled in a provisioner. + +- `user_data_file` (string) - Path to a file that will be used for the user + data when launching the Droplet. + +- `tags` ([]string) - Tags to apply to the droplet when it is created + +- `vpc_uuid` (string) - UUID of the VPC which the droplet will be created in. Before using this, + private_networking should be enabled. + +- `connect_with_private_ip` (bool) - Wheter the communicators should use private IP or not (public IP in that case). + If the droplet is or going to be accessible only from the local network because + it is at behind a firewall, then communicators should use the private IP + instead of the public IP. Before using this, private_networking should be enabled. + +- `ssh_key_id` (int) - The ID of an existing SSH key on the DigitalOcean account. This should be + used in conjunction with `ssh_private_key_file`. + + + + +## Basic Example + +Here is a basic example. 
It is completely valid as soon as you enter your own +access tokens: + +**HCL2** + +```hcl +source "digitalocean" "example" { + api_token = "YOUR API KEY" + image = "ubuntu-16-04-x64" + region = "nyc3" + size = "512mb" + ssh_username = "root" +} + +build { + sources = ["source.digitalocean.example"] +} +``` + +**JSON** + +```json +{ + "type": "digitalocean", + "api_token": "YOUR API KEY", + "image": "ubuntu-16-04-x64", + "region": "nyc3", + "size": "512mb", + "ssh_username": "root" +} +``` + + +### Communicator Config + +In addition to the builder options, a +[communicator](/docs/templates/legacy_json_templates/communicator) can be configured for this builder. + + + +- `communicator` (string) - Packer currently supports three kinds of communicators: + + - `none` - No communicator will be used. If this is set, most + provisioners also can't be used. + + - `ssh` - An SSH connection will be established to the machine. This + is usually the default. + + - `winrm` - A WinRM connection will be established. + + In addition to the above, some builders have custom communicators they + can use. For example, the Docker builder has a "docker" communicator + that uses `docker exec` and `docker cp` to execute scripts and copy + files. + +- `pause_before_connecting` (duration string | ex: "1h5m2s") - We recommend that you enable SSH or WinRM as the very last step in your + guest's bootstrap script, but sometimes you may have a race condition + where you need Packer to wait before attempting to connect to your + guest. + + If you end up in this situation, you can use the template option + `pause_before_connecting`. By default, there is no pause. For example if + you set `pause_before_connecting` to `10m` Packer will check whether it + can connect, as normal. But once a connection attempt is successful, it + will disconnect and then wait 10 minutes before connecting to the guest + and beginning provisioning. + + + + + + +- `ssh_host` (string) - The address to SSH to. This usually is automatically configured by the + builder. + +- `ssh_port` (int) - The port to connect to SSH. This defaults to `22`. + +- `ssh_username` (string) - The username to connect to SSH with. Required if using SSH. + +- `ssh_password` (string) - A plaintext password to use to authenticate with SSH. + +- `ssh_ciphers` ([]string) - This overrides the value of ciphers supported by default by Golang. + The default value is [ + "aes128-gcm@openssh.com", + "chacha20-poly1305@openssh.com", + "aes128-ctr", "aes192-ctr", "aes256-ctr", + ] + + Valid options for ciphers include: + "aes128-ctr", "aes192-ctr", "aes256-ctr", "aes128-gcm@openssh.com", + "chacha20-poly1305@openssh.com", + "arcfour256", "arcfour128", "arcfour", "aes128-cbc", "3des-cbc", + +- `ssh_clear_authorized_keys` (bool) - If true, Packer will attempt to remove its temporary key from + `~/.ssh/authorized_keys` and `/root/.ssh/authorized_keys`. This is a + mostly cosmetic option, since Packer will delete the temporary private + key from the host system regardless of whether this is set to true + (unless the user has set the `-debug` flag). Defaults to "false"; + currently only works on guests with `sed` installed. + +- `ssh_key_exchange_algorithms` ([]string) - If set, Packer will override the value of key exchange (kex) algorithms + supported by default by Golang. Acceptable values include: + "curve25519-sha256@libssh.org", "ecdh-sha2-nistp256", + "ecdh-sha2-nistp384", "ecdh-sha2-nistp521", + "diffie-hellman-group14-sha1", and "diffie-hellman-group1-sha1". 
+ +- `ssh_certificate_file` (string) - Path to user certificate used to authenticate with SSH. + The `~` can be used in path and will be expanded to the + home directory of current user. + +- `ssh_pty` (bool) - If `true`, a PTY will be requested for the SSH connection. This defaults + to `false`. + +- `ssh_timeout` (duration string | ex: "1h5m2s") - The time to wait for SSH to become available. Packer uses this to + determine when the machine has booted so this is usually quite long. + Example value: `10m`. + This defaults to `5m`, unless `ssh_handshake_attempts` is set. + +- `ssh_disable_agent_forwarding` (bool) - If true, SSH agent forwarding will be disabled. Defaults to `false`. + +- `ssh_handshake_attempts` (int) - The number of handshakes to attempt with SSH once it can connect. + This defaults to `10`, unless a `ssh_timeout` is set. + +- `ssh_bastion_host` (string) - A bastion host to use for the actual SSH connection. + +- `ssh_bastion_port` (int) - The port of the bastion host. Defaults to `22`. + +- `ssh_bastion_agent_auth` (bool) - If `true`, the local SSH agent will be used to authenticate with the + bastion host. Defaults to `false`. + +- `ssh_bastion_username` (string) - The username to connect to the bastion host. + +- `ssh_bastion_password` (string) - The password to use to authenticate with the bastion host. + +- `ssh_bastion_interactive` (bool) - If `true`, the keyboard-interactive used to authenticate with bastion host. + +- `ssh_bastion_private_key_file` (string) - Path to a PEM encoded private key file to use to authenticate with the + bastion host. The `~` can be used in path and will be expanded to the + home directory of current user. + +- `ssh_bastion_certificate_file` (string) - Path to user certificate used to authenticate with bastion host. + The `~` can be used in path and will be expanded to the + home directory of current user. + +- `ssh_file_transfer_method` (string) - `scp` or `sftp` - How to transfer files, Secure copy (default) or SSH + File Transfer Protocol. + + **NOTE**: Guests using Windows with Win32-OpenSSH v9.1.0.0p1-Beta, scp + (the default protocol for copying data) returns a a non-zero error code since the MOTW + cannot be set, which cause any file transfer to fail. As a workaround you can override the transfer protocol + with SFTP instead `ssh_file_transfer_protocol = "sftp"`. + +- `ssh_proxy_host` (string) - A SOCKS proxy host to use for SSH connection + +- `ssh_proxy_port` (int) - A port of the SOCKS proxy. Defaults to `1080`. + +- `ssh_proxy_username` (string) - The optional username to authenticate with the proxy server. + +- `ssh_proxy_password` (string) - The optional password to use to authenticate with the proxy server. + +- `ssh_keep_alive_interval` (duration string | ex: "1h5m2s") - How often to send "keep alive" messages to the server. Set to a negative + value (`-1s`) to disable. Example value: `10s`. Defaults to `5s`. + +- `ssh_read_write_timeout` (duration string | ex: "1h5m2s") - The amount of time to wait for a remote command to end. This might be + useful if, for example, packer hangs on a connection after a reboot. + Example: `5m`. Disabled by default. + +- `ssh_remote_tunnels` ([]string) - + +- `ssh_local_tunnels` ([]string) - + + + + +- `ssh_private_key_file` (string) - Path to a PEM encoded private key file to use to authenticate with SSH. + The `~` can be used in path and will be expanded to the home directory + of current user. 
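+
+As an illustrative sketch (the key ID and private key path below are
+placeholder values), an existing DigitalOcean SSH key can be wired in
+alongside the options documented above like this:
+
+```hcl
+source "digitalocean" "example_with_key" {
+  api_token    = "YOUR API KEY"
+  image        = "ubuntu-16-04-x64"
+  region       = "nyc3"
+  size         = "512mb"
+  ssh_username = "root"
+
+  # ssh_key_id is intended to be used together with ssh_private_key_file.
+  ssh_key_id           = 12345678
+  ssh_private_key_file = "~/.ssh/do_packer"
+}
+```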
diff --git a/.web-docs/components/data-source/digitalocen-image/README.md b/.web-docs/components/data-source/digitalocen-image/README.md new file mode 100644 index 0000000..ca65e57 --- /dev/null +++ b/.web-docs/components/data-source/digitalocen-image/README.md @@ -0,0 +1,78 @@ +Type: `digitalocean-image` + +The DigitalOcean image data source is used look up the ID of an existing DigitalOcean image +for use as a builder source. + +## Required: + + + +- `api_token` (string) - The API token to used to access your account. It can also be specified via + the DIGITALOCEAN_TOKEN or DIGITALOCEAN_ACCESS_TOKEN environment variables. + + + + +## Optional: + + + +- `api_url` (string) - A non-standard API endpoint URL. Set this if you are using a DigitalOcean API + compatible service. It can also be specified via environment variable DIGITALOCEAN_API_URL. + +- `name` (string) - The name of the image to return. Only one of `name` or `name_regex` may be provided. + +- `name_regex` (string) - A regex matching the name of the image to return. Only one of `name` or `name_regex` may be provided. + +- `type` (string) - Filter the images searched by type. This may be one of `application`, `distribution`, or `user`. + By default, all image types are searched. + +- `region` (string) - A DigitalOcean region slug (e.g. `nyc3`). When provided, only images available in that region + will be returned. + +- `latest` (bool) - A boolean value determining how to handle multiple matching images. By default, multiple matching images + results in an error. When set to `true`, the most recently created image is returned instead. + + + + +## Output: + + + +- `image_id` (int) - The ID of the found image. + +- `image_regions` ([]string) - The regions the found image is availble in. + + + + +## Example Usage + +```hcl +data "digitalocean-image" "example" { + name_regex = "golden-image-2022.*" + region = "nyc3" + type = "user" + latest = true +} + +locals { + image_id = data.digitalocean-image.example.image_id +} + +source "digitalocean" "example" { + snapshot_name = "updated-golden-image" + image = local.image_id + region = "nyc3" + size = "s-1vcpu-1gb" + ssh_username = "root" +} + +build { + sources = ["source.digitalocean.example"] + provisioner "shell" { + inline = ["touch /root/provisioned-by-packer"] + } +} +``` diff --git a/.web-docs/components/post-processor/digitalocean-import/README.md b/.web-docs/components/post-processor/digitalocean-import/README.md new file mode 100644 index 0000000..184dc44 --- /dev/null +++ b/.web-docs/components/post-processor/digitalocean-import/README.md @@ -0,0 +1,136 @@ +Type: `digitalocean-import` +Artifact BuilderId: `packer.post-processor.digitalocean-import` + +The Packer DigitalOcean Import post-processor is used to import images created by other Packer builders to DigitalOcean. + +~> Note: Users looking to create custom images, and reusable snapshots, directly on DigitalOcean can use +the [DigitalOcean builder](/docs/builder/digitalocean) without this post-processor. + +## How Does it Work? + +The import process operates uploading a temporary copy of the image to +DigitalOcean Spaces and then importing it as a custom image via the +DigialOcean API. The temporary copy in Spaces can be discarded after the +import is complete. + +For information about the requirements to use an image for a DigitalOcean +Droplet, see DigitalOcean's [Custom Images documentation](https://www.digitalocean.com/docs/images/custom-images). 
+ +### Installation + +To install this plugin using Packer v1.7.0 or later, copy and paste this code +into your Packer configuration. + +Then, run [`packer init`](https://www.packer.io/docs/commands/init). + + +```hcl +packer { + required_plugins { + digitalocean = { + version = ">= 1.0.4" + source = "github.com/digitalocean/digitalocean" + } + } +} +``` + +## Configuration + +There are some configuration options available for the post-processor. + +Required: + +- `api_token` (string) - A personal access token used to communicate with + the DigitalOcean v2 API. This may also be set using the + `DIGITALOCEAN_TOKEN` or `DIGITALOCEAN_ACCESS_TOKEN` environmental variables. + `DIGITALOCEAN_API_TOKEN` is acceptable but will be deprecated in a future release. + +- `spaces_key` (string) - The access key used to communicate with Spaces. + This may also be set using the `DIGITALOCEAN_SPACES_ACCESS_KEY` + environmental variable. + +- `spaces_secret` (string) - The secret key used to communicate with Spaces. + This may also be set using the `DIGITALOCEAN_SPACES_SECRET_KEY` + environmental variable. + +- `spaces_region` (string) - The name of the region, such as `nyc3`, in which + to upload the image to Spaces. + +- `space_name` (string) - The name of the specific Space where the image file + will be copied to for import. This Space must exist when the + post-processor is run. + +- `image_name` (string) - The name to be used for the resulting DigitalOcean + custom image. + +- `image_regions` (array of string) - A list of DigitalOcean regions, such + as `nyc3`, where the resulting image will be available for use in creating + Droplets. + +Optional: + +- `image_description` (string) - The description to set for the resulting + imported image. + +- `image_distribution` (string) - The name of the distribution to set for + the resulting imported image. + +- `image_tags` (array of strings) - A list of tags to apply to the resulting + imported image. + +- `keep_input_artifact` (boolean) - if true, do not delete the source virtual + machine image after importing it to the cloud. Defaults to false. + +- `skip_clean` (boolean) - Whether we should skip removing the image file + uploaded to Spaces after the import process has completed. "true" means + that we should leave it in the Space, "false" means to clean it out. + Defaults to `false`. + +- `space_object_name` (string) - The name of the key used in the Space where + the image file will be copied to for import. This is treated as a + [template engine](/docs/templates/legacy_json_templates/engine). Therefore, you + may use user variables and template functions in this field. + If not specified, this will default to `packer-import-{{timestamp}}`. + +- `timeout` (number) - The length of time in minutes to wait for individual + steps in the process to successfully complete. This includes both importing + the image from Spaces as well as distributing the resulting image to + additional regions. If not specified, this will default to 20. 
+ +## Basic Example + +Here is a basic example: + +**JSON** + +```json +{ + "type": "digitalocean-import", + "api_token": "{{user `token`}}", + "spaces_key": "{{user `key`}}", + "spaces_secret": "{{user `secret`}}", + "spaces_region": "nyc3", + "space_name": "import-bucket", + "image_name": "ubuntu-18.10-minimal-amd64", + "image_description": "Packer import {{timestamp}}", + "image_regions": ["nyc3", "nyc2"], + "image_tags": ["custom", "packer"] +} +``` + +**HCL2** + +```hcl +post-processor "digitalocean-import" { + api_token = "{{user `token`}}" + spaces_key = "{{user `key`}}" + spaces_secret = "{{user `secret`}}" + spaces_region = "nyc3" + space_name = "import-bucket" + image_name = "ubuntu-18.10-minimal-amd64" + image_description = "Packer import {{timestamp}}" + image_regions = ["nyc3", "nyc2"] + image_tags = ["custom", "packer"] +} +``` diff --git a/.web-docs/metadata.hcl b/.web-docs/metadata.hcl new file mode 100644 index 0000000..b38eedd --- /dev/null +++ b/.web-docs/metadata.hcl @@ -0,0 +1,22 @@ +# For full specification on the configuration of this file visit: +# https://github.com/hashicorp/integration-template#metadata-configuration +integration { + name = "TODO" + description = "TODO" + identifier = "packer/BrandonRomano/digitalocean" + component { + type = "data-source" + name = "DigitalOcean Image" + slug = "digitalocen-image" + } + component { + type = "builder" + name = "DigitalOcean" + slug = "digitalocean" + } + component { + type = "post-processor" + name = "DigitalOcean Import" + slug = "digitalocean-import" + } +} diff --git a/.web-docs/scripts/compile-to-webdocs.sh b/.web-docs/scripts/compile-to-webdocs.sh new file mode 100755 index 0000000..51a7238 --- /dev/null +++ b/.web-docs/scripts/compile-to-webdocs.sh @@ -0,0 +1,129 @@ +#!/usr/bin/env bash + +# Converts the folder name that the component documentation file +# is stored in into the integration slug of the component. +componentTypeFromFolderName() { + if [[ "$1" = "builders" ]]; then + echo "builder" + elif [[ "$1" = "provisioners" ]]; then + echo "provisioner" + elif [[ "$1" = "post-processors" ]]; then + echo "post-processor" + elif [[ "$1" = "datasources" ]]; then + echo "data-source" + else + echo "" + fi +} + +# $1: The content to adjust links +# $2: The organization of the integration +rewriteLinks() { + local result="$1" + local organization="$2" + + urlSegment="([^/]+)" + urlAnchor="(#[^/]+)" + + # Rewrite Component Index Page links to the Integration root page. 
+ # + # (\1) (\2) (\3) + # /packer/plugins/datasources/amazon#anchor-tag--> + # /packer/integrations/hashicorp/amazon#anchor-tag + local find="\(\/packer\/plugins\/$urlSegment\/$urlSegment$urlAnchor?\)" + local replace="\(\/packer\/integrations\/$organization\/\2\3\)" + result="$(echo "$result" | sed -E "s/$find/$replace/g")" + + + # Rewrite Component links to the Integration component page + # + # (\1) (\2) (\3) (\4) + # /packer/plugins/datasources/amazon/parameterstore#anchor-tag --> + # /packer/integrations/{organization}/amazon/latest/components/datasources/parameterstore + local find="\(\/packer\/plugins\/$urlSegment\/$urlSegment\/$urlSegment$urlAnchor?\)" + local replace="\(\/packer\/integrations\/$organization\/\2\/latest\/components\/\1\/\3\4\)" + result="$(echo "$result" | sed -E "s/$find/$replace/g")" + + # Rewrite the Component URL segment from the Packer Plugin format + # to the Integrations format + result="$(echo "$result" \ + | sed "s/\/datasources\//\/data-source\//g" \ + | sed "s/\/builders\//\/builder\//g" \ + | sed "s/\/post-processors\//\/post-processor\//g" \ + | sed "s/\/provisioners\//\/provisioner\//g" \ + )" + + echo "$result" +} + +# $1: Docs Dir +# $2: Web Docs Dir +# $3: Component File +# $4: The org of the integration +processComponentFile() { + local docsDir="$1" + local webDocsDir="$2" + local componentFile="$3" + + local escapedDocsDir="$(echo "$docsDir" | sed 's/\//\\\//g' | sed 's/\./\\\./g')" + local componentTypeAndSlug="$(echo "$componentFile" | sed "s/$escapedDocsDir\///g" | sed 's/\.mdx//g')" + + # Parse out the Component Slug & Component Type + local componentSlug="$(echo "$componentTypeAndSlug" | cut -d'/' -f 2)" + local componentType="$(componentTypeFromFolderName "$(echo "$componentTypeAndSlug" | cut -d'/' -f 1)")" + if [[ "$componentType" = "" ]]; then + echo "Failed to process '$componentFile', unexpected folder name." + echo "Documentation for components must be stored in one of:" + echo "builders, provisioners, post-processors, datasources" + exit 1 + fi + + + # Calculate the location of where this file will ultimately go + local webDocsFolder="$webDocsDir/components/$componentType/$componentSlug" + mkdir -p "$webDocsFolder" + local webDocsFile="$webDocsFolder/README.md" + local webDocsFileTmp="$webDocsFolder/README.md.tmp" + + # Copy over the file to its webDocsFile location + cp "$componentFile" "$webDocsFile" + + # Remove the Header + local lastMetadataLine="$(grep -n -m 2 '^\-\-\-' "$componentFile" | tail -n1 | cut -d':' -f1)" + cat "$webDocsFile" | tail -n +"$(($lastMetadataLine+2))" > "$webDocsFileTmp" + mv "$webDocsFileTmp" "$webDocsFile" + + # Remove the top H1, as this will be added automatically on the web + cat "$webDocsFile" | tail -n +3 > "$webDocsFileTmp" + mv "$webDocsFileTmp" "$webDocsFile" + + # Rewrite Links + rewriteLinks "$(cat "$webDocsFile")" "$4" > "$webDocsFileTmp" + mv "$webDocsFileTmp" "$webDocsFile" +} + +# Compiles the Packer SDC compiled docs folder down +# to a integrations-compliant folder (web docs) +# +# $1: The directory of the plugin +# $2: The directory of the SDC compiled docs files +# $3: The output directory to place the web-docs files +# $4: The org of the integration +compileWebDocs() { + local docsDir="$1/$2" + local webDocsDir="$1/$3" + + echo "Compiling MDX docs in '$2' to Markdown in '$3'..." 
+ # Create the web-docs directory if it hasn't already been created + mkdir -p "$webDocsDir" + + # Copy the README over + cp "$docsDir/README.md" "$webDocsDir/README.md" + + # Process all MDX component files (exclude index files, which are unsupported) + for file in $(find "$docsDir" | grep "$docsDir/.*/.*\.mdx" | grep --invert-match "index.mdx"); do + processComponentFile "$docsDir" "$webDocsDir" "$file" "$4" + done +} + +compileWebDocs "$1" "$2" "$3" "$4" diff --git a/GNUmakefile b/GNUmakefile index b25bfdc..848c46b 100644 --- a/GNUmakefile +++ b/GNUmakefile @@ -56,4 +56,10 @@ check-fmt: fmt echo "Found files that are not fmt'ed."; \ echo "You can use the command: \`go fmt ./...\` to reformat code."; \ exit 1; \ - fi \ No newline at end of file + fi + +build-docs: install-packer-sdc + @if [ -d ".docs" ]; then rm -r ".docs"; fi + @packer-sdc renderdocs -src "docs" -partials docs-partials/ -dst ".docs/" + @./.web-docs/scripts/compile-to-webdocs.sh "." ".docs" ".web-docs" "BrandonRomano" + @rm -r ".docs" diff --git a/docs/builders/digitalocean.mdx b/docs/builders/digitalocean.mdx index f321507..20e08e2 100644 --- a/docs/builders/digitalocean.mdx +++ b/docs/builders/digitalocean.mdx @@ -27,7 +27,7 @@ foundation of new servers that are launched within DigitalOcean. The builder does _not_ manage images. Once it creates an image, it is up to you to use it or delete it. -### Installation +## Installation To install this plugin using Packer v1.7.0 or later, copy and paste this code into your Packer configuration. @@ -65,8 +65,7 @@ each category, the available configuration keys are alphabetized. Here is a basic example. It is completely valid as soon as you enter your own access tokens: - - +**HCL2** ```hcl source "digitalocean" "example" { @@ -82,8 +81,7 @@ build { } ``` - - +**JSON** ```json { @@ -96,8 +94,6 @@ build { } ``` - - ### Communicator Config diff --git a/docs/datasources/digitalocen-image.mdx b/docs/datasources/digitalocen-image.mdx index 7405219..cc9d7fe 100644 --- a/docs/datasources/digitalocen-image.mdx +++ b/docs/datasources/digitalocen-image.mdx @@ -12,19 +12,19 @@ Type: `digitalocean-image` The DigitalOcean image data source is used look up the ID of an existing DigitalOcean image for use as a builder source. 
-### Required: +## Required: @include 'datasource/image/Config-required.mdx' -### Optional: +## Optional: @include 'datasource/image/Config-not-required.mdx' -### Output: +## Output: @include 'datasource/image/DatasourceOutput.mdx' -### Example Usage +## Example Usage ```hcl data "digitalocean-image" "example" { @@ -52,4 +52,4 @@ build { inline = ["touch /root/provisioned-by-packer"] } } -``` \ No newline at end of file +``` diff --git a/docs/post-processors/digitalocean-import.mdx b/docs/post-processors/digitalocean-import.mdx index e1531c6..ef67267 100644 --- a/docs/post-processors/digitalocean-import.mdx +++ b/docs/post-processors/digitalocean-import.mdx @@ -63,8 +63,7 @@ Optional: Here is a basic example: - - +**JSON** ```json { @@ -81,8 +80,7 @@ Here is a basic example: } ``` - - +**HCL2** ```hcl post-processor "digitalocean-import" { @@ -98,5 +96,3 @@ post-processor "digitalocean-import" { } ``` - - From 6c8364268ce1a940620adbd21524079cc0f06f99 Mon Sep 17 00:00:00 2001 From: BrandonRomano Date: Wed, 14 Jun 2023 15:43:11 -0700 Subject: [PATCH 02/12] Adds name --- .web-docs/metadata.hcl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.web-docs/metadata.hcl b/.web-docs/metadata.hcl index b38eedd..fe7b70c 100644 --- a/.web-docs/metadata.hcl +++ b/.web-docs/metadata.hcl @@ -1,7 +1,7 @@ # For full specification on the configuration of this file visit: # https://github.com/hashicorp/integration-template#metadata-configuration integration { - name = "TODO" + name = "DigitalOcean" description = "TODO" identifier = "packer/BrandonRomano/digitalocean" component { From f9b5a7cad1ea032c85b234b7d1dcd735aeee1c2f Mon Sep 17 00:00:00 2001 From: Wilken Rivera Date: Tue, 21 Nov 2023 20:19:45 +0000 Subject: [PATCH 03/12] Update README for integration framework --- ...notify-integration-release-via-manual.yaml | 18 ++-- .../notify-integration-release-via-tag.yaml | 30 ++++-- .web-docs/README.md | 47 +++------ .../components/builder/digitalocean/README.md | 51 ++++++---- .../data-source/digitalocen-image/README.md | 7 ++ .../digitalocean-import/README.md | 99 ++++++++----------- docs/README.md | 47 +++------ docs/builders/digitalocean.mdx | 19 ---- docs/post-processors/digitalocean-import.mdx | 19 ---- 9 files changed, 137 insertions(+), 200 deletions(-) diff --git a/.github/workflows/notify-integration-release-via-manual.yaml b/.github/workflows/notify-integration-release-via-manual.yaml index 7126bfc..d2c0ff0 100644 --- a/.github/workflows/notify-integration-release-via-manual.yaml +++ b/.github/workflows/notify-integration-release-via-manual.yaml @@ -1,10 +1,12 @@ +# Manual release workflow is used for deploying documentation updates +# on the specified branch without making an official plugin release. 
name: Notify Integration Release (Manual) on: workflow_dispatch: inputs: version: description: "The release version (semver)" - default: 0.0.1 + default: 1.0.0 required: false branch: description: "A branch or SHA" @@ -15,32 +17,34 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout this repo - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0 + uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 with: ref: ${{ github.event.inputs.branch }} # Ensure that Docs are Compiled - - uses: actions/setup-go@v4 + - uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.1.0 - shell: bash - run: make build-docs + run: make generate - shell: bash run: | if [[ -z "$(git status -s)" ]]; then echo "OK" else echo "Docs have been updated, but the compiled docs have not been committed." - echo "Run 'make build-docs', and commit the result to resolve this error." + echo "Run 'make generate', and commit the result to resolve this error." exit 1 fi # Perform the Release - name: Checkout integration-release-action - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0 + uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 with: repository: hashicorp/integration-release-action path: ./integration-release-action - name: Notify Release uses: ./integration-release-action with: - integration_identifier: 'packer/BrandonRomano/digitalocean' + # The integration identifier will be used by the Packer team to register the integration + # the expected format is packer// + integration_identifier: "packer/hashicorp/scaffolding" release_version: ${{ github.event.inputs.version }} release_sha: ${{ github.event.inputs.branch }} github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/notify-integration-release-via-tag.yaml b/.github/workflows/notify-integration-release-via-tag.yaml index 4ac5df4..7318c02 100644 --- a/.github/workflows/notify-integration-release-via-tag.yaml +++ b/.github/workflows/notify-integration-release-via-tag.yaml @@ -3,38 +3,52 @@ on: push: tags: - '*.*.*' # Proper releases - - '*.*.*-*' # Pre releases jobs: + strip-version: + runs-on: ubuntu-latest + outputs: + packer-version: ${{ steps.strip.outputs.packer-version }} + steps: + - name: Strip leading v from version tag + id: strip + env: + REF: ${{ github.ref_name }} + run: | + echo "packer-version=$(echo "$REF" | sed -E 's/v?([0-9]+\.[0-9]+\.[0-9]+)/\1/')" >> "$GITHUB_OUTPUT" notify-release: + needs: + - strip-version runs-on: ubuntu-latest steps: - name: Checkout this repo - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0 + uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 with: ref: ${{ github.ref }} # Ensure that Docs are Compiled - - uses: actions/setup-go@v4 + - uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.1.0 - shell: bash - run: make build-docs + run: make generate - shell: bash run: | if [[ -z "$(git status -s)" ]]; then echo "OK" else echo "Docs have been updated, but the compiled docs have not been committed." - echo "Run 'make build-docs', and commit the result to resolve this error." + echo "Run 'make generate', and commit the result to resolve this error." 
exit 1 fi # Perform the Release - name: Checkout integration-release-action - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0 + uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 with: repository: hashicorp/integration-release-action path: ./integration-release-action - name: Notify Release uses: ./integration-release-action with: - integration_identifier: 'packer/BrandonRomano/digitalocean' - release_version: ${{ github.ref_name }} + # The integration identifier will be used by the Packer team to register the integration + # the expected format is packer// + integration_identifier: "packer/hashicorp/scaffolding" + release_version: ${{ needs.strip-version.outputs.packer-version }} release_sha: ${{ github.ref }} github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.web-docs/README.md b/.web-docs/README.md index e1a7290..af9de93 100644 --- a/.web-docs/README.md +++ b/.web-docs/README.md @@ -1,21 +1,11 @@ -# DigitalOcean Plugins - The [DigitalOcean](https://www.digitalocean.com/) Packer plugin provides a builder for building images in DigitalOcean, and a post-processor for importing already-existing images into DigitalOcean. -## Installation - -### Using pre-built releases -#### Using the `packer init` command +### Installation -Starting from version 1.7, Packer supports a new `packer init` command allowing -automatic installation of Packer plugins. Read the -[Packer documentation](https://www.packer.io/docs/commands/init) for more information. - -To install this plugin, copy and paste this code into your Packer configuration . -Then, run [`packer init`](https://www.packer.io/docs/commands/init). +To install this plugin, copy and paste this code into your Packer configuration, then run [`packer init`](https://www.packer.io/docs/commands/init). ```hcl packer { @@ -28,33 +18,22 @@ packer { } ``` -#### Manual installation - -You can find pre-built binary releases of the plugin [here](https://github.com/digitalocean/packer-plugin-digitalocean/releases). -Once you have downloaded the latest archive corresponding to your target OS, -uncompress it to retrieve the plugin binary file corresponding to your platform. -To install the plugin, please follow the Packer documentation on -[installing a plugin](https://www.packer.io/docs/extending/plugins/#installing-plugins). +Alternatively, you can use `packer plugins install` to manage installation of this plugin. +```sh +$ packer plugins install github.com/digitalocean/digitalocean +``` -#### From Source - -If you prefer to build the plugin from its source code, clone the GitHub -repository locally and run the command `go build` from the root -directory. Upon successful compilation, a `packer-plugin-digitalocean` plugin -binary file can be found in the root directory. -To install the compiled plugin, please follow the official Packer documentation -on [installing a plugin](https://www.packer.io/docs/extending/plugins/#installing-plugins). - +### Components -## Plugin Contents +#### Builders -The DigitalOcean plugin is intended as a starting point for creating Packer plugins, containing: +- [digitalocean](/packer/integrations/digitalocean/latest/components/builder/digitalocean) - The builder takes a source image, runs any provisioning necessary on the image after launching it, then snapshots it into a reusable image. This reusable image can then be used as the foundation of new servers that are launched within DigitalOcean. 
-### Builders +#### Data Sources -- [builder](/docs/builders/digitalocean.mdx) - The builder takes a source image, runs any provisioning necessary on the image after launching it, then snapshots it into a reusable image. This reusable image can then be used as the foundation of new servers that are launched within DigitalOcean. +- [digitalocean-image](/packer/integrations/digitalocean/latest/components/datasource/image) - The DigitalOcean image data source is used look up the ID of an existing DigitalOcean image for use as a builder source. -### Post-processors +#### Post-processors -- [post-processor](/docs/post-processors/digitalocean-import.mdx) - The digitalocean-import post-processor is used to import images to DigitalOcean +- [digitalocean-import](/packer/integrations/digitalocean/latest/components/post-processor/import) -processor](/docs/post-processors/digitalocean-import.mdx) - The digitalocean-import post-processor is used to import images to DigitalOcean diff --git a/.web-docs/components/builder/digitalocean/README.md b/.web-docs/components/builder/digitalocean/README.md index 8d2d186..0039c87 100644 --- a/.web-docs/components/builder/digitalocean/README.md +++ b/.web-docs/components/builder/digitalocean/README.md @@ -10,25 +10,6 @@ foundation of new servers that are launched within DigitalOcean. The builder does _not_ manage images. Once it creates an image, it is up to you to use it or delete it. -## Installation - -To install this plugin using Packer v1.7.0 or later, copy and paste this code -into your Packer configuration. - -Then, run [`packer init`](https://www.packer.io/docs/commands/init). - - -```hcl -packer { - required_plugins { - digitalocean = { - version = ">= 1.0.4" - source = "github.com/digitalocean/digitalocean" - } - } -} -``` - ## Configuration Reference There are many configuration options available for the builder. They are @@ -69,6 +50,13 @@ each category, the available configuration keys are alphabetized. using a DigitalOcean API compatible service. It can also be specified via environment variable DIGITALOCEAN_API_URL. +- `http_retry_max` (\*int) - The maximum number of retries for requests that fail with a 429 or 500-level error. + The default value is 5. Set to 0 to disable reties. + +- `http_retry_wait_max` (\*float64) - The maximum wait time (in seconds) between failed API requests. Default: 30.0 + +- `http_retry_wait_min` (\*float64) - The minimum wait time (in seconds) between failed API requests. Default: 1.0 + - `private_networking` (bool) - Set to true to enable private networking for the droplet being created. This defaults to false, or not enabled. @@ -201,6 +189,31 @@ In addition to the builder options, a + + +- `temporary_key_pair_type` (string) - `dsa` | `ecdsa` | `ed25519` | `rsa` ( the default ) + + Specifies the type of key to create. The possible values are 'dsa', + 'ecdsa', 'ed25519', or 'rsa'. + + NOTE: DSA is deprecated and no longer recognized as secure, please + consider other alternatives like RSA or ED25519. + +- `temporary_key_pair_bits` (int) - Specifies the number of bits in the key to create. For RSA keys, the + minimum size is 1024 bits and the default is 4096 bits. Generally, 3072 + bits is considered sufficient. DSA keys must be exactly 1024 bits as + specified by FIPS 186-2. For ECDSA keys, bits determines the key length + by selecting from one of three elliptic curve sizes: 256, 384 or 521 + bits. Attempting to use bit lengths other than these three values for + ECDSA keys will fail. 
Ed25519 keys have a fixed length and bits will be + ignored. + + NOTE: DSA is deprecated and no longer recognized as secure as specified + by FIPS 186-5, please consider other alternatives like RSA or ED25519. + + + + - `ssh_host` (string) - The address to SSH to. This usually is automatically configured by the diff --git a/.web-docs/components/data-source/digitalocen-image/README.md b/.web-docs/components/data-source/digitalocen-image/README.md index ca65e57..23939ed 100644 --- a/.web-docs/components/data-source/digitalocen-image/README.md +++ b/.web-docs/components/data-source/digitalocen-image/README.md @@ -20,6 +20,13 @@ for use as a builder source. - `api_url` (string) - A non-standard API endpoint URL. Set this if you are using a DigitalOcean API compatible service. It can also be specified via environment variable DIGITALOCEAN_API_URL. +- `http_retry_max` (\*int) - The maximum number of retries for requests that fail with a 429 or 500-level error. + The default value is 5. Set to 0 to disable reties. + +- `http_retry_wait_max` (\*float64) - The maximum wait time (in seconds) between failed API requests. Default: 30.0 + +- `http_retry_wait_min` (\*float64) - The minimum wait time (in seconds) between failed API requests. Default: 1.0 + - `name` (string) - The name of the image to return. Only one of `name` or `name_regex` may be provided. - `name_regex` (string) - A regex matching the name of the image to return. Only one of `name` or `name_regex` may be provided. diff --git a/.web-docs/components/post-processor/digitalocean-import/README.md b/.web-docs/components/post-processor/digitalocean-import/README.md index 184dc44..3aed683 100644 --- a/.web-docs/components/post-processor/digitalocean-import/README.md +++ b/.web-docs/components/post-processor/digitalocean-import/README.md @@ -16,88 +16,67 @@ import is complete. For information about the requirements to use an image for a DigitalOcean Droplet, see DigitalOcean's [Custom Images documentation](https://www.digitalocean.com/docs/images/custom-images). -### Installation +## Configuration -To install this plugin using Packer v1.7.0 or later, copy and paste this code -into your Packer configuration. +There are some configuration options available for the post-processor. -Then, run [`packer init`](https://www.packer.io/docs/commands/init). +Required: + -```hcl -packer { - required_plugins { - digitalocean = { - version = ">= 1.0.4" - source = "github.com/digitalocean/digitalocean" - } - } -} -``` +- `api_token` (string) - A personal access token used to communicate with the DigitalOcean v2 API. + This may also be set using the `DIGITALOCEAN_TOKEN` or + `DIGITALOCEAN_ACCESS_TOKEN` environmental variables. -## Configuration +- `spaces_key` (string) - The access key used to communicate with Spaces. This may also be set using + the `DIGITALOCEAN_SPACES_ACCESS_KEY` environmental variable. -There are some configuration options available for the post-processor. +- `spaces_secret` (string) - The secret key used to communicate with Spaces. This may also be set using + the `DIGITALOCEAN_SPACES_SECRET_KEY` environmental variable. -Required: +- `spaces_region` (string) - The name of the region, such as `nyc3`, in which to upload the image to Spaces. -- `api_token` (string) - A personal access token used to communicate with - the DigitalOcean v2 API. This may also be set using the - `DIGITALOCEAN_TOKEN` or `DIGITALOCEAN_ACCESS_TOKEN` environmental variables. - `DIGITALOCEAN_API_TOKEN` is acceptable but will be deprecated in a future release. 
+- `space_name` (string) - The name of the specific Space where the image file will be copied to for + import. This Space must exist when the post-processor is run. -- `spaces_key` (string) - The access key used to communicate with Spaces. - This may also be set using the `DIGITALOCEAN_SPACES_ACCESS_KEY` - environmental variable. +- `image_name` (string) - The name to be used for the resulting DigitalOcean custom image. -- `spaces_secret` (string) - The secret key used to communicate with Spaces. - This may also be set using the `DIGITALOCEAN_SPACES_SECRET_KEY` - environmental variable. +- `image_regions` ([]string) - A list of DigitalOcean regions, such as `nyc3`, where the resulting image + will be available for use in creating Droplets. -- `spaces_region` (string) - The name of the region, such as `nyc3`, in which - to upload the image to Spaces. + -- `space_name` (string) - The name of the specific Space where the image file - will be copied to for import. This Space must exist when the - post-processor is run. -- `image_name` (string) - The name to be used for the resulting DigitalOcean - custom image. +Optional: -- `image_regions` (array of string) - A list of DigitalOcean regions, such - as `nyc3`, where the resulting image will be available for use in creating - Droplets. + -Optional: +- `api_token` (string) - A personal access token used to communicate with the DigitalOcean v2 API. + This may also be set using the `DIGITALOCEAN_TOKEN` or + `DIGITALOCEAN_ACCESS_TOKEN` environmental variables. + +- `spaces_key` (string) - The access key used to communicate with Spaces. This may also be set using + the `DIGITALOCEAN_SPACES_ACCESS_KEY` environmental variable. + +- `spaces_secret` (string) - The secret key used to communicate with Spaces. This may also be set using + the `DIGITALOCEAN_SPACES_SECRET_KEY` environmental variable. -- `image_description` (string) - The description to set for the resulting - imported image. +- `spaces_region` (string) - The name of the region, such as `nyc3`, in which to upload the image to Spaces. -- `image_distribution` (string) - The name of the distribution to set for - the resulting imported image. +- `space_name` (string) - The name of the specific Space where the image file will be copied to for + import. This Space must exist when the post-processor is run. + +- `image_name` (string) - The name to be used for the resulting DigitalOcean custom image. + +- `image_regions` ([]string) - A list of DigitalOcean regions, such as `nyc3`, where the resulting image + will be available for use in creating Droplets. + + -- `image_tags` (array of strings) - A list of tags to apply to the resulting - imported image. - `keep_input_artifact` (boolean) - if true, do not delete the source virtual machine image after importing it to the cloud. Defaults to false. -- `skip_clean` (boolean) - Whether we should skip removing the image file - uploaded to Spaces after the import process has completed. "true" means - that we should leave it in the Space, "false" means to clean it out. - Defaults to `false`. - -- `space_object_name` (string) - The name of the key used in the Space where - the image file will be copied to for import. This is treated as a - [template engine](/docs/templates/legacy_json_templates/engine). Therefore, you - may use user variables and template functions in this field. - If not specified, this will default to `packer-import-{{timestamp}}`. 
- -- `timeout` (number) - The length of time in minutes to wait for individual - steps in the process to successfully complete. This includes both importing - the image from Spaces as well as distributing the resulting image to - additional regions. If not specified, this will default to 20. - ## Basic Example Here is a basic example: diff --git a/docs/README.md b/docs/README.md index e1a7290..af9de93 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,21 +1,11 @@ -# DigitalOcean Plugins - The [DigitalOcean](https://www.digitalocean.com/) Packer plugin provides a builder for building images in DigitalOcean, and a post-processor for importing already-existing images into DigitalOcean. -## Installation - -### Using pre-built releases -#### Using the `packer init` command +### Installation -Starting from version 1.7, Packer supports a new `packer init` command allowing -automatic installation of Packer plugins. Read the -[Packer documentation](https://www.packer.io/docs/commands/init) for more information. - -To install this plugin, copy and paste this code into your Packer configuration . -Then, run [`packer init`](https://www.packer.io/docs/commands/init). +To install this plugin, copy and paste this code into your Packer configuration, then run [`packer init`](https://www.packer.io/docs/commands/init). ```hcl packer { @@ -28,33 +18,22 @@ packer { } ``` -#### Manual installation - -You can find pre-built binary releases of the plugin [here](https://github.com/digitalocean/packer-plugin-digitalocean/releases). -Once you have downloaded the latest archive corresponding to your target OS, -uncompress it to retrieve the plugin binary file corresponding to your platform. -To install the plugin, please follow the Packer documentation on -[installing a plugin](https://www.packer.io/docs/extending/plugins/#installing-plugins). +Alternatively, you can use `packer plugins install` to manage installation of this plugin. +```sh +$ packer plugins install github.com/digitalocean/digitalocean +``` -#### From Source - -If you prefer to build the plugin from its source code, clone the GitHub -repository locally and run the command `go build` from the root -directory. Upon successful compilation, a `packer-plugin-digitalocean` plugin -binary file can be found in the root directory. -To install the compiled plugin, please follow the official Packer documentation -on [installing a plugin](https://www.packer.io/docs/extending/plugins/#installing-plugins). - +### Components -## Plugin Contents +#### Builders -The DigitalOcean plugin is intended as a starting point for creating Packer plugins, containing: +- [digitalocean](/packer/integrations/digitalocean/latest/components/builder/digitalocean) - The builder takes a source image, runs any provisioning necessary on the image after launching it, then snapshots it into a reusable image. This reusable image can then be used as the foundation of new servers that are launched within DigitalOcean. -### Builders +#### Data Sources -- [builder](/docs/builders/digitalocean.mdx) - The builder takes a source image, runs any provisioning necessary on the image after launching it, then snapshots it into a reusable image. This reusable image can then be used as the foundation of new servers that are launched within DigitalOcean. +- [digitalocean-image](/packer/integrations/digitalocean/latest/components/datasource/image) - The DigitalOcean image data source is used look up the ID of an existing DigitalOcean image for use as a builder source. 
 
-### Post-processors
+#### Post-processors
 
-- [post-processor](/docs/post-processors/digitalocean-import.mdx) - The digitalocean-import post-processor is used to import images to DigitalOcean
+- [digitalocean-import](/packer/integrations/digitalocean/latest/components/post-processor/import) - The digitalocean-import post-processor is used to import images to DigitalOcean

diff --git a/docs/builders/digitalocean.mdx b/docs/builders/digitalocean.mdx
index 20e08e2..7693f89 100644
--- a/docs/builders/digitalocean.mdx
+++ b/docs/builders/digitalocean.mdx
@@ -27,25 +27,6 @@ foundation of new servers that are launched within DigitalOcean.
 The builder does _not_ manage images. Once it creates an image, it is up to you
 to use it or delete it.
 
-## Installation
-
-To install this plugin using Packer v1.7.0 or later, copy and paste this code
-into your Packer configuration.
-
-Then, run [`packer init`](https://www.packer.io/docs/commands/init).
-
-
-```hcl
-packer {
-  required_plugins {
-    digitalocean = {
-      version = ">= 1.0.4"
-      source = "github.com/digitalocean/digitalocean"
-    }
-  }
-}
-```
-
 ## Configuration Reference
 
 There are many configuration options available for the builder. They are
diff --git a/docs/post-processors/digitalocean-import.mdx b/docs/post-processors/digitalocean-import.mdx
index ef67267..c6aba06 100644
--- a/docs/post-processors/digitalocean-import.mdx
+++ b/docs/post-processors/digitalocean-import.mdx
@@ -25,25 +25,6 @@ import is complete.
 For information about the requirements to use an image for a DigitalOcean
 Droplet, see DigitalOcean's [Custom Images documentation](https://www.digitalocean.com/docs/images/custom-images).
 
-### Installation
-
-To install this plugin using Packer v1.7.0 or later, copy and paste this code
-into your Packer configuration.
-
-Then, run [`packer init`](https://www.packer.io/docs/commands/init).
-
-
-```hcl
-packer {
-  required_plugins {
-    digitalocean = {
-      version = ">= 1.0.4"
-      source = "github.com/digitalocean/digitalocean"
-    }
-  }
-}
-```
-
 ## Configuration
 
 There are some configuration options available for the post-processor.

From ff16cfd97e38792e1c3c31a0e972dcdb57d76ff8 Mon Sep 17 00:00:00 2001
From: Wilken Rivera
Date: Wed, 22 Nov 2023 16:59:14 +0000
Subject: [PATCH 04/12] Update integration Organization name

---
 .web-docs/metadata.hcl | 2 +-
 GNUmakefile            | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.web-docs/metadata.hcl b/.web-docs/metadata.hcl
index fe7b70c..5d374b5 100644
--- a/.web-docs/metadata.hcl
+++ b/.web-docs/metadata.hcl
@@ -3,7 +3,7 @@
 integration {
   name = "DigitalOcean"
   description = "TODO"
-  identifier = "packer/BrandonRomano/digitalocean"
+  identifier = "packer/digitalocean/digitalocean"
   component {
     type = "data-source"
     name = "DigitalOcean Image"
diff --git a/GNUmakefile b/GNUmakefile
index 848c46b..5bb7609 100644
--- a/GNUmakefile
+++ b/GNUmakefile
@@ -61,5 +61,5 @@ check-fmt: fmt
 build-docs: install-packer-sdc
 	@if [ -d ".docs" ]; then rm -r ".docs"; fi
 	@packer-sdc renderdocs -src "docs" -partials docs-partials/ -dst ".docs/"
-	@./.web-docs/scripts/compile-to-webdocs.sh "." ".docs" ".web-docs" "BrandonRomano"
+	@./.web-docs/scripts/compile-to-webdocs.sh "." ".docs" ".web-docs" "digitalocean"
 	@rm -r ".docs"

From e7b2d777de0c51ea58e7a4afca7b496c34b3660d Mon Sep 17 00:00:00 2001
From: Wilken Rivera
Date: Wed, 22 Nov 2023 18:20:22 +0000
Subject: [PATCH 05/12] Update identifier in workflows

---
 .github/workflows/notify-integration-release-via-manual.yaml | 2 +-
 .github/workflows/notify-integration-release-via-tag.yaml    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/notify-integration-release-via-manual.yaml b/.github/workflows/notify-integration-release-via-manual.yaml
index d2c0ff0..397db50 100644
--- a/.github/workflows/notify-integration-release-via-manual.yaml
+++ b/.github/workflows/notify-integration-release-via-manual.yaml
@@ -44,7 +44,7 @@ jobs:
         with:
           # The integration identifier will be used by the Packer team to register the integration
          # the expected format is packer/<organization>/<plugin-name>
-          integration_identifier: "packer/hashicorp/scaffolding"
+          integration_identifier: "packer/digitalocean/digitalocean"
           release_version: ${{ github.event.inputs.version }}
           release_sha: ${{ github.event.inputs.branch }}
           github_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/notify-integration-release-via-tag.yaml b/.github/workflows/notify-integration-release-via-tag.yaml
index 7318c02..7016c90 100644
--- a/.github/workflows/notify-integration-release-via-tag.yaml
+++ b/.github/workflows/notify-integration-release-via-tag.yaml
@@ -48,7 +48,7 @@ jobs:
         with:
          # The integration identifier will be used by the Packer team to register the integration
          # the expected format is packer/<organization>/<plugin-name>
-          integration_identifier: "packer/hashicorp/scaffolding"
+          integration_identifier: "packer/digitalocean/digitalocean"
           release_version: ${{ needs.strip-version.outputs.packer-version }}
           release_sha: ${{ github.ref }}
           github_token: ${{ secrets.GITHUB_TOKEN }}

From f209a001cbaec826328c04a1642dc68568ae6f5a Mon Sep 17 00:00:00 2001
From: Lucas Bajolet
Date: Fri, 24 Nov 2023 15:02:35 -0500
Subject: [PATCH 06/12] Makefile: replace build-docs by generate

---
 .github/workflows/ensure-docs-compiled.yaml |  4 ++--
 GNUmakefile                                 | 14 ++++----------
 2 files changed, 6 insertions(+), 12 deletions(-)

diff --git a/.github/workflows/ensure-docs-compiled.yaml b/.github/workflows/ensure-docs-compiled.yaml
index 8d10ac7..00c3620 100644
--- a/.github/workflows/ensure-docs-compiled.yaml
+++ b/.github/workflows/ensure-docs-compiled.yaml
@@ -9,14 +9,14 @@ jobs:
         uses: actions/checkout@v2
       - uses: actions/setup-go@v4
       - shell: bash
-        run: make build-docs
+        run: make generate
       - shell: bash
         run: |
           if [[ -z "$(git status -s)" ]]; then
             echo "OK"
           else
             echo "Docs have been updated, but the compiled docs have not been committed."
-            echo "Run 'make build-docs', and commit the result to resolve this error."
+            echo "Run 'make generate', and commit the result to resolve this error."
             exit 1
           fi
 
diff --git a/GNUmakefile b/GNUmakefile
index 5bb7609..1c33f8c 100644
--- a/GNUmakefile
+++ b/GNUmakefile
@@ -20,10 +20,6 @@ test:
 install-packer-sdc: ## Install packer software development command
 	@go install github.com/hashicorp/packer-plugin-sdk/cmd/packer-sdc@${HASHICORP_PACKER_PLUGIN_SDK_VERSION}
 
-ci-release-docs: install-packer-sdc
-	@packer-sdc renderdocs -src docs -partials docs-partials/ -dst docs/
-	@/bin/sh -c "[ -d docs ] && zip -r docs.zip docs/"
-
 plugin-check: install-packer-sdc build
 	@packer-sdc plugin-check ${BINARY}
 
@@ -32,6 +28,10 @@ testacc: dev
 generate: install-packer-sdc
 	@go generate ./...
+	@if [ -d ".docs" ]; then rm -r ".docs"; fi
+	@packer-sdc renderdocs -src "docs" -partials docs-partials/ -dst ".docs/"
+	@./.web-docs/scripts/compile-to-webdocs.sh "." ".docs" ".web-docs" "digitalocean"
+	@rm -r ".docs"
 
 check-generate: generate
 	echo "==> Checking that auto-generated code is not changed..."
@@ -57,9 +57,3 @@ check-fmt: fmt
 		echo "You can use the command: \`go fmt ./...\` to reformat code."; \
 		exit 1; \
 	fi
-
-build-docs: install-packer-sdc
-	@if [ -d ".docs" ]; then rm -r ".docs"; fi
-	@packer-sdc renderdocs -src "docs" -partials docs-partials/ -dst ".docs/"
-	@./.web-docs/scripts/compile-to-webdocs.sh "." ".docs" ".web-docs" "digitalocean"
-	@rm -r ".docs"

From e6a1a682f8016e73f864c4768a55442670a27006 Mon Sep 17 00:00:00 2001
From: Lucas Bajolet
Date: Fri, 24 Nov 2023 15:03:00 -0500
Subject: [PATCH 07/12] metadata: add summary for plugin

---
 .web-docs/metadata.hcl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.web-docs/metadata.hcl b/.web-docs/metadata.hcl
index 5d374b5..8a8c056 100644
--- a/.web-docs/metadata.hcl
+++ b/.web-docs/metadata.hcl
@@ -2,7 +2,7 @@
 # https://github.com/hashicorp/integration-template#metadata-configuration
 integration {
   name = "DigitalOcean"
-  description = "TODO"
+  description = "The DigitalOcean plugin can be used with HashiCorp Packer to create custom images for DigitalOcean."
   identifier = "packer/digitalocean/digitalocean"
   component {
     type = "data-source"

From b031f5f4f3576882a8f8ff4f9766bf7db5509049 Mon Sep 17 00:00:00 2001
From: Wilken Rivera
Date: Wed, 29 Nov 2023 17:18:23 +0000
Subject: [PATCH 08/12] .web-docs/metadata.hcl: Update component slugs

---
 .web-docs/metadata.hcl | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.web-docs/metadata.hcl b/.web-docs/metadata.hcl
index 8a8c056..db2c658 100644
--- a/.web-docs/metadata.hcl
+++ b/.web-docs/metadata.hcl
@@ -7,7 +7,7 @@ integration {
   component {
     type = "data-source"
     name = "DigitalOcean Image"
-    slug = "digitalocen-image"
+    slug = "image"
   }
   component {
     type = "builder"
@@ -17,6 +17,6 @@ integration {
   component {
     type = "post-processor"
     name = "DigitalOcean Import"
-    slug = "digitalocean-import"
+    slug = "import"
   }
 }

From f16b4865fdf6c19dd6fdfb28807298e925ecc3e8 Mon Sep 17 00:00:00 2001
From: Wilken Rivera
Date: Wed, 29 Nov 2023 17:19:19 +0000
Subject: [PATCH 09/12] docs/README.md: Fix broken plugin links on integration portal

---
 docs/README.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/README.md b/docs/README.md
index af9de93..c5498c3 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -28,12 +28,12 @@ $ packer plugins install github.com/digitalocean/digitalocean
 
 #### Builders
 
-- [digitalocean](/packer/integrations/digitalocean/latest/components/builder/digitalocean) - The builder takes a source image, runs any provisioning necessary on the image after launching it, then snapshots it into a reusable image. This reusable image can then be used as the foundation of new servers that are launched within DigitalOcean.
+- [digitalocean](/packer/integrations/digitalocean/digitalocean/latest/components/builder/digitalocean) - The builder takes a source image, runs any provisioning necessary on the image after launching it, then snapshots it into a reusable image. This reusable image can then be used as the foundation of new servers that are launched within DigitalOcean.
 
 #### Data Sources
 
-- [digitalocean-image](/packer/integrations/digitalocean/latest/components/datasource/image) - The DigitalOcean image data source is used to look up the ID of an existing DigitalOcean image for use as a builder source.
+- [digitalocean-image](/packer/integrations/digitalocean/digitalocean/latest/components/datasource/image) - The DigitalOcean image data source is used to look up the ID of an existing DigitalOcean image for use as a builder source.
 
 #### Post-processors
 
-- [digitalocean-import](/packer/integrations/digitalocean/latest/components/post-processor/import) - The digitalocean-import post-processor is used to import images to DigitalOcean
+- [digitalocean-import](/packer/integrations/digitalocean/digitalocean/latest/components/post-processor/import) - The digitalocean-import post-processor is used to import images to DigitalOcean

From 8b9b1c7af343c1a87ab2f79e036496b98448e4c1 Mon Sep 17 00:00:00 2001
From: Lucas Bajolet
Date: Tue, 16 Jan 2024 13:46:13 -0500
Subject: [PATCH 10/12] workflows: remove ensure-docs-compiled

Since every PR that gets merged has a check performed to make sure the docs
are up-to-date, we don't need to add an extra action that does the same thing
differently.

---
 .github/workflows/ensure-docs-compiled.yaml | 22 ---------------------
 1 file changed, 22 deletions(-)
 delete mode 100644 .github/workflows/ensure-docs-compiled.yaml

diff --git a/.github/workflows/ensure-docs-compiled.yaml b/.github/workflows/ensure-docs-compiled.yaml
deleted file mode 100644
index 00c3620..0000000
--- a/.github/workflows/ensure-docs-compiled.yaml
+++ /dev/null
@@ -1,22 +0,0 @@
-name: Ensure Docs are Compiled
-on:
-  push:
-jobs:
-  ensure-docs-compiled:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout 🛎
-        uses: actions/checkout@v2
-      - uses: actions/setup-go@v4
-      - shell: bash
-        run: make generate
-      - shell: bash
-        run: |
-          if [[ -z "$(git status -s)" ]]; then
-            echo "OK"
-          else
-            echo "Docs have been updated, but the compiled docs have not been committed."
-            echo "Run 'make generate', and commit the result to resolve this error."
-            exit 1
-          fi
-

From 666511f06a75274902082eb38b993600ddd3fa64 Mon Sep 17 00:00:00 2001
From: Lucas Bajolet
Date: Tue, 16 Jan 2024 13:50:35 -0500
Subject: [PATCH 11/12] workflows: release use make for docs checks

Since the Makefile has a target for generating and checking that there's no
diff between what's been generated and what's versioned, we don't need custom
logic to perform the check, so we reuse this target for that step.

---
 .../notify-integration-release-via-manual.yaml | 14 +++-----------
 1 file changed, 3 insertions(+), 11 deletions(-)

diff --git a/.github/workflows/notify-integration-release-via-manual.yaml b/.github/workflows/notify-integration-release-via-manual.yaml
index 397db50..1f8a1c8 100644
--- a/.github/workflows/notify-integration-release-via-manual.yaml
+++ b/.github/workflows/notify-integration-release-via-manual.yaml
@@ -22,17 +22,9 @@ jobs:
           ref: ${{ github.event.inputs.branch }}
       # Ensure that Docs are Compiled
       - uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.1.0
-      - shell: bash
-        run: make generate
-      - shell: bash
-        run: |
-          if [[ -z "$(git status -s)" ]]; then
-            echo "OK"
-          else
-            echo "Docs have been updated, but the compiled docs have not been committed."
-            echo "Run 'make generate', and commit the result to resolve this error."
-            exit 1
-          fi
+      - name: check docs are up-to-date
+        shell: bash
+        run: make check-generate
       # Perform the Release
       - name: Checkout integration-release-action
         uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2

From 57e8d8070d91644d12ec6c295f4755081eda0c0a Mon Sep 17 00:00:00 2001
From: Lucas Bajolet
Date: Tue, 16 Jan 2024 16:22:04 -0500
Subject: [PATCH 12/12] .web-docs: update generated README

While updating the docs, the README was not regenerated before being added to
the PR, causing the checks to fail (expectedly). This commit regenerates
.web-docs/README.md so that it matches the README in docs.

---
 .web-docs/README.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.web-docs/README.md b/.web-docs/README.md
index af9de93..c5498c3 100644
--- a/.web-docs/README.md
+++ b/.web-docs/README.md
@@ -28,12 +28,12 @@ $ packer plugins install github.com/digitalocean/digitalocean
 
 #### Builders
 
-- [digitalocean](/packer/integrations/digitalocean/latest/components/builder/digitalocean) - The builder takes a source image, runs any provisioning necessary on the image after launching it, then snapshots it into a reusable image. This reusable image can then be used as the foundation of new servers that are launched within DigitalOcean.
+- [digitalocean](/packer/integrations/digitalocean/digitalocean/latest/components/builder/digitalocean) - The builder takes a source image, runs any provisioning necessary on the image after launching it, then snapshots it into a reusable image. This reusable image can then be used as the foundation of new servers that are launched within DigitalOcean.
 
 #### Data Sources
 
-- [digitalocean-image](/packer/integrations/digitalocean/latest/components/datasource/image) - The DigitalOcean image data source is used to look up the ID of an existing DigitalOcean image for use as a builder source.
+- [digitalocean-image](/packer/integrations/digitalocean/digitalocean/latest/components/datasource/image) - The DigitalOcean image data source is used to look up the ID of an existing DigitalOcean image for use as a builder source.
 
 #### Post-processors
 
-- [digitalocean-import](/packer/integrations/digitalocean/latest/components/post-processor/import) - The digitalocean-import post-processor is used to import images to DigitalOcean
+- [digitalocean-import](/packer/integrations/digitalocean/digitalocean/latest/components/post-processor/import) - The digitalocean-import post-processor is used to import images to DigitalOcean
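
For a concrete picture of how the `digitalocean-import` options documented earlier in this series fit together, the sketch below wires the post-processor into a minimal HCL template. It is illustrative only and is not the Basic Example shipped in the docs: the Space name, file paths, image name, and regions are placeholders, the credentials are assumed to come from the environment variables named in the option descriptions, and the `file` source merely stands in for whatever builder actually produced the disk image (on newer Packer releases it may need its own `required_plugins` entry).

```hcl
packer {
  required_plugins {
    digitalocean = {
      version = ">= 1.0.4"
      source  = "github.com/digitalocean/digitalocean"
    }
  }
}

# Stand-in source: any builder that leaves a raw/qcow2/vhdx/vmdk artifact on
# disk would work here; "file" just keeps the sketch self-contained.
source "file" "custom_image" {
  source = "output/custom-image.raw"
  target = "imported/custom-image.raw"
}

build {
  sources = ["source.file.custom_image"]

  post-processor "digitalocean-import" {
    # api_token, spaces_key, and spaces_secret are omitted; they are read from
    # DIGITALOCEAN_TOKEN (or DIGITALOCEAN_ACCESS_TOKEN),
    # DIGITALOCEAN_SPACES_ACCESS_KEY, and DIGITALOCEAN_SPACES_SECRET_KEY.
    spaces_region = "nyc3"
    space_name    = "packer-import-bucket"  # must already exist
    image_name    = "custom-image-example"
    image_regions = ["nyc3", "sfo3"]
    timeout       = 30                      # minutes; the default is 20
  }
}
```

Running `packer build` against a template shaped like this uploads the artifact to the named Space, triggers DigitalOcean's custom-image import, and then distributes the result to each region listed in `image_regions`; `skip_clean` and `space_object_name` can be added if the uploaded object should be kept or named differently.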