diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml
deleted file mode 100644
index 3a89ae449..000000000
--- a/.github/workflows/daily.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-name: Daily
-on:
- workflow_dispatch:
- schedule:
- - cron: '0 5 * * *'
-
-jobs:
- graphql:
- name: 'Update GraphQL TS Schema'
- runs-on: ubuntu-20.04
- defaults:
- run:
- shell: bash
- working-directory: www
- steps:
- - name: 'Checkout'
- uses: actions/checkout@v3
- with:
- token: ${{ secrets.PLURAL_BOT_PAT }}
- - name: Read Node.js version from package.json
- run: echo ::set-output name=nodeVersion::$(node -p "require('./package.json').engines.node")
- id: engines
- - name: 'Setup Node'
- uses: actions/setup-node@v3
- with:
- node-version: ${{ steps.engines.outputs.nodeVersion }}
- - name: 'Update Schema'
- run: |
- yarn install --immutable
- yarn graphql:codegen
- - name: 'Commit and push'
- uses: EndBug/add-and-commit@v9
- with:
- add: 'www'
- author_name: Plural Bot
- author_email: gh-bot@plural.sh
- commit: --signoff
- default_author: user_info
- message: "Update GraphQL TS Schema"
- push: true
diff --git a/.github/workflows/firebase-hosting-pull-request.yml b/.github/workflows/firebase-hosting-pull-request.yml
deleted file mode 100644
index 0fcbef583..000000000
--- a/.github/workflows/firebase-hosting-pull-request.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-# This file was auto-generated by the Firebase CLI
-# https://github.com/firebase/firebase-tools
-
-name: Deploy to Firebase Hosting on PR
-on:
- pull_request:
- branches:
- - master
- paths:
- - ".github/workflows/firebase-hosting-pull-request.yml"
- - "www/**"
-jobs:
- build_and_preview:
- if: ${{ github.triggering_actor != 'plural-renovate[bot]' }}
- runs-on: ubuntu-20.04
- steps:
- - uses: actions/checkout@v3
- - run: cd www && yarn install --immutable && CI=false yarn build:staging
- - uses: FirebaseExtended/action-hosting-deploy@v0
- with:
- repoToken: '${{ secrets.GITHUB_TOKEN }}'
- firebaseServiceAccount: '${{ secrets.FIREBASE_SERVICE_ACCOUNT_PLURALSH }}'
- projectId: pluralsh
diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
index 1c2ee8a48..fbe87a7e8 100644
--- a/.github/workflows/test.yaml
+++ b/.github/workflows/test.yaml
@@ -152,6 +152,7 @@ jobs:
curl -sL 'https://github.com/pluralsh/plural-cli/releases/download/v0.5.18/plural-cli_0.5.18_Linux_amd64.tar.gz' | tar xzvf -
chmod +x plural
cp plural /usr/local/bin/plural
+ - run: make install-cockroach
- run: make testup
- name: Restore dependencies cache
uses: actions/cache@v3
diff --git a/.github/workflows/www.yaml b/.github/workflows/www.yaml
index bbbfa28ab..1577a319f 100644
--- a/.github/workflows/www.yaml
+++ b/.github/workflows/www.yaml
@@ -136,41 +136,41 @@ jobs:
node-version: ${{ steps.engines.outputs.nodeVersion }}
- run: yarn --immutable
- run: yarn lint
- e2e:
- name: End-to-end test
- runs-on: ubuntu-20.04
- env:
- CYPRESS_EMAIL: ${{ secrets.CYPRESS_EMAIL }}
- CYPRESS_PASSWORD: ${{ secrets.CYPRESS_PASSWORD }}
- defaults:
- run:
- shell: bash
- working-directory: www
- steps:
- - name: 'Checkout'
- uses: actions/checkout@v3
- - name: Read Node.js version from package.json
- run: echo ::set-output name=nodeVersion::$(node -p "require('./package.json').engines.node")
- id: engines
- - name: 'Setup Node'
- uses: actions/setup-node@v3
- with:
- node-version: ${{ steps.engines.outputs.nodeVersion }}
- - run: yarn # Should run the --immutable in the CI by default
- - run: cd e2e && yarn
- - run: yarn e2e
- - uses: 8398a7/action-slack@v3
- if: failure()
- with:
- status: ${{ job.status }}
- fields: workflow,repo,commit,author,pullRequest
- env:
- SLACK_WEBHOOK_URL: ${{ secrets.SLACK_CYPRESS_WEBHOOK }}
- - name: Upload Screenshots and Videos to Slack
- if: failure()
- uses: trymbill/cypress-slack-video-upload-action@v1.3.0
- with:
- token: ${{ secrets.SLACK_CYPRESS_TOKEN }}
- workdir: www/e2e/cypress
- channels: cypress-artifacts
- message-text: "See the attached videos and screenshots for more information."
+ # e2e:
+ # name: End-to-end test
+ # runs-on: ubuntu-20.04
+ # env:
+ # CYPRESS_EMAIL: ${{ secrets.CYPRESS_EMAIL }}
+ # CYPRESS_PASSWORD: ${{ secrets.CYPRESS_PASSWORD }}
+ # defaults:
+ # run:
+ # shell: bash
+ # working-directory: www
+ # steps:
+ # - name: 'Checkout'
+ # uses: actions/checkout@v3
+ # - name: Read Node.js version from package.json
+ # run: echo ::set-output name=nodeVersion::$(node -p "require('./package.json').engines.node")
+ # id: engines
+ # - name: 'Setup Node'
+ # uses: actions/setup-node@v3
+ # with:
+ # node-version: ${{ steps.engines.outputs.nodeVersion }}
+ # - run: yarn # Should run the --immutable in the CI by default
+ # - run: cd e2e && yarn
+ # - run: yarn e2e
+ # - uses: 8398a7/action-slack@v3
+ # if: failure()
+ # with:
+ # status: ${{ job.status }}
+ # fields: workflow,repo,commit,author,pullRequest
+ # env:
+ # SLACK_WEBHOOK_URL: ${{ secrets.SLACK_CYPRESS_WEBHOOK }}
+ # - name: Upload Screenshots and Videos to Slack
+ # if: failure()
+ # uses: trymbill/cypress-slack-video-upload-action@v1.3.0
+ # with:
+ # token: ${{ secrets.SLACK_CYPRESS_TOKEN }}
+ # workdir: www/e2e/cypress
+ # channels: cypress-artifacts
+ # message-text: "See the attached videos and screenshots for more information."
diff --git a/.gitignore b/.gitignore
index 654e49ab9..e822bae81 100644
--- a/.gitignore
+++ b/.gitignore
@@ -67,4 +67,7 @@ yarn-error.log*
cert.pem
key.pem
+
+/test-certs/
+
.vscode
diff --git a/.tool-versions b/.tool-versions
index 265d1a352..72808ec94 100644
--- a/.tool-versions
+++ b/.tool-versions
@@ -1,2 +1,2 @@
-erlang 24.3.4.14
-elixir 1.12.3
+erlang 24.3.4.17
+elixir 1.13.4
diff --git a/Dockerfile b/Dockerfile
index f82fb15e2..40a464b58 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM bitwalker/alpine-elixir:1.12.3 AS builder
+FROM bitwalker/alpine-elixir:1.13.4 AS builder
# The following are build arguments used to change variable parts of the image.
# The name of your application/release (required)
diff --git a/Makefile b/Makefile
index 149a2b8c8..edfcf8044 100644
--- a/Makefile
+++ b/Makefile
@@ -8,6 +8,7 @@ DKR_HOST ?= dkr.plural.sh
dep ?= forge-core
GIT_COMMIT ?= abe123
TARGETARCH ?= amd64
+COCKROACH_VSN ?= v24.1.3
help:
@perl -nle'print $& if m{^[a-zA-Z_-]+:.*?## .*$$}' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
@@ -48,11 +49,26 @@ else
docker push $(DKR_HOST)/plural/${APP_NAME}:$(APP_VSN)
endif
-testup: ## sets up dependent services for test
- docker-compose up -d
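+# installs the cockroach binary (and libgeos) to /usr/local; needed to generate the test certs in CI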
+install-cockroach:
+ sudo curl https://binaries.cockroachdb.com/cockroach-$(COCKROACH_VSN).linux-amd64.tgz | tar -xz && \
+ sudo cp -i cockroach-$(COCKROACH_VSN).linux-amd64/cockroach /usr/local/bin/ && \
+ sudo mkdir -p /usr/local/lib/cockroach && \
+ sudo cp -i cockroach-$(COCKROACH_VSN).linux-amd64/lib/libgeos.so /usr/local/lib/cockroach/ && \
+ sudo cp -i cockroach-$(COCKROACH_VSN).linux-amd64/lib/libgeos_c.so /usr/local/lib/cockroach/ && \
+ cockroach version
+
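+# generates a throwaway CA plus node and root client certs under ./test-certs for the local cockroach used in tests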
+test-certs:
+ mkdir test-certs && \
+ cockroach cert create-ca --certs-dir test-certs --ca-key test-certs/ca.key && \
+ cockroach cert create-node localhost 127.0.0.1 --certs-dir test-certs --ca-key test-certs/ca.key && \
+ cockroach cert create-client root --certs-dir test-certs --ca-key test-certs/ca.key && \
+ cockroach cert list --certs-dir test-certs
+
+testup: test-certs ## sets up dependent services for test
+ docker compose up -d
testdown: ## tear down test dependencies
- docker-compose down
+ docker compose down
connectdb: ## proxies the db in kubernetes via kubectl
@echo "run psql -U forge -h 127.0.0.1 forge to connect"
diff --git a/apps/core/lib/core/clients/console.ex b/apps/core/lib/core/clients/console.ex
new file mode 100644
index 000000000..85f726e6a
--- /dev/null
+++ b/apps/core/lib/core/clients/console.ex
@@ -0,0 +1,96 @@
+defmodule Core.Clients.Console do
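+  @moduledoc """
+  Thin GraphQL client for the Plural Console API, built on Req + AbsintheClient.
+  Used to list clusters, resolve git repositories, and create/update/delete service deployments.
+  """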
+ require Logger
+
+ @clusters_q """
+ query {
+ clusters(first: 100) {
+ edges { node { name id distro metadata } }
+ }
+ }
+ """
+
+ @create_svc_q """
+ mutation Create($clusterId: ID!, $attributes: ServiceDeploymentAttributes!) {
+ createServiceDeployment(clusterId: $clusterId, attributes: $attributes) {
+ id
+ }
+ }
+ """
+
+ @delete_svc_q """
+ mutation Delete($id: ID!) {
+ deleteServiceDeployment(id: $id) {
+ id
+ }
+ }
+ """
+
+ @update_svc_q """
+ mutation Update($id: ID!, $attributes: ServiceUpdateAttributes!) {
+    updateServiceDeployment(id: $id, attributes: $attributes) {
+ id
+ }
+ }
+ """
+
+ @repo_q """
+ query Repo($url: String!) {
+ gitRepository(url: $url) {
+ id
+ }
+ }
+ """
+
+ def new(url, token) do
+ Req.new(base_url: url, auth: "Token #{token}")
+ |> AbsintheClient.attach()
+ end
+
+ def clusters(client) do
+ Req.post(client, graphql: @clusters_q)
+ |> case do
+ {:ok, %Req.Response{body: %{"clusters" => %{"edges" => edges}}}} -> {:ok, Enum.map(edges, & &1["node"])}
+ res ->
+ Logger.warn "Failed to fetch clusters: #{inspect(res)}"
+ {:error, "could not fetch clusters"}
+ end
+ end
+
+ def repo(client, url) do
+ Req.post(client, graphql: {@repo_q, %{url: url}})
+ |> case do
+ {:ok, %Req.Response{body: %{"gitRepository" => %{"id" => id}}}} -> {:ok, id}
+ res ->
+ Logger.warn "Failed to fetch clusters: #{inspect(res)}"
+ {:error, "could not fetch repo"}
+ end
+ end
+
+ def create_service(client, cluster_id, attrs) do
+ Req.post(client, graphql: {@create_svc_q, %{clusterId: cluster_id, attributes: attrs}})
+ |> service_resp("createServiceDeployment")
+ end
+
+ def update_service(client, id, attrs) do
+ Req.post(client, graphql: {@update_svc_q, %{id: id, attributes: attrs}})
+ |> service_resp("updateServiceDeployment")
+ end
+
+ def delete_service(client, id) do
+ Req.post(client, graphql: {@delete_svc_q, %{id: id}})
+ |> service_resp("deleteServiceDeployment")
+ end
+
+ defp service_resp({:ok, %Req.Response{status: 200, body: body}}, field) do
+ case body[field] do
+ %{"id" => id} -> {:ok, id}
+ err ->
+ Logger.warn "invalid console gql response: #{inspect(err)}"
+ end
+ end
+
+ defp service_resp(resp, _) do
+ Logger.error "failed to fetch from console: #{inspect(resp)}"
+ {:error, "console error"}
+ end
+end
diff --git a/apps/core/lib/core/conduit/base.ex b/apps/core/lib/core/conduit/base.ex
index 124086c34..aca468c1e 100644
--- a/apps/core/lib/core/conduit/base.ex
+++ b/apps/core/lib/core/conduit/base.ex
@@ -13,6 +13,7 @@ defmodule Core.Conduit.Base do
defqueue "plural.upgrade"
defqueue "plural.scan"
defqueue "plural.cluster"
+ defqueue "plural.cloud"
end
pipeline :out_tracking do
@@ -36,6 +37,7 @@ defmodule Core.Conduit.Base do
publish :upgrade, exchange: "plural.topic", to: "plural.upgrade"
publish :scan, exchange: "plural.topic", to: "plural.scan"
publish :cluster, exchange: "plural.topic", to: "plural.cluster"
+ publish :cloud, exchange: "plural.topic", to: "plural.cloud"
end
outgoing do
diff --git a/apps/core/lib/core/policies/cloud.ex b/apps/core/lib/core/policies/cloud.ex
new file mode 100644
index 000000000..b63f23c0e
--- /dev/null
+++ b/apps/core/lib/core/policies/cloud.ex
@@ -0,0 +1,16 @@
+defmodule Core.Policies.Cloud do
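+  @moduledoc """
+  Authorization policies for Plural Cloud: creating a console instance requires the :cd plan feature.
+  """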
+ use Piazza.Policy
+ alias Core.Schema.{User, ConsoleInstance}
+ alias Core.Services.Payments
+
+ def can?(%User{} = user, %ConsoleInstance{}, :create) do
+ case Payments.has_feature?(user, :cd) do
+ true -> :pass
+ _ -> {:error, "you must be on a paid plan to use Plural Cloud"}
+ end
+ end
+
+ def can?(u, %Ecto.Changeset{} = cs, action), do: can?(u, apply_changes(cs), action)
+
+ def can?(_, _, _), do: :pass
+end
diff --git a/apps/core/lib/core/pubsub/events.ex b/apps/core/lib/core/pubsub/events.ex
index 89037f6a1..4034f2af4 100644
--- a/apps/core/lib/core/pubsub/events.ex
+++ b/apps/core/lib/core/pubsub/events.ex
@@ -90,3 +90,8 @@ defmodule Core.PubSub.ClusterDependencyCreated, do: use Piazza.PubSub.Event
defmodule Core.PubSub.DeferredUpdateCreated, do: use Piazza.PubSub.Event
defmodule Core.PubSub.UpgradesPromoted, do: use Piazza.PubSub.Event
+
+defmodule Core.PubSub.ConsoleInstanceCreated, do: use Piazza.PubSub.Event
+defmodule Core.PubSub.ConsoleInstanceUpdated, do: use Piazza.PubSub.Event
+defmodule Core.PubSub.ConsoleInstanceDeleted, do: use Piazza.PubSub.Event
+defmodule Core.PubSub.ConsoleInstanceReaped, do: use Piazza.PubSub.Event
diff --git a/apps/core/lib/core/pubsub/protocols/fanout.ex b/apps/core/lib/core/pubsub/protocols/fanout.ex
index 8ee3d6062..3db0c6eca 100644
--- a/apps/core/lib/core/pubsub/protocols/fanout.ex
+++ b/apps/core/lib/core/pubsub/protocols/fanout.ex
@@ -235,3 +235,14 @@ defimpl Core.PubSub.Fanout, for: [Core.PubSub.RoleCreated, Core.PubSub.RoleUpdat
|> Enum.count()
end
end
+
+defimpl Core.PubSub.Fanout, for: [
+ Core.PubSub.ConsoleInstanceCreated,
+ Core.PubSub.ConsoleInstanceUpdated,
+ Core.PubSub.ConsoleInstanceDeleted
+ ] do
+ def fanout(event) do
+ %Conduit.Message{body: event}
+ |> Core.Conduit.Broker.publish(:cloud)
+ end
+end
diff --git a/apps/core/lib/core/schema/cloud_cluster.ex b/apps/core/lib/core/schema/cloud_cluster.ex
new file mode 100644
index 000000000..9a179ab18
--- /dev/null
+++ b/apps/core/lib/core/schema/cloud_cluster.ex
@@ -0,0 +1,42 @@
+defmodule Core.Schema.CloudCluster do
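+  @moduledoc """
+  A shared workload cluster that hosts cloud console instances; `count` tracks placements,
+  and clusters at the saturation threshold are excluded from scheduling.
+  """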
+ use Piazza.Ecto.Schema
+
+ defenum Cloud, aws: 0
+
+ @saturation 1000
+
+ @region_map %{
+ aws: ~w(us-east-1)
+ }
+
+ schema "cloud_clusters" do
+ field :name, :string
+ field :external_id, :binary_id
+ field :cloud, Cloud
+ field :region, :string
+ field :count, :integer
+
+ timestamps()
+ end
+
+ def for_cloud(query \\ __MODULE__, cloud) do
+ from(c in query, where: c.cloud == ^cloud)
+ end
+
+ def unsaturated(query \\ __MODULE__) do
+ from(c in query, where: c.count < @saturation)
+ end
+
+ def for_region(query \\ __MODULE__, region) do
+ from(c in query, where: c.region == ^region)
+ end
+
+ def region_information(), do: @region_map
+
+ def changeset(model, attrs \\ %{}) do
+ model
+ |> cast(attrs, ~w(name external_id cloud region)a)
+ |> unique_constraint(:name)
+ |> validate_required(~w(name external_id cloud region)a)
+ end
+end
diff --git a/apps/core/lib/core/schema/cockroach_cluster.ex b/apps/core/lib/core/schema/cockroach_cluster.ex
new file mode 100644
index 000000000..0e92016a5
--- /dev/null
+++ b/apps/core/lib/core/schema/cockroach_cluster.ex
@@ -0,0 +1,34 @@
+defmodule Core.Schema.CockroachCluster do
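+  @moduledoc """
+  A shared CockroachDB cluster backing cloud console instances, storing its connection url,
+  CA certificate, and per-region endpoints.
+  """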
+ use Piazza.Ecto.Schema
+ alias Piazza.Ecto.EncryptedString
+ alias Core.Schema.CloudCluster
+
+ @saturation 1000
+
+ schema "cockroach_clusters" do
+ field :name, :string
+ field :cloud, CloudCluster.Cloud
+ field :region, :string
+ field :url, EncryptedString
+ field :certificate, :string
+ field :endpoints, :map
+ field :count, :integer, default: 0
+
+ timestamps()
+ end
+
+ def for_cloud(query \\ __MODULE__ , cloud) do
+ from(c in query, where: c.cloud == ^cloud)
+ end
+
+ def unsaturated(query \\ __MODULE__) do
+ from(c in query, where: c.count < @saturation)
+ end
+
+ def changeset(model, attrs \\ %{}) do
+ model
+ |> cast(attrs, ~w(name cloud region url certificate endpoints)a)
+ |> unique_constraint(:name)
+ |> validate_required(~w(name cloud region url certificate endpoints)a)
+ end
+end
diff --git a/apps/core/lib/core/schema/console_instance.ex b/apps/core/lib/core/schema/console_instance.ex
new file mode 100644
index 000000000..5dbd47d63
--- /dev/null
+++ b/apps/core/lib/core/schema/console_instance.ex
@@ -0,0 +1,136 @@
+defmodule Core.Schema.ConsoleInstance do
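+  @moduledoc """
+  A single-tenant Plural Console provisioned in Plural Cloud, tracking its provisioning status,
+  sizing, region, and the encrypted configuration used to deploy it.
+  """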
+ use Piazza.Ecto.Schema
+ alias Piazza.Ecto.EncryptedString
+ alias Core.Schema.{CockroachCluster, CloudCluster, User}
+
+ defenum Size, small: 0, medium: 1, large: 2
+ defenum Status,
+ pending: 0,
+ database_created: 1,
+ deployment_created: 2,
+ provisioned: 3,
+ deployment_deleted: 4,
+ database_deleted: 5
+
+ @region_map %{
+ aws: ~w(us-east-1)
+ }
+
+ schema "console_instances" do
+ field :name, :string
+ field :status, Status
+ field :subdomain, :string
+ field :url, :string
+ field :external_id, :string
+ field :cloud, CloudCluster.Cloud
+ field :size, Size
+ field :region, :string
+
+ field :first_notif_at, :utc_datetime_usec
+ field :second_notif_at, :utc_datetime_usec
+ field :deleted_at, :utc_datetime_usec
+
+ embeds_one :instance_status, InstanceStatus, on_replace: :update do
+ field :db, :boolean, default: false
+ field :svc, :boolean, default: false
+ end
+
+ embeds_one :configuration, Configuration, on_replace: :update do
+ field :database, :string
+ field :dbuser, :string
+ field :dbpassword, EncryptedString
+ field :subdomain, :string
+ field :jwt_secret, EncryptedString
+ field :owner_name, :string
+ field :owner_email, :string
+ field :admin_password, EncryptedString
+ field :aes_key, EncryptedString
+ field :encryption_key, EncryptedString
+ field :client_id, :string
+ field :client_secret, EncryptedString
+ field :plural_token, EncryptedString
+ field :kas_api, EncryptedString
+ field :kas_private, EncryptedString
+ field :kas_redis, EncryptedString
+ end
+
+ belongs_to :cockroach, CockroachCluster
+ belongs_to :cluster, CloudCluster
+ belongs_to :owner, User
+
+ timestamps()
+ end
+
+ def for_account(query \\ __MODULE__, account_id) do
+ from(c in query,
+ join: u in assoc(c, :owner),
+ where: u.account_id == ^account_id
+ )
+ end
+
+ def unpaid(query \\ __MODULE__) do
+ from(c in query,
+ join: u in assoc(c, :owner),
+ join: a in assoc(u, :account),
+ left_join: s in assoc(a, :subscription),
+ where: not is_nil(a.delinquent_at) or is_nil(s.id)
+ )
+ end
+
+ def reapable(query \\ __MODULE__) do
+ week_ago = Timex.now() |> Timex.shift(weeks: -1)
+ default = Timex.shift(week_ago, weeks: -1)
+ from(c in query,
+ where: coalesce(coalesce(c.second_notif_at, c.first_notif_at), ^default) < ^week_ago
+ )
+ end
+
+ def ordered(query \\ __MODULE__, order \\ [asc: :name]) do
+ from(c in query, order_by: ^order)
+ end
+
+ def regions(), do: @region_map
+
+ @valid ~w(name cloud size region status subdomain url external_id cockroach_id cluster_id owner_id)a
+
+ def changeset(model, attrs \\ %{}) do
+ model
+ |> cast(attrs, @valid)
+ |> cast_embed(:configuration, with: &configuration_changeset/2)
+ |> cast_embed(:instance_status, with: &status_changeset/2)
+ |> validate_required(@valid -- [:external_id])
+ |> unique_constraint(:subdomain)
+ |> unique_constraint(:name)
+    |> validate_format(:name, ~r/^[a-z][a-z0-9]{4,9}$/, message: "must be an alphanumeric string between 5 and 10 characters")
+ |> validate_region()
+ end
+
+ defp validate_region(cs) do
+ cloud = get_field(cs, :cloud)
+ regions = @region_map[cloud]
+ validate_change(cs, :region, fn :region, reg ->
+ case reg in regions do
+ true -> []
+ _ -> [region: "Invalid region #{reg} for cloud #{cloud}"]
+ end
+ end)
+ end
+
+ @conf_valid ~w(
+ database dbuser dbpassword
+ subdomain jwt_secret owner_name owner_email admin_password aes_key
+ encryption_key client_id client_secret plural_token
+ kas_api kas_private kas_redis
+ )a
+
+ defp configuration_changeset(model, attrs) do
+ model
+ |> cast(attrs, @conf_valid)
+ |> validate_required(@conf_valid)
+ end
+
+ defp status_changeset(model, attrs) do
+ model
+ |> cast(attrs, ~w(db svc)a)
+ end
+end
diff --git a/apps/core/lib/core/services/cloud.ex b/apps/core/lib/core/services/cloud.ex
new file mode 100644
index 000000000..7a85a858d
--- /dev/null
+++ b/apps/core/lib/core/services/cloud.ex
@@ -0,0 +1,221 @@
+defmodule Core.Services.Cloud do
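+  @moduledoc """
+  Service layer for Plural Cloud: placing console instances onto cloud and cockroach clusters,
+  creating/updating/deleting instances, and reaping unpaid ones.
+  """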
+ use Core.Services.Base
+ import Core.Policies.Cloud
+ alias Core.Repo
+ alias Core.PubSub
+ alias Core.Services.{Accounts, Users, Repositories, Shell}
+ alias Core.Schema.{CloudCluster, CockroachCluster, ConsoleInstance, User, OIDCProvider}
+
+ @type error :: {:error, term}
+ @type console_resp :: {:ok, ConsoleInstance.t} | error
+ @type cluster_resp :: {:ok, CloudCluster.t} | error
+ @type cockroach_resp :: {:ok, CockroachCluster.t} | error
+
+ def get_instance!(id), do: Repo.get!(ConsoleInstance, id)
+
+ @spec upsert_cluster(map, binary) :: cluster_resp
+ def upsert_cluster(attrs, name) do
+ case Repo.get_by(CloudCluster, name: name) do
+ %CloudCluster{} = cluster -> cluster
+ nil -> %CloudCluster{name: name}
+ end
+ |> CloudCluster.changeset(attrs)
+ |> Repo.insert_or_update()
+ end
+
+ @spec upsert_cockroach(map, binary) :: cockroach_resp
+ def upsert_cockroach(attrs, name) do
+ case Repo.get_by(CockroachCluster, name: name) do
+ %CockroachCluster{} = cluster -> cluster
+ nil -> %CockroachCluster{name: name}
+ end
+ |> CockroachCluster.changeset(attrs)
+ |> Repo.insert_or_update()
+ end
+
+ @doc """
+ Creates a new Cloud instance of the Plural console
+ """
+ @spec create_instance(map, User.t) :: console_resp
+ def create_instance(%{name: name} = attrs, %User{} = user) do
+ start_transaction()
+ |> add_operation(:auth, fn _ -> allow(%ConsoleInstance{}, user, :create) end)
+ |> add_operation(:cluster, fn _ -> select_cluster(attrs[:cloud], attrs[:region]) end)
+ |> add_operation(:cockroach, fn _ -> select_roach(attrs[:cloud]) end)
+ |> add_operation(:sa, fn _ ->
+ Accounts.create_service_account(%{name: "#{name}-cloud-sa", email: "#{name}-cloud-sa@srv.plural.sh"}, user)
+ end)
+ |> add_operation(:token, fn %{sa: sa} -> Users.create_persisted_token(sa) end)
+ |> add_operation(:install, fn %{sa: sa} ->
+ repo = Repositories.get_repository_by_name!("console")
+ case Repositories.get_installation(sa.id, repo.id) do
+ nil -> Repositories.create_installation(%{}, repo, sa)
+ inst -> {:ok, inst}
+ end
+ end)
+ |> add_operation(:oidc, fn %{install: inst, sa: sa} ->
+ inst = Core.Repo.preload(inst, [oidc_provider: :bindings])
+ Repositories.upsert_oidc_provider(%{
+ auth_method: :post,
+ bindings: Shell.oidc_bindings(inst.oidc_provider, user),
+ redirect_uris: Shell.merge_uris(["https://console.#{name}.cloud.plural.sh/oauth/callback"], inst.oidc_provider)
+ }, inst.id, sa)
+ end)
+ |> add_operation(:instance, fn %{oidc: oidc, token: token, cluster: cluster, cockroach: roach, sa: sa} ->
+ %ConsoleInstance{status: :pending, cluster_id: cluster.id, cockroach_id: roach.id, owner_id: sa.id}
+ |> ConsoleInstance.changeset(add_configuration(attrs, name, token.token, oidc, user))
+ |> Repo.insert()
+ end)
+ |> execute(extract: :instance)
+ |> notify(:create, user)
+ end
+
+ @doc """
+ Updates base attributes of a console instance
+ """
+ @spec update_instance(map, binary, User.t) :: console_resp
+ def update_instance(attrs, id, %User{} = user) do
+ start_transaction()
+ |> add_operation(:inst, fn _ -> authorize(id, user) end)
+ |> add_operation(:updated, fn %{inst: inst} ->
+ ConsoleInstance.changeset(inst, attrs)
+ |> Repo.update()
+ end)
+ |> execute(extract: :updated)
+ |> notify(:update, user)
+ end
+
+ @doc """
+ Schedules a console instance to be cleaned up
+ """
+ @spec delete_instance(binary, User.t) :: console_resp
+ def delete_instance(id, %User{} = user) do
+ start_transaction()
+ |> add_operation(:inst, fn _ -> authorize(id, user) end)
+ |> add_operation(:deleted, fn %{inst: inst} ->
+ Ecto.Changeset.change(inst, %{deleted_at: Timex.now()})
+ |> Repo.update()
+ end)
+ |> execute(extract: :deleted)
+ |> notify(:delete, user)
+ end
+
+ @doc """
+  Attempts to reap a cloud console instance: we send two notifications, then delete the instance.
+ """
+ @spec reap(ConsoleInstance.t) :: console_resp
+ def reap(%ConsoleInstance{first_notif_at: nil} = inst),
+ do: notify_reaping(inst, :first_notif_at)
+ def reap(%ConsoleInstance{second_notif_at: nil} = inst),
+ do: notify_reaping(inst, :second_notif_at)
+ def reap(%ConsoleInstance{} = inst) do
+ %{owner: owner} = Repo.preload(inst, [:owner])
+ delete_instance(inst.id, owner)
+ end
+
+ defp notify_reaping(instance, field) do
+ Ecto.Changeset.change(instance, %{field => Timex.now()})
+ |> Repo.update()
+ |> notify(:reap)
+ end
+
+ def authorize(id, %User{} = user) do
+ inst = get_instance!(id) |> Repo.preload([:owner])
+ with {:ok, _} <- Core.Policies.Account.allow(inst.owner, user, :impersonate),
+ do: {:ok, inst}
+ end
+
+ def visible(id, %User{account_id: aid}) do
+ get_instance!(id)
+ |> Repo.preload([:owner])
+ |> case do
+ %ConsoleInstance{owner: %User{account_id: ^aid}} = instance -> {:ok, instance}
+ _ -> {:error, :forbidden}
+ end
+ end
+
+ defp add_configuration(attrs, name, token, %OIDCProvider{} = oidc, %User{} = user) do
+ Map.merge(attrs, %{subdomain: "#{name}.cloud.plural.sh", url: "console.#{name}.cloud.plural.sh"})
+ |> Map.put(:configuration, %{
+ aes_key: aes_key(),
+ encryption_key: encryption_key(),
+ database: "#{name}_cloud",
+ dbuser: "#{name}_user",
+ dbpassword: Core.random_alphanum(30),
+ subdomain: "#{name}.cloud.plural.sh",
+ jwt_secret: Core.random_alphanum(30),
+ owner_name: user.name,
+ owner_email: user.email,
+ admin_password: Core.random_alphanum(30),
+ client_id: oidc.client_id,
+ client_secret: oidc.client_secret,
+ plural_token: token,
+ kas_api: Core.random_alphanum(30),
+ kas_private: Core.random_alphanum(30),
+ kas_redis: Core.random_alphanum(30)
+ })
+ end
+
+ defp select_cluster(cloud, region) do
+ CloudCluster.for_cloud(cloud)
+ |> CloudCluster.for_region(region)
+ |> CloudCluster.unsaturated()
+ |> Repo.all()
+ |> random_choice("Could not find cluster for #{cloud} and #{region}")
+ end
+
+ defp select_roach(cloud) do
+ CockroachCluster.for_cloud(cloud)
+ |> CockroachCluster.unsaturated()
+ |> Repo.all()
+ |> random_choice("Could not place in #{cloud}")
+ end
+
+ defp random_choice([], message), do: {:error, message}
+ defp random_choice(l, _) do
+ Enum.random(l)
+ |> inc()
+ end
+
+ def inc(%schema{id: id}) do
+ schema.selected()
+ |> schema.for_id(id)
+ |> Core.Repo.update_all(inc: [count: 1])
+ |> case do
+ {1, [res]} -> {:ok, res}
+ _ -> {:error, "could not increment #{schema} [id=#{id}]"}
+ end
+ end
+
+ def dec(%schema{id: id}) do
+ schema.selected()
+ |> schema.for_id(id)
+ |> Core.Repo.update_all(inc: [count: -1])
+ |> case do
+ {1, [res]} -> {:ok, res}
+ _ -> {:error, "could not increment #{schema} [id=#{id}]"}
+ end
+ end
+
+ defp aes_key() do
+ :crypto.strong_rand_bytes(32)
+ |> Base.url_encode64()
+ end
+
+ defp encryption_key() do
+ :crypto.strong_rand_bytes(32)
+ |> Base.encode64()
+ end
+
+ defp notify({:ok, %ConsoleInstance{} = inst}, :create, user),
+ do: handle_notify(PubSub.ConsoleInstanceCreated, inst, actor: user)
+ defp notify({:ok, %ConsoleInstance{} = inst}, :update, user),
+ do: handle_notify(PubSub.ConsoleInstanceUpdated, inst, actor: user)
+ defp notify({:ok, %ConsoleInstance{} = inst}, :delete, user),
+ do: handle_notify(PubSub.ConsoleInstanceDeleted, inst, actor: user)
+ defp notify(pass, _, _), do: pass
+
+ defp notify({:ok, %ConsoleInstance{} = inst}, :reap),
+ do: handle_notify(PubSub.ConsoleInstanceReaped, inst)
+ defp notify(pass, _), do: pass
+end
diff --git a/apps/core/lib/core/services/cloud/configuration.ex b/apps/core/lib/core/services/cloud/configuration.ex
new file mode 100644
index 000000000..74140670c
--- /dev/null
+++ b/apps/core/lib/core/services/cloud/configuration.ex
@@ -0,0 +1,38 @@
+defmodule Core.Services.Cloud.Configuration do
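+  @moduledoc """
+  Translates a console instance's configuration into the name/value pairs passed to the
+  console deployment's helm configuration.
+  """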
+ alias Core.Schema.{ConsoleInstance, CockroachCluster}
+
+ def build(%ConsoleInstance{configuration: conf, size: size} = inst) do
+ Map.take(conf, ~w(
+ subdomain
+ jwt_secret
+ owner_name
+ owner_email
+ admin_password
+ aes_key
+ encryption_key
+ client_id
+ client_secret
+ plural_token
+ kas_api
+ kas_private
+ kas_redis
+ )a)
+ |> Map.merge(%{
+ postgres_url: build_pg_url(inst),
+ size: "#{size}",
+ postgres_certificate: certificate(inst)
+ })
+ |> Enum.map(fn {k, v} -> %{name: Macro.camelize("#{k}"), value: v} end)
+ end
+
+ defp certificate(%ConsoleInstance{cockroach: %CockroachCluster{certificate: cert}}), do: cert
+
+ defp build_pg_url(%ConsoleInstance{
+ configuration: %{dbuser: u, dbpassword: p, database: database},
+ region: region,
+ cockroach: %CockroachCluster{endpoints: endpoints}
+ }) do
+ "postgresql://#{u}:#{p}@#{endpoints[region]}/#{database}"
+ end
+end
diff --git a/apps/core/lib/core/services/cloud/poller.ex b/apps/core/lib/core/services/cloud/poller.ex
new file mode 100644
index 000000000..480765267
--- /dev/null
+++ b/apps/core/lib/core/services/cloud/poller.ex
@@ -0,0 +1,83 @@
+defmodule Core.Services.Cloud.Poller do
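+  @moduledoc """
+  GenServer that periodically syncs workload clusters from the management console and cockroach
+  connection info from a k8s secret, and caches the deployment repository id.
+  """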
+ use GenServer
+ alias Core.Clients.Console
+ alias Core.Services.Cloud
+ alias Kazan.Apis.Core.V1, as: CoreV1
+
+ @poll :timer.minutes(5)
+
+ defmodule State, do: defstruct [:client, :repo]
+
+ def start_link(_) do
+ GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
+ end
+
+ def init(_) do
+ :timer.send_interval(@poll, :clusters)
+ :timer.send_interval(@poll, :roaches)
+ send self(), :repo
+ {:ok, %State{client: Console.new(Core.conf(:console_url), Core.conf(:console_token))}}
+ end
+
+ def repository(), do: GenServer.call(__MODULE__, :repo)
+
+  def handle_call(:repo, _from, %{repo: id} = state) when is_binary(id),
+    do: {:reply, {:ok, id}, state}
+  def handle_call(:repo, _from, state), do: {:reply, {:error, "repo not pulled"}, state}
+
+ def handle_info(:repo, %{client: client} = state) do
+ case Console.repo(client, Core.conf(:mgmt_repo)) do
+ {:ok, id} -> {:noreply, %{state | repo: id}}
+ _ -> {:noreply, state}
+ end
+ end
+
+ def handle_info(:clusters, %{client: client} = state) do
+ with {:ok, clusters} <- Console.clusters(client) do
+ Enum.each(clusters, &upsert_cluster/1)
+ end
+
+ {:noreply, state}
+ end
+
+ def handle_info(:roaches, state) do
+ with {:ok, roaches} <- read_secret() do
+ Enum.each(roaches, &upsert_roach/1)
+ end
+ {:noreply, state}
+ end
+
+ def handle_info(_, state), do: {:noreply, state}
+
+ defp upsert_cluster(%{"id" => id, "name" => name, "distro" => distro, "metadata" => meta}) do
+ Cloud.upsert_cluster(%{
+ external_id: id,
+ cloud: to_cloud(distro),
+ region: meta["region"]
+ }, name)
+ end
+
+ defp upsert_roach(%{"name" => name} = roach) do
+ Cloud.upsert_cockroach(%{
+ cloud: roach["cloud"],
+ url: roach["url"],
+ certificate: roach["certificate"],
+ endpoints: roach["endpoints"]
+ }, name)
+ end
+
+ defp read_secret() do
+ CoreV1.read_namespaced_secret!("plural", "plrl-cloud-config")
+ |> Kazan.run()
+ |> case do
+ {:ok, %CoreV1.Secret{data: %{"cockroaches" => roaches}}} ->
+ Jason.decode(roaches)
+ _ -> {:error, "could not find secret"}
+ end
+ end
+
+ defp to_cloud("EKS"), do: :aws
+ defp to_cloud("GKE"), do: :gcp
+ defp to_cloud("AKS"), do: :azure
+ defp to_cloud(_), do: :aws
+end
diff --git a/apps/core/lib/core/services/cloud/workflow.ex b/apps/core/lib/core/services/cloud/workflow.ex
new file mode 100644
index 000000000..3c626ead3
--- /dev/null
+++ b/apps/core/lib/core/services/cloud/workflow.ex
@@ -0,0 +1,159 @@
+defmodule Core.Services.Cloud.Workflow do
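+  @moduledoc """
+  Drives provisioning and deprovisioning of cloud console instances by stepping them through
+  their status state machine (database, then service deployment), with bounded retries.
+  """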
+ use Core.Services.Base
+ alias Core.Clients.Console
+ alias Core.Services.Cloud
+ alias Core.Services.Cloud.{Poller, Configuration}
+ alias Core.Schema.{ConsoleInstance, CockroachCluster}
+ alias Core.Repo
+
+ require Logger
+
+ def sync(%ConsoleInstance{external_id: id} = instance) when is_binary(id) do
+ instance = Repo.preload(instance, [:cluster, :cockroach])
+    Console.update_service(console(), id, %{
+      size: instance.size,
+      configuration: Configuration.build(instance)
+    })
+ end
+ def sync(_), do: :ok
+
+ def provision(%ConsoleInstance{} = instance) do
+ instance = Repo.preload(instance, [:cockroach, :cluster])
+
+ Enum.reduce_while(0..10, instance, fn _, acc ->
+ case up(acc) do
+ {:ok, %ConsoleInstance{status: :deployment_created} = inst} -> {:halt, inst}
+ {:ok, inst} -> {:cont, inst}
+ err ->
+ :timer.sleep(:timer.seconds(1))
+ Logger.error "failed to transition provisioning console: #{inspect(err)}"
+ {:cont, acc}
+ end
+ end)
+ |> finalize(:up)
+ end
+
+ def deprovision(%ConsoleInstance{} = instance) do
+ instance = Repo.preload(instance, [:cockroach, :cluster])
+
+ Enum.reduce_while(0..10, instance, fn _, acc ->
+ case down(acc) do
+ {:ok, %ConsoleInstance{status: :database_deleted} = inst} -> {:halt, inst}
+ {:ok, inst} -> {:cont, inst}
+ err ->
+ :timer.sleep(:timer.seconds(1))
+ Logger.error "failed to transition deprovisioning console: #{inspect(err)}"
+ {:cont, acc}
+ end
+ end)
+ |> finalize(:down)
+ end
+
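+  # status progression:
+  #   up:   :pending -> :database_created -> :deployment_created -> :provisioned
+  #   down: :deployment_deleted -> :database_deleted -> record deleted in finalize/2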
+ defp up(%ConsoleInstance{status: :pending, cockroach: roach, configuration: conf} = inst) do
+ with {:ok, pid} <- connect(roach),
+ {:ok, _} <- Postgrex.transaction(pid, fn conn ->
+ Postgrex.query!(conn, "CREATE DATABASE #{conf.database}", [])
+ Postgrex.query!(conn, "CREATE USER #{conf.dbuser} WITH PASSWORD $1", [conf.dbpassword])
+ Postgrex.query!(conn, "GRANT ALL ON DATABASE #{conf.database} TO #{conf.dbuser}", [])
+ end) do
+ ConsoleInstance.changeset(inst, %{
+ instance_status: %{db: true},
+ status: :database_created,
+ })
+ |> Repo.update()
+ end
+ end
+
+ defp up(%ConsoleInstance{instance_status: %{db: true}, name: name, cluster: cluster} = inst) do
+ with {:ok, id} <- Poller.repository(),
+ {:ok, svc_id} <- Console.create_service(console(), cluster.external_id, %{
+ name: "console-cloud-#{name}",
+ namespace: "plrl-cloud-#{name}",
+ helm: %{
+ url: "https://pluralsh.github.io/console",
+ chart: "console",
+ version: "x.x.x",
+ valuesFiles: ["console.yaml.liquid"]
+ },
+ repository_id: id,
+ git: %{ref: "main", folder: "helm"},
+ configuration: Configuration.build(inst),
+ }) do
+ ConsoleInstance.changeset(inst, %{
+ external_id: svc_id,
+ instance_status: %{svc: true},
+ status: :deployment_created
+ })
+ |> Repo.update()
+ end
+ end
+
+ defp down(%ConsoleInstance{instance_status: %{svc: false, db: true}, configuration: conf, cockroach: roach} = inst) do
+ with {:ok, pid} <- connect(roach),
+ {:ok, _} <- Postgrex.transaction(pid, fn conn ->
+ Postgrex.query!(conn, "DROP DATABASE #{conf.database}", [])
+ Postgrex.query!(conn, "DROP USER #{conf.dbuser}", [])
+ end) do
+ ConsoleInstance.changeset(inst, %{
+ instance_status: %{db: false},
+ status: :database_deleted,
+ })
+ |> Repo.update()
+ end
+ end
+
+ defp down(%ConsoleInstance{instance_status: %{svc: true}} = inst) do
+ with {:ok, _} <- Console.delete_service(console(), inst.external_id) do
+ ConsoleInstance.changeset(inst, %{
+ instance_status: %{svc: false},
+ status: :deployment_deleted,
+ })
+ |> Repo.update()
+ end
+ end
+
+ defp finalize(%ConsoleInstance{status: :deployment_created} = inst, :up) do
+ ConsoleInstance.changeset(inst, %{status: :provisioned})
+ |> Repo.update()
+ end
+
+ defp finalize(%ConsoleInstance{status: :database_deleted, cluster: cluster, cockroach: roach} = inst, :down) do
+ start_transaction()
+ |> add_operation(:inst, fn _ -> Repo.delete(inst) end)
+ |> add_operation(:cluster, fn _ -> Cloud.dec(cluster) end)
+ |> add_operation(:roach, fn _ -> Cloud.dec(roach) end)
+ |> execute(extract: :inst)
+ end
+
+ defp finalize(inst, _) do
+ Logger.warn "failed to finalize console instance: #{inst.id}"
+ {:ok, inst}
+ end
+
+ defp connect(%CockroachCluster{certificate: cert_pem} = roach) do
+ with [cert | _] <- :public_key.pem_decode(cert_pem) do
+ uri = URI.parse(roach.url)
+ user = userinfo(uri)
+ Postgrex.start_link(
+ database: uri.path && String.trim_leading(uri.path, "/"),
+ username: user[:username],
+ password: user[:password],
+ hostname: uri.host,
+ port: uri.port,
+ ssl: true,
+ ssl_opts: [cacerts: [:public_key.pem_entry_decode(cert)]]
+ )
+ end
+ end
+
+ defp userinfo(%URI{userinfo: info}) when is_binary(info) do
+ case String.split(info, ":") do
+ [user, pwd] -> %{username: user, password: pwd}
+ [user] -> %{username: user}
+ _ -> %{}
+ end
+ end
+ defp userinfo(_), do: %{}
+
+ defp console(), do: Console.new(Core.conf(:console_url), Core.conf(:console_token))
+end
diff --git a/apps/core/lib/core/services/clusters.ex b/apps/core/lib/core/services/clusters.ex
index 4b5d64ed6..5aaae9c29 100644
--- a/apps/core/lib/core/services/clusters.ex
+++ b/apps/core/lib/core/services/clusters.ex
@@ -21,6 +21,9 @@ defmodule Core.Services.Clusters do
@spec get_cluster_by_owner(binary) :: Cluster.t | nil
def get_cluster_by_owner(user_id), do: Core.Repo.get_by(Cluster, owner_id: user_id)
+ @spec get_cluster_by_url(binary) :: Cluster.t | nil
+ def get_cluster_by_url(url), do: Core.Repo.get_by(Cluster, console_url: url)
+
@spec has_cluster?(User.t) :: boolean
def has_cluster?(%User{id: user_id}) do
Cluster.for_user(user_id)
diff --git a/apps/core/lib/core/services/payments.ex b/apps/core/lib/core/services/payments.ex
index ba27d0d07..01e60e257 100644
--- a/apps/core/lib/core/services/payments.ex
+++ b/apps/core/lib/core/services/payments.ex
@@ -271,7 +271,7 @@ defmodule Core.Services.Payments do
case {enforce?(), delinquent?(account), grandfathered?(account), account} do
{false, _, _, _} -> true
{_, true, _, _} -> false
- {_, _, true, _} -> true
+ {_, _, true, _} when feature != :cd -> true
{_, _, _, %Account{subscription: %PlatformSubscription{plan: %PlatformPlan{enterprise: true}}}} -> true
{_, _, _, %Account{subscription: %PlatformSubscription{plan: %PlatformPlan{features: %{^feature => true}}}}} -> true
_ -> false
diff --git a/apps/core/lib/core/services/shell.ex b/apps/core/lib/core/services/shell.ex
index dac8fe792..1e1016796 100644
--- a/apps/core/lib/core/services/shell.ex
+++ b/apps/core/lib/core/services/shell.ex
@@ -220,8 +220,8 @@ defmodule Core.Services.Shell do
defp maybe_enable_oidc(_, _, _, _), do: {:ok, true}
- defp oidc_bindings(nil, %User{id: uid}), do: [%{user_id: uid}]
- defp oidc_bindings(%OIDCProvider{bindings: bindings}, %User{id: uid}) do
+ def oidc_bindings(nil, %User{id: uid}), do: [%{user_id: uid}]
+ def oidc_bindings(%OIDCProvider{bindings: bindings}, %User{id: uid}) do
bindings = Enum.map(bindings, fn
%{user_id: uid, id: id} when is_binary(uid) -> %{user_id: uid, id: id}
%{group_id: gid, id: id} when is_binary(gid) -> %{group_id: gid, id: id}
@@ -244,8 +244,8 @@ defmodule Core.Services.Shell do
|> String.replace("{subdomain}", domain)
end
- defp merge_uris(uris, nil), do: uris
- defp merge_uris(new, %OIDCProvider{redirect_uris: old}), do: Enum.uniq(new ++ old)
+ def merge_uris(uris, nil), do: uris
+ def merge_uris(new, %OIDCProvider{redirect_uris: old}), do: Enum.uniq(new ++ old)
@doc """
updates a user's shell workspace context (eg for configuring bundles)
diff --git a/apps/core/mix.exs b/apps/core/mix.exs
index 2a6f7098e..6fe107e44 100644
--- a/apps/core/mix.exs
+++ b/apps/core/mix.exs
@@ -91,6 +91,11 @@ defmodule Core.MixProject do
{:dictionary, "~> 0.1.0"},
{:mojito, "~> 0.7.0"},
{:nebulex, "== 2.4.2"},
+ {:castore, "~> 0.1.7"},
+ {:req, "~> 0.4.14", override: true},
+ {:mint, "~> 1.4.0", override: true},
+ {:finch, "~> 0.17.0", override: true},
+ {:absinthe_client, "~> 0.1.0"},
{:kazan, "~> 0.11", github: "michaeljguarino/kazan", branch: "k8s-1.23"},
{:workos, "~> 0.1.2"},
{:decorator, "~> 1.3"}, #=> For using Caching Annotations
@@ -119,7 +124,6 @@ defmodule Core.MixProject do
{:swarm, "~> 3.4.0"},
{:poison, "~> 3.0"},
{:cloudflare, "~> 0.2"},
- {:mint, "~> 1.0", override: true},
{:mimic, "~> 1.1", only: :test},
{:google_api_iam, "~> 0.40"},
{:google_api_cloud_resource_manager, "~> 0.41"},
diff --git a/apps/core/priv/repo/migrations/20240806022151_add_cloud_schemas.exs b/apps/core/priv/repo/migrations/20240806022151_add_cloud_schemas.exs
new file mode 100644
index 000000000..bcf8671bd
--- /dev/null
+++ b/apps/core/priv/repo/migrations/20240806022151_add_cloud_schemas.exs
@@ -0,0 +1,62 @@
+defmodule Core.Repo.Migrations.AddCloudSchemas do
+ use Ecto.Migration
+
+ def change do
+ create table(:cloud_clusters, primary_key: false) do
+ add :id, :uuid, primary_key: true
+ add :name, :string
+ add :external_id, :uuid
+ add :cloud, :integer
+ add :region, :string
+ add :count, :integer, default: 0
+
+ timestamps()
+ end
+
+ create table(:cockroach_clusters, primary_key: false) do
+ add :id, :uuid, primary_key: true
+ add :name, :string
+ add :cloud, :integer
+ add :region, :string
+ add :url, :string
+ add :certificate, :binary
+ add :endpoints, :map
+ add :count, :integer, default: 0
+
+ timestamps()
+ end
+
+ create table(:console_instances, primary_key: false) do
+ add :id, :uuid, primary_key: true
+ add :name, :string
+ add :cloud, :integer
+ add :size, :integer
+ add :region, :string
+ add :status, :integer
+ add :subdomain, :string
+ add :url, :string
+ add :external_id, :string
+ add :configuration, :map
+ add :deleted_at, :utc_datetime_usec
+
+ add :first_notif_at, :utc_datetime_usec
+ add :second_notif_at, :utc_datetime_usec
+
+ add :instance_status, :map
+
+ add :cockroach_id, references(:cockroach_clusters, type: :uuid)
+ add :cluster_id, references(:cloud_clusters, type: :uuid)
+ add :owner_id, references(:users, type: :uuid)
+
+ timestamps()
+ end
+
+ create unique_index(:console_instances, [:name])
+ create unique_index(:console_instances, [:subdomain])
+ create unique_index(:console_instances, [:url])
+
+ create unique_index(:cockroach_clusters, [:name])
+
+ create unique_index(:cloud_clusters, [:name])
+ end
+end
diff --git a/apps/core/test/pubsub/fanout/cloud_test.exs b/apps/core/test/pubsub/fanout/cloud_test.exs
new file mode 100644
index 000000000..169e08ca2
--- /dev/null
+++ b/apps/core/test/pubsub/fanout/cloud_test.exs
@@ -0,0 +1,32 @@
+defmodule Core.PubSub.Fanout.CloudTest do
+ use Core.SchemaCase, async: false
+ alias Core.PubSub
+ use Mimic
+
+ describe "ConsoleInstanceCreated" do
+ test "it will enqueue" do
+ expect(Core.Conduit.Broker, :publish, fn msg, :cloud -> {:ok, msg} end)
+
+ event = %PubSub.ConsoleInstanceCreated{item: insert(:console_instance)}
+ {:ok, %Conduit.Message{body: ^event}} = PubSub.Fanout.fanout(event)
+ end
+ end
+
+ describe "ConsoleInstanceUpdated" do
+ test "it will enqueue" do
+ expect(Core.Conduit.Broker, :publish, fn msg, :cloud -> {:ok, msg} end)
+
+ event = %PubSub.ConsoleInstanceUpdated{item: insert(:console_instance)}
+ {:ok, %Conduit.Message{body: ^event}} = PubSub.Fanout.fanout(event)
+ end
+ end
+
+ describe "ConsoleInstanceDeleted" do
+ test "it will enqueue" do
+ expect(Core.Conduit.Broker, :publish, fn msg, :cloud -> {:ok, msg} end)
+
+ event = %PubSub.ConsoleInstanceDeleted{item: insert(:console_instance)}
+ {:ok, %Conduit.Message{body: ^event}} = PubSub.Fanout.fanout(event)
+ end
+ end
+end
diff --git a/apps/core/test/services/cloud/workflow_test.exs b/apps/core/test/services/cloud/workflow_test.exs
new file mode 100644
index 000000000..105717a88
--- /dev/null
+++ b/apps/core/test/services/cloud/workflow_test.exs
@@ -0,0 +1,47 @@
+defmodule Core.Services.Cloud.WorkflowTest do
+ use Core.SchemaCase, async: true
+ use Mimic
+ alias Core.Services.{Cloud, Cloud.Workflow}
+
+ describe "up and down" do
+ test "it will consistently provision a cloud console's infrastructure" do
+ account = insert(:account)
+ enable_features(account, [:cd])
+ user = admin_user(account)
+ %{external_id: cluster_id} = cluster = insert(:cloud_cluster)
+ roach = insert(:cockroach_cluster)
+ insert(:repository, name: "console")
+
+ expect(HTTPoison, :post, fn _, _, _ ->
+ {:ok, %{status_code: 200, body: Jason.encode!(%{client_id: "123", client_secret: "secret"})}}
+ end)
+
+ {:ok, instance} = Cloud.create_instance(%{
+ name: "plrltest",
+ cloud: :aws,
+ region: "us-east-1",
+ size: :small
+ }, user)
+
+ expect(Core.Services.Cloud.Poller, :repository, fn -> {:ok, "some-id"} end)
+ expect(Core.Clients.Console, :create_service, fn _, ^cluster_id, _ -> {:ok, Ecto.UUID.generate()} end)
+
+ {:ok, %{external_id: svc_id} = instance} = Workflow.provision(instance)
+
+ assert instance.status == :provisioned
+ assert instance.instance_status.db
+ assert instance.instance_status.svc
+
+ expect(Core.Clients.Console, :delete_service, fn _, ^svc_id -> {:ok, svc_id} end)
+
+ {:ok, instance} = Workflow.deprovision(instance)
+
+ refute instance.instance_status.db
+ refute instance.instance_status.svc
+ refute refetch(instance)
+
+ assert refetch(roach).count == 0
+ assert refetch(cluster).count == 0
+ end
+ end
+end
diff --git a/apps/core/test/services/cloud_test.exs b/apps/core/test/services/cloud_test.exs
new file mode 100644
index 000000000..e64f7aaab
--- /dev/null
+++ b/apps/core/test/services/cloud_test.exs
@@ -0,0 +1,142 @@
+defmodule Core.Services.CloudTest do
+ use Core.SchemaCase, async: true
+ use Mimic
+ alias Core.Services.Cloud
+ alias Core.PubSub
+
+ describe "#create_instance/2" do
+ test "creates a new cloud console instance" do
+ account = insert(:account)
+ enable_features(account, [:cd])
+ user = admin_user(account)
+ cluster = insert(:cloud_cluster)
+ cockroach = insert(:cockroach_cluster)
+ insert(:repository, name: "console")
+
+ expect(HTTPoison, :post, fn _, _, _ ->
+ {:ok, %{status_code: 200, body: Jason.encode!(%{client_id: "123", client_secret: "secret"})}}
+ end)
+
+ {:ok, instance} = Cloud.create_instance(%{
+ name: "plrltest",
+ cloud: :aws,
+ region: "us-east-1",
+ size: :small
+ }, user)
+
+ assert instance.name == "plrltest"
+ assert instance.cloud == :aws
+ assert instance.region == "us-east-1"
+ assert instance.size == :small
+
+ assert refetch(cluster).count == 1
+ assert refetch(cockroach).count == 1
+
+ assert_receive {:event, %PubSub.ConsoleInstanceCreated{item: ^instance}}
+ end
+
+ test "unpaid users cannot create instances" do
+ account = insert(:account)
+ user = admin_user(account)
+ insert(:cloud_cluster)
+ insert(:cockroach_cluster)
+ insert(:repository, name: "console")
+
+ {:error, "you must be on a paid plan to use Plural Cloud"} = Cloud.create_instance(%{
+ name: "plrltest",
+ cloud: :aws,
+ region: "us-east-1",
+ size: :small
+ }, user)
+ end
+ end
+
+ describe "#update_instance/3" do
+ test "managers can update the configuration of a console instance" do
+ user = insert(:user)
+ sa = insert(:user, service_account: true)
+ insert(:impersonation_policy_binding,
+ policy: build(:impersonation_policy, user: sa),
+ user: user
+ )
+ instance = insert(:console_instance, owner: sa)
+
+ {:ok, updated} = Cloud.update_instance(%{size: :large}, instance.id, user)
+
+ assert updated.id == instance.id
+ assert updated.size == :large
+
+ assert_receive {:event, %PubSub.ConsoleInstanceUpdated{item: ^updated}}
+ end
+
+ test "non-managers can update the configuration of a console instance" do
+ user = insert(:user)
+ sa = insert(:user, service_account: true)
+ instance = insert(:console_instance, owner: sa)
+
+ {:error, _} = Cloud.update_instance(%{size: :large}, instance.id, user)
+ end
+ end
+
+ describe "#delete_instance/2" do
+ test "managers can update the configuration of a console instance" do
+ user = insert(:user)
+ sa = insert(:user, service_account: true)
+ insert(:impersonation_policy_binding,
+ policy: build(:impersonation_policy, user: sa),
+ user: user
+ )
+ instance = insert(:console_instance, owner: sa)
+
+ {:ok, deleted} = Cloud.delete_instance(instance.id, user)
+
+ assert deleted.id == instance.id
+ assert deleted.deleted_at
+
+ assert_receive {:event, %PubSub.ConsoleInstanceDeleted{item: ^deleted}}
+ end
+
+ test "non-managers can update the configuration of a console instance" do
+ user = insert(:user)
+ sa = insert(:user, service_account: true)
+ instance = insert(:console_instance, owner: sa)
+
+ {:error, _} = Cloud.delete_instance(instance.id, user)
+ end
+ end
+
+ describe "#reap/1" do
+ test "it will send a first warning" do
+ inst = insert(:console_instance)
+
+ {:ok, reaped} = Cloud.reap(inst)
+
+ assert reaped.first_notif_at
+
+ assert_receive {:event, %PubSub.ConsoleInstanceReaped{item: ^reaped}}
+ end
+
+ test "it will send a second warning" do
+ inst = insert(:console_instance, first_notif_at: Timex.now())
+
+ {:ok, reaped} = Cloud.reap(inst)
+
+ assert reaped.second_notif_at
+
+ assert_receive {:event, %PubSub.ConsoleInstanceReaped{item: ^reaped}}
+ end
+
+ test "it will finally delete" do
+ inst = insert(:console_instance,
+ first_notif_at: Timex.now(),
+ second_notif_at: Timex.now()
+ )
+
+ {:ok, reaped} = Cloud.reap(inst)
+
+ assert reaped.deleted_at
+
+ assert_receive {:event, %PubSub.ConsoleInstanceDeleted{item: ^reaped}}
+ end
+ end
+end
diff --git a/apps/core/test/support/factory.ex b/apps/core/test/support/factory.ex
index 07436022c..41435fe31 100644
--- a/apps/core/test/support/factory.ex
+++ b/apps/core/test/support/factory.ex
@@ -641,6 +641,42 @@ defmodule Core.Factory do
}
end
+ def console_instance_factory do
+ name = sequence(:console_instance, & "instance#{&1}")
+ %Schema.ConsoleInstance{
+ name: name,
+ cloud: :aws,
+ external_id: Ecto.UUID.generate(),
+ status: :provisioned,
+ size: :small,
+ cluster: build(:cloud_cluster),
+ cockroach: build(:cockroach_cluster),
+ owner: build(:user, service_account: true),
+ subdomain: "#{name}.cloud.plural.sh",
+ url: "console.#{name}.cloud.plural.sh",
+ instance_status: %{db: true, svc: true},
+ region: "us-east-1"
+ }
+ end
+
+ def cloud_cluster_factory do
+ %Schema.CloudCluster{
+ name: sequence(:cloud_cluster, & "cluster-#{&1}"),
+ cloud: :aws,
+ region: "us-east-1",
+ external_id: Ecto.UUID.generate()
+ }
+ end
+
+ def cockroach_cluster_factory do
+ %Schema.CockroachCluster{
+ name: sequence(:cockroach, & "cockroach-#{&1}"),
+ cloud: :aws,
+ url: "postgresql://plrl:plural@localhost:26257/plural",
+ certificate: File.read!("../../test-certs/client.root.crt")
+ }
+ end
+
def with_password(%Schema.User{} = user, password) do
Schema.User.changeset(user, %{password: password})
|> Ecto.Changeset.apply_changes()
diff --git a/apps/core/test/test_helper.exs b/apps/core/test/test_helper.exs
index 433893338..fa1d67468 100644
--- a/apps/core/test/test_helper.exs
+++ b/apps/core/test/test_helper.exs
@@ -33,5 +33,7 @@ Mimic.copy(Core.Services.Shell.Pods)
Mimic.copy(Vault)
Mimic.copy(System)
Mimic.copy(Core.Clients.Vault)
+Mimic.copy(Core.Clients.Console)
+Mimic.copy(Core.Services.Cloud.Poller)
{:ok, _} = Application.ensure_all_started(:ex_machina)
diff --git a/apps/cron/lib/cron/prune/cloud.ex b/apps/cron/lib/cron/prune/cloud.ex
new file mode 100644
index 000000000..699e81a06
--- /dev/null
+++ b/apps/cron/lib/cron/prune/cloud.ex
@@ -0,0 +1,17 @@
+defmodule Cron.Prune.Cloud do
+ @moduledoc """
+ Reaps unpaid cloud consoles
+ """
+ use Cron
+ alias Core.Schema.{ConsoleInstance}
+ alias Core.Services.Cloud
+
+ def run() do
+ ConsoleInstance.unpaid()
+ |> ConsoleInstance.reapable()
+ |> ConsoleInstance.ordered(asc: :id)
+ |> Core.Repo.stream(method: :keyset)
+ |> Core.throttle()
+ |> Enum.each(&Cloud.reap/1)
+ end
+end
diff --git a/apps/cron/test/cron/prune/cloud_test.exs b/apps/cron/test/cron/prune/cloud_test.exs
new file mode 100644
index 000000000..db6182994
--- /dev/null
+++ b/apps/cron/test/cron/prune/cloud_test.exs
@@ -0,0 +1,27 @@
+defmodule Cron.Prune.CloudTest do
+ use Core.SchemaCase
+ alias Cron.Prune.Cloud
+
+ describe "#run/0" do
+ test "it will prune old, unused invites" do
+ old = Timex.now() |> Timex.shift(weeks: -1) |> Timex.shift(minutes: -30)
+ account = insert(:account)
+ user = insert(:user, account: account)
+ insert(:platform_subscription, account: account)
+
+ first = insert(:console_instance)
+ ignore = insert(:console_instance, owner: user)
+ ignore2 = insert(:console_instance, first_notif_at: Timex.now())
+ second = insert(:console_instance, first_notif_at: old)
+ third = insert(:console_instance, first_notif_at: old, second_notif_at: old)
+
+ Cloud.run()
+
+ assert refetch(first).first_notif_at
+ refute refetch(ignore).first_notif_at
+ refute refetch(ignore2).second_notif_at
+ assert refetch(second).second_notif_at
+ assert refetch(third).deleted_at
+ end
+ end
+end
diff --git a/apps/email/lib/email/builder/console_reaped.ex b/apps/email/lib/email/builder/console_reaped.ex
new file mode 100644
index 000000000..b79831b36
--- /dev/null
+++ b/apps/email/lib/email/builder/console_reaped.ex
@@ -0,0 +1,18 @@
+defmodule Email.Builder.ConsoleReaped do
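+  @moduledoc """
+  Email warning an instance owner that their cloud console is eligible to be decommissioned;
+  the copy differs between the first and second notification.
+  """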
+ use Email.Builder.Base
+ alias Core.Schema.ConsoleInstance
+
+ def email(inst) do
+ %{owner: user} = inst = Core.Repo.preload(inst, [:owner])
+
+ base_email()
+ |> to(expand_service_account(user))
+ |> subject("Your Plural Cloud Instance #{inst.name} is eligible to be decommissioned")
+ |> assign(:inst, inst)
+ |> assign(:warning, warning(inst))
+ |> render(:console_reaped)
+ end
+
+ defp warning(%ConsoleInstance{second_notif_at: nil}), do: 1
+ defp warning(_), do: 2
+end
diff --git a/apps/email/lib/email/deliverable/cloud.ex b/apps/email/lib/email/deliverable/cloud.ex
new file mode 100644
index 000000000..db88f877f
--- /dev/null
+++ b/apps/email/lib/email/deliverable/cloud.ex
@@ -0,0 +1,3 @@
+defimpl Email.Deliverable, for: Core.PubSub.ConsoleInstanceReaped do
+ def email(%{item: inst}), do: Email.Builder.ConsoleReaped.email(inst)
+end
diff --git a/apps/email/lib/email_web/templates/email/console_reaped.html.eex b/apps/email/lib/email_web/templates/email/console_reaped.html.eex
new file mode 100644
index 000000000..58a6b7bb2
--- /dev/null
+++ b/apps/email/lib/email_web/templates/email/console_reaped.html.eex
@@ -0,0 +1,22 @@
+<p>
+  Your Plural Cloud instance <%= @inst.name %> is eligible to be deprovisioned.
+</p>
+<p>
+  You must have an active, paid plan to continue using Plural Cloud. Please update your billing information and/or initiate a new subscription
+  to continue using your instance.
+</p>
+
+<%= if @warning == 2 do %>
+<p>
+  Since your instance has been unpaid for 1 week, you have only one more week before we reap your instance.
+</p>
+<% else %>
+<p>
+  We provide a two week grace period for delinquent accounts to fix any billing issues. After that period, your instance will
+  be scheduled to be reaped.
+</p>
+<% end %>
+
+<a href="<%= url("/account/billing") %>">
+  Go to Billing
+</a>
diff --git a/apps/email/lib/email_web/templates/email/console_reaped.text.eex b/apps/email/lib/email_web/templates/email/console_reaped.text.eex
new file mode 100644
index 000000000..ea350316d
--- /dev/null
+++ b/apps/email/lib/email_web/templates/email/console_reaped.text.eex
@@ -0,0 +1,11 @@
+Your Plural Cloud instance is eligible to be deprovisioned.
+
+You must have an active, paid plan to continue using Plural Cloud. Please update your billing information and/or initiate a new subscription to continue using your instance.
+
+<%= if @warning == 2 do %>
+Since your instance has been unpaid for 1 week, you have *only one more week* before we reap your instance
+<% else %>
+We provide a *two week grace period* for delinquent accounts to fix any billing issues. After that period, your instance will be scheduled to be reaped.
+<% end %>
+
+Go to Billing here: <%= url("/account/billing") %>
diff --git a/apps/email/test/email/deliverable/cloud_test.exs b/apps/email/test/email/deliverable/cloud_test.exs
new file mode 100644
index 000000000..b6a29b346
--- /dev/null
+++ b/apps/email/test/email/deliverable/cloud_test.exs
@@ -0,0 +1,37 @@
+defmodule Email.Deliverable.CloudTest do
+ use Core.SchemaCase, async: true
+ use Bamboo.Test
+
+ alias Core.PubSub
+ alias Email.PubSub.Consumer
+
+ describe "ConsoleInstanceReaped" do
+ test "it can send a first warning" do
+ owner = insert(:user, service_account: true)
+ insert(:impersonation_policy_binding,
+ policy: build(:impersonation_policy, user: owner),
+ user: build(:user)
+ )
+ inst = insert(:console_instance, owner: owner, first_notif_at: Timex.now())
+
+ event = %PubSub.ConsoleInstanceReaped{item: inst}
+ Consumer.handle_event(event)
+
+ assert_delivered_email Email.Builder.ConsoleReaped.email(inst)
+ end
+
+ test "it can send a second warning email" do
+ owner = insert(:user, service_account: true)
+ insert(:impersonation_policy_binding,
+ policy: build(:impersonation_policy, user: owner),
+ user: build(:user)
+ )
+ inst = insert(:console_instance, owner: owner, second_notif_at: Timex.now())
+
+ event = %PubSub.ConsoleInstanceReaped{item: inst}
+ Consumer.handle_event(event)
+
+ assert_delivered_email Email.Builder.ConsoleReaped.email(inst)
+ end
+ end
+end
diff --git a/apps/graphql/lib/graphql.ex b/apps/graphql/lib/graphql.ex
index e28eabdb4..62cb20d24 100644
--- a/apps/graphql/lib/graphql.ex
+++ b/apps/graphql/lib/graphql.ex
@@ -26,6 +26,7 @@ defmodule GraphQl do
import_types GraphQl.Schema.Test
import_types GraphQl.Schema.AI
import_types GraphQl.Schema.Cluster
+ import_types GraphQl.Schema.Cloud
alias GraphQl.Resolvers.{
User,
@@ -43,7 +44,8 @@ defmodule GraphQl do
Dns,
Test,
Cluster,
- Upgrade
+ Upgrade,
+ Cloud
}
@sources [
@@ -63,6 +65,7 @@ defmodule GraphQl do
Test,
Cluster,
Upgrade,
+ Cloud,
GraphQl.InstallationLoader,
GraphQl.ShellLoader,
GraphQl.LockLoader,
@@ -124,6 +127,7 @@ defmodule GraphQl do
import_fields :test_queries
import_fields :ai_queries
import_fields :cluster_queries
+ import_fields :cloud_queries
end
mutation do
@@ -144,6 +148,7 @@ defmodule GraphQl do
import_fields :rollout_mutations
import_fields :test_mutations
import_fields :cluster_mutations
+ import_fields :cloud_mutations
end
subscription do
diff --git a/apps/graphql/lib/graphql/resolvers/cloud.ex b/apps/graphql/lib/graphql/resolvers/cloud.ex
new file mode 100644
index 000000000..497217a3c
--- /dev/null
+++ b/apps/graphql/lib/graphql/resolvers/cloud.ex
@@ -0,0 +1,24 @@
+defmodule GraphQl.Resolvers.Cloud do
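+  @moduledoc """
+  Resolvers backing the cloud queries and mutations defined in GraphQl.Schema.Cloud.
+  """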
+ use GraphQl.Resolvers.Base, model: Core.Schema.ConsoleInstance
+ alias Core.Services.{Cloud, Clusters}
+
+ def resolve_instance(%{id: id}, %{context: %{current_user: user}}),
+ do: Cloud.visible(id, user)
+
+ def resolve_cluster(%ConsoleInstance{url: url}, _, _), do: {:ok, Clusters.get_cluster_by_url(url)}
+
+ def list_instances(args, %{context: %{current_user: user}}) do
+ ConsoleInstance.for_account(user.account_id)
+ |> ConsoleInstance.ordered()
+ |> paginate(args)
+ end
+
+ def create_instance(%{attributes: attrs}, %{context: %{current_user: user}}),
+ do: Cloud.create_instance(attrs, user)
+
+ def update_instance(%{id: id, attributes: attrs}, %{context: %{current_user: user}}),
+ do: Cloud.update_instance(attrs, id, user)
+
+ def delete_instance(%{id: id}, %{context: %{current_user: user}}),
+ do: Cloud.delete_instance(id, user)
+end
diff --git a/apps/graphql/lib/graphql/schema/cloud.ex b/apps/graphql/lib/graphql/schema/cloud.ex
new file mode 100644
index 000000000..3e4a7ff71
--- /dev/null
+++ b/apps/graphql/lib/graphql/schema/cloud.ex
@@ -0,0 +1,85 @@
+defmodule GraphQl.Schema.Cloud do
+ use GraphQl.Schema.Base
+ alias Core.Schema.{CloudCluster, ConsoleInstance}
+ alias GraphQl.Resolvers.{User, Cloud}
+
+ ecto_enum :cloud_provider, CloudCluster.Cloud
+ ecto_enum :console_instance_status, ConsoleInstance.Status
+ ecto_enum :console_size, ConsoleInstance.Size
+
+ input_object :console_instance_attributes do
+ field :name, non_null(:string), description: "the name of this instance (globally unique)"
+ field :size, non_null(:console_size), description: "a heuristic size of this instance"
+ field :cloud, non_null(:cloud_provider), description: "the cloud provider to deploy to"
+ field :region, non_null(:string), description: "the region to deploy to (provider specific)"
+ end
+
+ input_object :console_instance_update_attributes do
+ field :size, :console_size
+ field :configuration, :console_configuration_update_attributes
+ end
+
+ input_object :console_configuration_update_attributes do
+ field :encryption_key, :string
+ end
+
+ object :console_instance do
+ field :id, non_null(:id)
+ field :name, non_null(:string), description: "the name of this instance (globally unique)"
+ field :subdomain, non_null(:string), description: "the subdomain this instance lives under"
+ field :url, non_null(:string), description: "full console url of this instance"
+ field :cloud, non_null(:cloud_provider), description: "the cloud provider hosting this instance"
+ field :size, non_null(:console_size), description: "the heuristic size of this instance"
+ field :region, non_null(:string), description: "the region this instance is hosted in"
+ field :status, non_null(:console_instance_status),
+ description: "the provisioning status of this instance, liveness is fetched through the console field"
+
+ field :deleted_at, :datetime, description: "the time this instance was deleted on"
+
+ field :console, :cluster, resolve: &Cloud.resolve_cluster/3
+ field :owner, :user, resolve: dataloader(User)
+
+ timestamps()
+ end
+
+ connection node_type: :console_instance
+
+ object :cloud_queries do
+ field :console_instance, :console_instance do
+ middleware Authenticated
+ arg :id, non_null(:id)
+
+ safe_resolve &Cloud.resolve_instance/2
+ end
+
+ connection field :console_instances, node_type: :console_instance do
+ middleware Authenticated
+
+ safe_resolve &Cloud.list_instances/2
+ end
+ end
+
+ object :cloud_mutations do
+ field :create_console_instance, :console_instance do
+ middleware Authenticated
+ arg :attributes, non_null(:console_instance_attributes)
+
+ safe_resolve &Cloud.create_instance/2
+ end
+
+ field :update_console_instance, :console_instance do
+ middleware Authenticated
+ arg :id, non_null(:id)
+ arg :attributes, non_null(:console_instance_update_attributes)
+
+ safe_resolve &Cloud.update_instance/2
+ end
+
+ field :delete_console_instance, :console_instance do
+ middleware Authenticated
+ arg :id, non_null(:id)
+
+ safe_resolve &Cloud.delete_instance/2
+ end
+ end
+end
diff --git a/apps/graphql/test/mutations/cloud_mutations_test.exs b/apps/graphql/test/mutations/cloud_mutations_test.exs
new file mode 100644
index 000000000..98d2b1ccc
--- /dev/null
+++ b/apps/graphql/test/mutations/cloud_mutations_test.exs
@@ -0,0 +1,89 @@
+defmodule GraphQl.CloudMutationsTest do
+ use Core.SchemaCase, async: true
+ use Mimic
+ import GraphQl.TestHelpers
+
+ describe "createConsoleInstance" do
+ test "it can create an instance" do
+ account = insert(:account)
+ enable_features(account, [:cd])
+ user = admin_user(account)
+ insert(:cloud_cluster)
+ insert(:cockroach_cluster)
+ insert(:repository, name: "console")
+
+ expect(HTTPoison, :post, fn _, _, _ ->
+ {:ok, %{status_code: 200, body: Jason.encode!(%{client_id: "123", client_secret: "secret"})}}
+ end)
+
+ {:ok, %{data: %{"createConsoleInstance" => created}}} = run_query("""
+ mutation Create($attrs: ConsoleInstanceAttributes!) {
+ createConsoleInstance(attributes: $attrs) {
+ id
+ name
+ region
+ size
+ cloud
+ }
+ }
+ """, %{"attrs" => %{
+ "name" => "plrltest",
+ "cloud" => "AWS",
+ "size" => "SMALL",
+ "region" => "us-east-1"
+ }}, %{current_user: user})
+
+ assert created["name"] == "plrltest"
+ assert created["cloud"] == "AWS"
+ assert created["size"] == "SMALL"
+ assert created["region"] == "us-east-1"
+ end
+ end
+
+ describe "updateConsoleInstance" do
+ test "you can update an instance" do
+ user = insert(:user)
+ sa = insert(:user, service_account: true)
+ insert(:impersonation_policy_binding,
+ policy: build(:impersonation_policy, user: sa),
+ user: user
+ )
+ instance = insert(:console_instance, owner: sa)
+
+ {:ok, %{data: %{"updateConsoleInstance" => updated}}} = run_query("""
+ mutation Update($id: ID!, $attrs: ConsoleInstanceUpdateAttributes!) {
+ updateConsoleInstance(id: $id, attributes: $attrs) {
+ id
+ size
+ }
+ }
+ """, %{"id" => instance.id, "attrs" => %{"size" => "MEDIUM"}}, %{current_user: user})
+
+ assert updated["id"] == instance.id
+ assert updated["size"] == "MEDIUM"
+ end
+ end
+
+ describe "deleteConsoleInstance" do
+ test "you can delete your instance" do
+ user = insert(:user)
+ sa = insert(:user, service_account: true)
+ insert(:impersonation_policy_binding,
+ policy: build(:impersonation_policy, user: sa),
+ user: user
+ )
+ instance = insert(:console_instance, owner: sa)
+
+ {:ok, %{data: %{"deleteConsoleInstance" => deleted}}} = run_query("""
+ mutation Deleted($id: ID!) {
+ deleteConsoleInstance(id: $id) {
+ id
+ deletedAt
+ }
+ }
+ """, %{"id" => instance.id}, %{current_user: user})
+
+ assert deleted["id"] == instance.id
+ end
+ end
+end
diff --git a/apps/graphql/test/queries/cloud_queries_test.exs b/apps/graphql/test/queries/cloud_queries_test.exs
new file mode 100644
index 000000000..3cd410d0e
--- /dev/null
+++ b/apps/graphql/test/queries/cloud_queries_test.exs
@@ -0,0 +1,49 @@
+defmodule GraphQl.CloudQueriesTest do
+ use Core.SchemaCase, async: true
+ import GraphQl.TestHelpers
+
+ describe "consoleInstances" do
+ test "it can fetch the cloud instances in your account" do
+ user = insert(:user)
+ instances = insert_list(3, :console_instance, owner: insert(:user, account: user.account))
+ insert_list(2, :console_instance)
+
+ {:ok, %{data: %{"consoleInstances" => found}}} = run_query("""
+ query {
+ consoleInstances(first: 5) {
+ edges { node { id } }
+ }
+ }
+ """, %{}, %{current_user: user})
+
+ assert from_connection(found)
+ |> ids_equal(instances)
+ end
+ end
+
+ describe "consoleInstance" do
+ test "you can describe console instances you can see" do
+ user = insert(:user)
+ instance = insert(:console_instance, owner: insert(:user, account: user.account))
+
+ {:ok, %{data: %{"consoleInstance" => found}}} = run_query("""
+ query Get($id: ID!) {
+ consoleInstance(id: $id) { id }
+ }
+ """, %{"id" => instance.id}, %{current_user: user})
+
+ assert found["id"] == instance.id
+ end
+
+ test "you cannot describe console instances you cannot see" do
+ user = insert(:user)
+ instance = insert(:console_instance)
+
+ {:ok, %{errors: [_ | _]}} = run_query("""
+ query Get($id: ID!) {
+ consoleInstance(id: $id) { id }
+ }
+ """, %{"id" => instance.id}, %{current_user: user})
+ end
+ end
+end
diff --git a/apps/worker/lib/worker/application.ex b/apps/worker/lib/worker/application.ex
index 2c51bc900..7495bec39 100644
--- a/apps/worker/lib/worker/application.ex
+++ b/apps/worker/lib/worker/application.ex
@@ -26,7 +26,7 @@ defmodule Worker.Application do
def broker() do
case Worker.conf(:start_broker) do
- true -> [{Worker.Conduit.Broker, []}]
+ true -> [{Worker.Conduit.Broker, []}, Core.Services.Cloud.Poller]
_ -> []
end
end
diff --git a/apps/worker/lib/worker/conduit/broker.ex b/apps/worker/lib/worker/conduit/broker.ex
index a192e9692..13860b11f 100644
--- a/apps/worker/lib/worker/conduit/broker.ex
+++ b/apps/worker/lib/worker/conduit/broker.ex
@@ -19,5 +19,6 @@ defmodule Worker.Conduit.Broker do
pipe_through [:in_tracking, :error_handling, :deserialize]
subscribe :scan, Scan, from: "plural.scan"
subscribe :cluster, Cluster, from: "plural.cluster"
+ subscribe :cloud, Cloud, from: "plural.cloud"
end
end
diff --git a/apps/worker/lib/worker/conduit/subscribers/cloud.ex b/apps/worker/lib/worker/conduit/subscribers/cloud.ex
new file mode 100644
index 000000000..12ac4cbed
--- /dev/null
+++ b/apps/worker/lib/worker/conduit/subscribers/cloud.ex
@@ -0,0 +1,16 @@
+defmodule Worker.Conduit.Subscribers.Cloud do
+ use Worker.Conduit.Subscribers.Base
+ alias Core.Services.Cloud.Workflow
+ alias Core.PubSub
+
+ def process(%Conduit.Message{body: body} = msg, _) do
+ case handle(body) do
+ {:ok, _} -> ack(msg)
+ _ -> nack(msg)
+ end
+ end
+
+ def handle(%PubSub.ConsoleInstanceCreated{item: instance}), do: Workflow.provision(instance)
+ def handle(%PubSub.ConsoleInstanceUpdated{item: instance}), do: Workflow.sync(instance)
+ def handle(%PubSub.ConsoleInstanceDeleted{item: instance}), do: Workflow.deprovision(instance)
+end
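This subscriber follows the same ack/nack convention as the existing scan and cluster subscribers: Conduit stamps the message status, so a failed provision is left nacked and eligible for redelivery. A rough sketch of a message flowing through it (illustrative only, not a real test; a persisted instance is assumed in practice):

```elixir
# Sketch of the ack/nack contract above.
alias Core.PubSub
alias Worker.Conduit.Subscribers.Cloud

instance = %Core.Schema.ConsoleInstance{}  # placeholder; normally a persisted record
msg = %Conduit.Message{body: %PubSub.ConsoleInstanceCreated{item: instance}}

case Cloud.process(msg, []) do
  %Conduit.Message{status: :ack} -> :provisioning_started
  %Conduit.Message{status: :nack} -> :will_be_retried
end
```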
diff --git a/config/config.exs b/config/config.exs
index 7bfdadf51..6edf601e0 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -141,7 +141,12 @@ config :core,
github_demo_token: "test-pat",
github_demo_org: "pluralsh-demos",
trial_plan: "Pro Trial",
- sysbox_emails: []
+ console_token: "bogus",
+ console_url: "https://console.example.com",
+ sysbox_emails: [],
+ mgmt_repo: "https://github.com/pluralsh/plural.git",
+ cockroach_parameters: [],
+ cockroach_ssl: true
config :briefly,
directory: [{:system, "TMPDIR"}, {:system, "TMP"}, {:system, "TEMP"}, "/tmp"],
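The new keys are ordinary `:core` application config, so they can be read at runtime like any other setting; a small sketch using plain `Application.get_env/2` (whatever `conf/1`-style helper the app normally uses would behave the same):

```elixir
# Sketch: reading the new cloud-related settings at runtime.
console_url   = Application.get_env(:core, :console_url)
console_token = Application.get_env(:core, :console_token)
mgmt_repo     = Application.get_env(:core, :mgmt_repo)

# CockroachDB knobs are split between connection parameters and a TLS flag:
roach_params = Application.get_env(:core, :cockroach_parameters) # e.g. [sslmode: "allow"] in test
roach_ssl?   = Application.get_env(:core, :cockroach_ssl)
```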
diff --git a/config/test.exs b/config/test.exs
index b0a0e0e65..b730ec8b2 100644
--- a/config/test.exs
+++ b/config/test.exs
@@ -90,4 +90,6 @@ config :worker,
config :core,
workos_webhook: "supersecret",
enforce_pricing: true,
- sysbox_emails: ["sysbox@plural.sh"]
+ sysbox_emails: ["sysbox@plural.sh"],
+ cockroach_parameters: [sslmode: "allow"],
+ cockroach_ssl: true
diff --git a/docker-compose.yml b/docker-compose.yml
index 55ea799d5..1130b6cdc 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,4 +1,3 @@
-version: '3'
services:
db:
image: postgres:14
@@ -9,6 +8,20 @@ services:
POSTGRES_PASSWORD: postgres
volumes:
- database_data:/var/lib/postgresql/data
+ cockroach:
+ image: cockroachdb/cockroach:v24.1.3
+ restart: always
+ ports:
+ - 26257:26257
+ - 8080:8080
+ command: start-single-node --certs-dir /cockroach/certs
+ environment:
+ - COCKROACH_DATABASE=plural
+ - COCKROACH_USER=plrl
+ - COCKROACH_PASSWORD=plural
+ volumes:
+ - roach_data:/cockroach/cockroach-data
+ - ./test-certs:/cockroach/certs
rabbit:
image: rabbitmq:3-management
hostname: "rabbit1"
@@ -51,5 +64,7 @@ services:
volumes:
database_data:
driver: local
+ roach_data:
+ driver: local
influxdb:
driver: local
\ No newline at end of file
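CockroachDB speaks the PostgreSQL wire protocol, so the node defined above can be reached from Elixir with Postgrex using the compose credentials. A rough sketch; the TLS options depend on the certs mounted from ./test-certs, so `ssl: true` here is illustrative:

```elixir
# Rough sketch: connecting to the docker-compose CockroachDB node via Postgrex.
{:ok, conn} =
  Postgrex.start_link(
    hostname: "localhost",
    port: 26257,
    username: "plrl",
    password: "plural",
    database: "plural",
    ssl: true
  )

Postgrex.query!(conn, "SELECT version()", [])
```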
diff --git a/mix.lock b/mix.lock
index 7cf270080..b6487ead8 100644
--- a/mix.lock
+++ b/mix.lock
@@ -1,5 +1,6 @@
%{
"absinthe": {:hex, :absinthe, "1.7.0", "36819e7b1fd5046c9c734f27fe7e564aed3bda59f0354c37cd2df88fd32dd014", [:mix], [{:dataloader, "~> 1.0.0", [hex: :dataloader, repo: "hexpm", optional: true]}, {:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}, {:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0 or ~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "566a5b5519afc9b29c4d367f0c6768162de3ec03e9bf9916f9dc2bcbe7c09643"},
+ "absinthe_client": {:hex, :absinthe_client, "0.1.1", "1e778d587a27b85ecc35e4a5fedc64c85d9fdfd05395745c7af5345564dff54e", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:req, "~> 0.4", [hex: :req, repo: "hexpm", optional: false]}, {:slipstream, "~> 1.0", [hex: :slipstream, repo: "hexpm", optional: false]}], "hexpm", "e75a28c5bb647f485e9c03bbc3a47e7783742794bd4c10f3307a495a9e7273b6"},
"absinthe_phoenix": {:hex, :absinthe_phoenix, "2.0.2", "e607b438db900049b9b3760f8ecd0591017a46122fffed7057bf6989020992b5", [:mix], [{:absinthe, "~> 1.5", [hex: :absinthe, repo: "hexpm", optional: false]}, {:absinthe_plug, "~> 1.5", [hex: :absinthe_plug, repo: "hexpm", optional: false]}, {:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.5", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.13 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}], "hexpm", "d36918925c380dc7d2ed7d039c9a3b4182ec36723f7417a68745ade5aab22f8d"},
"absinthe_plug": {:hex, :absinthe_plug, "1.5.8", "38d230641ba9dca8f72f1fed2dfc8abd53b3907d1996363da32434ab6ee5d6ab", [:mix], [{:absinthe, "~> 1.5", [hex: :absinthe, repo: "hexpm", optional: false]}, {:plug, "~> 1.4", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "bbb04176647b735828861e7b2705465e53e2cf54ccf5a73ddd1ebd855f996e5a"},
"absinthe_relay": {:hex, :absinthe_relay, "1.5.2", "cfb8aed70f4e4c7718d3f1c212332d2ea728f17c7fc0f68f1e461f0f5f0c4b9a", [:mix], [{:absinthe, "~> 1.5.0 or ~> 1.6.0 or ~> 1.7.0", [hex: :absinthe, repo: "hexpm", optional: false]}, {:ecto, "~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm", "0587ee913afa31512e1457a5064ee88427f8fe7bcfbeeecd41c71d9cff0b62b6"},
@@ -20,7 +21,7 @@
"bourne": {:hex, :bourne, "1.1.0", "60a8bb3d5ad533a3a18257de9304e70e6bb2f4d7f354a529ef3f47c327c2dd97", [:mix], [{:ecto, "~> 2.1", [hex: :ecto, repo: "hexpm", optional: false]}], "hexpm", "81a389611ad067dc305d5ba4674280ea2b99ef012bca5e7de48357545edc05bc"},
"briefly": {:git, "https://github.com/CargoSense/briefly", "b0fd495bf0c5ef2c44de2791a8cc7a20813c7d36", [ref: "b0fd495bf0c5ef2c44de2791a8cc7a20813c7d36"]},
"bunt": {:hex, :bunt, "0.2.1", "e2d4792f7bc0ced7583ab54922808919518d0e57ee162901a16a1b6664ef3b14", [:mix], [], "hexpm", "a330bfb4245239787b15005e66ae6845c9cd524a288f0d141c148b02603777a5"},
- "castore": {:hex, :castore, "0.1.19", "a2c3e46d62b7f3aa2e6f88541c21d7400381e53704394462b9fd4f06f6d42bb6", [:mix], [], "hexpm", "e96e0161a5dc82ef441da24d5fa74aefc40d920f3a6645d15e1f9f3e66bb2109"},
+ "castore": {:hex, :castore, "0.1.22", "4127549e411bedd012ca3a308dede574f43819fe9394254ca55ab4895abfa1a2", [:mix], [], "hexpm", "c17576df47eb5aa1ee40cc4134316a99f5cad3e215d5c77b8dd3cfef12a22cac"},
"certifi": {:hex, :certifi, "2.9.0", "6f2a475689dd47f19fb74334859d460a2dc4e3252a3324bd2111b8f0429e7e21", [:rebar3], [], "hexpm", "266da46bdb06d6c6d35fde799bcb28d36d985d424ad7c08b5bb48f5b5cdd4641"},
"cloudflare": {:hex, :cloudflare, "0.2.0", "e754fab186d5c9cd30cafdd336b8150b227e31e45ea6bd279d3c6cb20ff21ac1", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:restlax, "~> 0.1", [hex: :restlax, repo: "hexpm", optional: false]}], "hexpm", "0c2fced3058d92f8732c9911e309496a2f26f6f6d0f8216b8a2c62c2f8028748"},
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
@@ -59,7 +60,7 @@
"ex_machina": {:hex, :ex_machina, "2.7.0", "b792cc3127fd0680fecdb6299235b4727a4944a09ff0fa904cc639272cd92dc7", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm", "419aa7a39bde11894c87a615c4ecaa52d8f107bbdd81d810465186f783245bf8"},
"excoveralls": {:hex, :excoveralls, "0.15.2", "809c1016660d80b28bbcd8cb7fd761791300def53345c1af5bd97db1330619ad", [:mix], [{:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "f359dda36f15ae885d3259a90919b09ae9318f37c583c403493fe23808b2b882"},
"file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"},
- "finch": {:hex, :finch, "0.13.0", "c881e5460ec563bf02d4f4584079e62201db676ed4c0ef3e59189331c4eddf7b", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "49957dcde10dcdc042a123a507a9c5ec5a803f53646d451db2f7dea696fba6cc"},
+ "finch": {:hex, :finch, "0.17.0", "17d06e1d44d891d20dbd437335eebe844e2426a0cd7e3a3e220b461127c73f70", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6 or ~> 1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "8d014a661bb6a437263d4b5abf0bcbd3cf0deb26b1e8596f2a271d22e48934c7"},
"flow": {:hex, :flow, "0.15.0", "503717c0e367b5713336181d5305106840f64abbad32c75d7af5ef1bb0908e38", [:mix], [{:gen_stage, "~> 0.14.0", [hex: :gen_stage, repo: "hexpm", optional: false]}], "hexpm", "d7ecbd4dd38a188494bc996d5014ef8335f436a0b262140a1f6441ae94714581"},
"gen_stage": {:hex, :gen_stage, "0.14.3", "d0c66f1c87faa301c1a85a809a3ee9097a4264b2edf7644bf5c123237ef732bf", [:mix], [], "hexpm", "8453e2289d94c3199396eb517d65d6715ef26bcae0ee83eb5ff7a84445458d76"},
"gen_state_machine": {:hex, :gen_state_machine, "2.1.0", "a38b0e53fad812d29ec149f0d354da5d1bc0d7222c3711f3a0bd5aa608b42992", [:mix], [], "hexpm", "ae367038808db25cee2f2c4b8d0531522ea587c4995eb6f96ee73410a60fa06b"},
@@ -98,12 +99,14 @@
"mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"},
"mimic": {:hex, :mimic, "1.1.3", "3bad83d5271b4faa7bbfef587417a6605cbbc802a353395d446a1e5f46fe7115", [:mix], [], "hexpm", "0d93cb8fcd00a1013bae56050755879050bb9b8ef0c3d51b6fec5a2f1fc33d66"},
"mint": {:hex, :mint, "1.4.2", "50330223429a6e1260b2ca5415f69b0ab086141bc76dc2fbf34d7c389a6675b2", [:mix], [{:castore, "~> 0.1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "ce75a5bbcc59b4d7d8d70f8b2fc284b1751ffb35c7b6a6302b5192f8ab4ddd80"},
+ "mint_web_socket": {:hex, :mint_web_socket, "1.0.4", "0b539116dbb3d3f861cdf5e15e269a933cb501c113a14db7001a3157d96ffafd", [:mix], [{:mint, ">= 1.4.1 and < 2.0.0-0", [hex: :mint, repo: "hexpm", optional: false]}], "hexpm", "027d4c5529c45a4ba0ce27a01c0f35f284a5468519c045ca15f43decb360a991"},
"mock": {:hex, :mock, "0.3.3", "42a433794b1291a9cf1525c6d26b38e039e0d3a360732b5e467bfc77ef26c914", [:mix], [{:meck, "~> 0.8.13", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm"},
"mojito": {:hex, :mojito, "0.7.12", "0461764ba427e720cc7270a603ac2b157a0c70688ffdf19dd116815eba554576", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: false]}, {:mint, "~> 1.1", [hex: :mint, repo: "hexpm", optional: false]}, {:poolboy, "~> 1.5", [hex: :poolboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "1fa274d0fab59c6986d1efcb9d40f01d0b1159403968665a141a17601c29daac"},
"nebulex": {:hex, :nebulex, "2.4.2", "b3d2d86d57b15896fb8e6d6dd49b4a9dee2eedd6eddfb3b69bfdb616a09c2817", [:mix], [{:decorator, "~> 1.4", [hex: :decorator, repo: "hexpm", optional: true]}, {:shards, "~> 1.0", [hex: :shards, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "c9f888e5770fd47614c95990d0a02c3515216d51dc72e3c830eaf28f5649ba52"},
- "nimble_options": {:hex, :nimble_options, "0.4.0", "c89babbab52221a24b8d1ff9e7d838be70f0d871be823165c94dd3418eea728f", [:mix], [], "hexpm", "e6701c1af326a11eea9634a3b1c62b475339ace9456c1a23ec3bc9a847bca02d"},
+ "nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"},
+ "nimble_ownership": {:hex, :nimble_ownership, "0.3.2", "d4fa4056ade0ae33b5a9eb64554a1b3779689282e37513260125d2d6b32e4874", [:mix], [], "hexpm", "28b9a9f4094fda1aa8ca72f732ff3223eb54aa3eda4fed9022254de2c152b138"},
"nimble_parsec": {:hex, :nimble_parsec, "1.2.3", "244836e6e3f1200c7f30cb56733fd808744eca61fd182f731eac4af635cc6d0b", [:mix], [], "hexpm", "c8d789e39b9131acf7b99291e93dae60ab48ef14a7ee9d58c6964f59efb570b0"},
- "nimble_pool": {:hex, :nimble_pool, "0.2.6", "91f2f4c357da4c4a0a548286c84a3a28004f68f05609b4534526871a22053cde", [:mix], [], "hexpm", "1c715055095d3f2705c4e236c18b618420a35490da94149ff8b580a2144f653f"},
+ "nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"},
"oauth2": {:hex, :oauth2, "2.0.0", "338382079fe16c514420fa218b0903f8ad2d4bfc0ad0c9f988867dfa246731b0", [:mix], [{:hackney, "~> 1.13", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "881b8364ac7385f9fddc7949379cbe3f7081da37233a1aa7aab844670a91e7e7"},
"openid_connect": {:git, "https://github.com/pluralsh/openid_connect", "c3b2701b9adbe01fd89bbd09816ffa6c9e4a825e", []},
"parallax": {:hex, :parallax, "1.0.0", "decfed51b2d41c88ea4398d3bd8fadd2ceb1f913cf0f44ef2cb41e732ace31f5", [:mix], [], "hexpm", "8a8b91685bfacb483ff9370b98844ee01107544a11ee49b5f1d98df0eff2b1e4"},
@@ -135,16 +138,18 @@
"ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"},
"recon": {:hex, :recon, "2.5.2", "cba53fa8db83ad968c9a652e09c3ed7ddcc4da434f27c3eaa9ca47ffb2b1ff03", [:mix, :rebar3], [], "hexpm", "2c7523c8dee91dff41f6b3d63cba2bd49eb6d2fe5bf1eec0df7f87eb5e230e1c"},
"remote_ip": {:hex, :remote_ip, "0.2.1", "cd27cd8ea54ecaaf3532776ff4c5e353b3804e710302e88c01eadeaaf42e7e24", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:inet_cidr, "~> 1.0", [hex: :inet_cidr, repo: "hexpm", optional: false]}, {:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "2e7ab1a461cc3cd5719f37e116a08f45c8b8493923063631b164315d6b7ee8e0"},
+ "req": {:hex, :req, "0.4.14", "103de133a076a31044e5458e0f850d5681eef23dfabf3ea34af63212e3b902e2", [:mix], [{:aws_signature, "~> 0.3.2", [hex: :aws_signature, repo: "hexpm", optional: true]}, {:brotli, "~> 0.3.1", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:finch, "~> 0.17", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mime, "~> 1.6 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: true]}, {:nimble_ownership, "~> 0.2.0 or ~> 0.3.0", [hex: :nimble_ownership, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "2ddd3d33f9ab714ced8d3c15fd03db40c14dbf129003c4a3eb80fac2cc0b1b08"},
"restlax": {:hex, :restlax, "0.1.0", "753e00aff37802f703e6b709d3f85b7816e7740cc5f833c446d677a42ecf4824", [:mix], [{:finch, "~> 0.6", [hex: :finch, repo: "hexpm", optional: true]}, {:gun, "~> 1.0", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}, {:tesla, "~> 1.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm", "f1db23c10f7ff06ecd1f7e95380d49393a11194baacf58a1714c1e40a9d61123"},
"reverse_proxy_plug": {:hex, :reverse_proxy_plug, "2.1.1", "c5494a56c32e47e2f7ca677df4633846aec31a18ba1695928fef51e2d413cb27", [:mix], [{:cowboy, "~> 2.4", [hex: :cowboy, repo: "hexpm", optional: false]}, {:httpoison, "~> 1.2", [hex: :httpoison, repo: "hexpm", optional: true]}, {:plug, "~> 1.6", [hex: :plug, repo: "hexpm", optional: false]}, {:tesla, "~> 1.4", [hex: :tesla, repo: "hexpm", optional: true]}], "hexpm", "e8f65bdfd35ebffaea2006eb8422c9126cbd899d0bf1552aedf80730c30204b6"},
"sentry": {:hex, :sentry, "8.0.6", "c8de1bf0523bc120ec37d596c55260901029ecb0994e7075b0973328779ceef7", [:mix], [{:hackney, "~> 1.8", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.6", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, "~> 2.3", [hex: :plug_cowboy, repo: "hexpm", optional: true]}], "hexpm", "051a2d0472162f3137787c7c9d6e6e4ef239de9329c8c45b1f1bf1e9379e1883"},
"shards": {:hex, :shards, "1.0.1", "1bdbbf047db27f3c3eb800a829d4a47062c84d5543cbfebcfc4c14d038bf9220", [:make, :rebar3], [], "hexpm", "2c57788afbf053c4024366772892beee89b8b72e884e764fb0a075dfa7442041"},
+ "slipstream": {:hex, :slipstream, "1.1.1", "7e56f62f1a9ee81351e3c36f57b9b187e00dc2f470e70ba46ea7ad16e80b061f", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mint_web_socket, "~> 0.2 or ~> 1.0", [hex: :mint_web_socket, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.1 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c20e420cde1654329d38ec3aa1c0e4debbd4c91ca421491e7984ad4644e638a6"},
"sobelow": {:hex, :sobelow, "0.11.1", "23438964486f8112b41e743bbfd402da3e5b296fdc9eacab29914b79c48916dd", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "9897363a7eff96f4809304a90aad819e2ad5e5d24db547af502885146746a53c"},
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.6", "cf344f5692c82d2cd7554f5ec8fd961548d4fd09e7d22f5b62482e5aeaebd4b0", [:make, :mix, :rebar3], [], "hexpm", "bdb0d2471f453c88ff3908e7686f86f9be327d065cc1ec16fa4540197ea04680"},
"stripity_stripe": {:hex, :stripity_stripe, "2.17.2", "6e4c5f0b2deb3d3411f91e23fedbe9db4c031a52dfeb7074b4df4fdd22ad32cd", [:mix], [{:hackney, "~> 1.15", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}, {:uri_query, "~> 0.1.2", [hex: :uri_query, repo: "hexpm", optional: false]}], "hexpm", "42bffb591d34f3cfa95cb37e862140e44a61ca4c64578bf7bb591ba60c6a66bc"},
"swarm": {:hex, :swarm, "3.4.0", "64f8b30055d74640d2186c66354b33b999438692a91be275bb89cdc7e401f448", [:mix], [{:gen_state_machine, "~> 2.0", [hex: :gen_state_machine, repo: "hexpm", optional: false]}, {:libring, "~> 1.0", [hex: :libring, repo: "hexpm", optional: false]}], "hexpm", "94884f84783fc1ba027aba8fe8a7dae4aad78c98e9f9c76667ec3471585c08c6"},
"sweet_xml": {:hex, :sweet_xml, "0.7.3", "debb256781c75ff6a8c5cbf7981146312b66f044a2898f453709a53e5031b45b", [:mix], [], "hexpm", "e110c867a1b3fe74bfc7dd9893aa851f0eed5518d0d7cad76d7baafd30e4f5ba"},
- "telemetry": {:hex, :telemetry, "1.1.0", "a589817034a27eab11144ad24d5c0f9fab1f58173274b1e9bae7074af9cbee51", [:rebar3], [], "hexpm", "b727b2a1f75614774cff2d7565b64d0dfa5bd52ba517f16543e6fc7efcc0df48"},
+ "telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"},
"telemetry_metrics": {:hex, :telemetry_metrics, "0.6.1", "315d9163a1d4660aedc3fee73f33f1d355dcc76c5c3ab3d59e76e3edf80eef1f", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7be9e0871c41732c233be71e4be11b96e56177bf15dde64a8ac9ce72ac9834c6"},
"telemetry_poller": {:hex, :telemetry_poller, "1.0.0", "db91bb424e07f2bb6e73926fcafbfcbcb295f0193e0a00e825e589a0a47e8453", [:rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "b3a24eafd66c3f42da30fc3ca7dda1e9d546c12250a2d60d7b81d264fbec4f6e"},
"tesla": {:hex, :tesla, "1.4.3", "f5a494e08fb1abe4fd9c28abb17f3d9b62b8f6fc492860baa91efb1aab61c8a0", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.3", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, "~> 1.3", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "4.4.0", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "e0755bb664bf4d664af72931f320c97adbf89da4586670f4864bf259b5750386"},
diff --git a/plural/helm/plural/values.yaml b/plural/helm/plural/values.yaml
index b8ea2b7b6..5e231ef84 100644
--- a/plural/helm/plural/values.yaml
+++ b/plural/helm/plural/values.yaml
@@ -515,6 +515,10 @@ crons:
cronModule: Digest.Pending
cronTab: "0 12 * * 1"
envVars: []
+- cronName: plrl-prune-cloud-instances
+ cronModule: Prune.Cloud
+ cronTab: "45 1 * * *"
+ envVars: []
hydraSecrets:
dsn: memory
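The new cron entry follows the same module-name convention as the existing digest crons; the actual Prune.Cloud module is not part of this diff, but conceptually it is a daily sweep (see the `45 1 * * *` crontab above) of cloud instances past their grace period. A loose sketch, with hypothetical `Cloud.reapable/0` and `Cloud.reap/1` helpers:

```elixir
# Loose sketch only: Cron.Prune.Cloud ships elsewhere in the codebase, and the
# Cloud.reapable/0 / Cloud.reap/1 names here are hypothetical.
defmodule Cron.Prune.Cloud.Sketch do
  alias Core.Services.Cloud

  def run() do
    Cloud.reapable()              # hypothetical: instances unpaid beyond the two-week grace period
    |> Enum.each(&Cloud.reap/1)   # hypothetical: send the reaped warning / schedule deprovisioning
  end
end
```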
diff --git a/rel/config/config.exs b/rel/config/config.exs
index 5b65875cd..28130f082 100644
--- a/rel/config/config.exs
+++ b/rel/config/config.exs
@@ -90,7 +90,10 @@ config :core,
openai_token: get_env("OPENAI_BEARER_TOKEN"),
enforce_pricing: get_env("ENFORCE_PRICING"),
stripe_webhook_secret: get_env("STRIPE_WEBHOOK_SECRET"),
- github_demo_token: get_env("GITHUB_DEMO_TOKEN")
+ github_demo_token: get_env("GITHUB_DEMO_TOKEN"),
+ console_token: get_env("CONSOLE_SA_TOKEN"),
+ console_url: get_env("CONSOLE_URL"),
+ mgmt_repo: get_env("CONSOLE_MGMT_REPO")
if get_env("VAULT_HOST") do
diff --git a/schema/schema.graphql b/schema/schema.graphql
index d68fd0b2f..3950e7b21 100644
--- a/schema/schema.graphql
+++ b/schema/schema.graphql
@@ -1114,6 +1114,17 @@ type PolicyBinding {
updatedAt: DateTime
}
+type ConsoleInstanceEdge {
+ node: ConsoleInstance
+ cursor: String
+}
+
+enum ConsoleSize {
+ SMALL
+ MEDIUM
+ LARGE
+}
+
type UpgradeQueue {
id: ID!
acked: ID
@@ -1143,6 +1154,42 @@ type AuditEdge {
cursor: String
}
+type ConsoleInstance {
+ id: ID!
+
+ "the name of this instance (globally unique)"
+ name: String!
+
+ "the subdomain this instance lives under"
+ subdomain: String!
+
+ "full console url of this instance"
+ url: String!
+
+ "the cloud provider hosting this instance"
+ cloud: CloudProvider!
+
+ "the heuristic size of this instance"
+ size: ConsoleSize!
+
+ "the region this instance is hosted in"
+ region: String!
+
+ "the provisioning status of this instance, liveness is fetched through the console field"
+ status: ConsoleInstanceStatus!
+
+ "the time this instance was deleted on"
+ deletedAt: DateTime
+
+ console: Cluster
+
+ owner: User
+
+ insertedAt: DateTime
+
+ updatedAt: DateTime
+}
+
type InvoiceEdge {
node: Invoice
cursor: String
@@ -1320,6 +1367,15 @@ type RecipeTest {
args: [TestArgument]
}
+enum ConsoleInstanceStatus {
+ PENDING
+ DATABASE_CREATED
+ DEPLOYMENT_CREATED
+ PROVISIONED
+ DEPLOYMENT_DELETED
+ DATABASE_DELETED
+}
+
input IncidentAttributes {
title: String
severity: Int
@@ -1350,6 +1406,10 @@ input ChartInstallationAttributes {
versionId: ID
}
+input ConsoleConfigurationUpdateAttributes {
+ encryptionKey: String
+}
+
enum TestStatus {
QUEUED
RUNNING
@@ -1539,18 +1599,18 @@ input LockAttributes {
lock: String!
}
-input IncidentFilter {
- type: IncidentFilterType!
- value: String
- statuses: [IncidentStatus]
-}
-
enum OauthProvider {
GITHUB
GOOGLE
GITLAB
}
+input IncidentFilter {
+ type: IncidentFilterType!
+ value: String
+ statuses: [IncidentStatus]
+}
+
type ScanError {
message: String
}
@@ -1644,6 +1704,19 @@ type IncidentMessageDelta {
payload: IncidentMessage
}
+type DnsRecordConnection {
+ pageInfo: PageInfo!
+ edges: [DnsRecordEdge]
+}
+
+type Reaction {
+ name: String!
+ creator: User!
+ message: IncidentMessage!
+ insertedAt: DateTime
+ updatedAt: DateTime
+}
+
type ShellConfiguration {
workspace: ShellWorkspace
git: GitConfiguration
@@ -2014,19 +2087,17 @@ type RootMutationType {
"The cluster's cloud provider."
provider: Provider!
): Cluster
-}
-type DnsRecordConnection {
- pageInfo: PageInfo!
- edges: [DnsRecordEdge]
+ createConsoleInstance(attributes: ConsoleInstanceAttributes!): ConsoleInstance
+
+ updateConsoleInstance(id: ID!, attributes: ConsoleInstanceUpdateAttributes!): ConsoleInstance
+
+ deleteConsoleInstance(id: ID!): ConsoleInstance
}
-type Reaction {
- name: String!
- creator: User!
- message: IncidentMessage!
- insertedAt: DateTime
- updatedAt: DateTime
+type ConsoleInstanceConnection {
+ pageInfo: PageInfo!
+ edges: [ConsoleInstanceEdge]
}
"An installation of an application."
@@ -2398,6 +2469,10 @@ type RootQueryType {
"Get a list of clusters owned by the current account."
clusters(after: String, first: Int, before: String, last: Int): ClusterConnection
+ consoleInstance(id: ID!): ConsoleInstance
+
+ consoleInstances(after: String, first: Int, before: String, last: Int): ConsoleInstanceConnection
+
configuration: PluralConfiguration
tags(after: String, first: Int, before: String, last: Int, id: ID, type: TagGroup!, q: String): GroupedTagConnection
@@ -2817,6 +2892,11 @@ input OnboardingChecklistAttributes {
dismissed: Boolean
}
+input ConsoleInstanceUpdateAttributes {
+ size: ConsoleSize
+ configuration: ConsoleConfigurationUpdateAttributes
+}
+
enum RolloutStatus {
QUEUED
RUNNING
@@ -3027,6 +3107,10 @@ type Contributor {
updatedAt: DateTime
}
+enum CloudProvider {
+ AWS
+}
+
type ImageLayer {
digest: String
diffId: String
@@ -3641,6 +3725,20 @@ type PlatformMetrics {
publishers: Int
}
+input ConsoleInstanceAttributes {
+ "the name of this instance (globally unique)"
+ name: String!
+
+ "a heuristic size of this instance"
+ size: ConsoleSize!
+
+ "the cloud provider to deploy to"
+ cloud: CloudProvider!
+
+ "the region to deploy to (provider specific)"
+ region: String!
+}
+
type OidcSettings {
uriFormat: String
uriFormats: [String]
diff --git a/www/package.json b/www/package.json
index 0c1c40b40..e13d2a768 100644
--- a/www/package.json
+++ b/www/package.json
@@ -33,7 +33,7 @@
"packageManager": "yarn@3.6.0",
"dependencies": {
"@absinthe/socket": "0.2.1",
- "@apollo/client": "3.7.15",
+ "@apollo/client": "3.10.3",
"@emotion/react": "11.11.1",
"@emotion/styled": "11.11.0",
"@fireworks-js/react": "2.10.5",
@@ -178,4 +178,4 @@
"lint-staged": {
"./src/**/*.{js,jsx,ts,tsx,graphql,md}": "prettier --write"
}
-}
+}
\ No newline at end of file
diff --git a/www/src/components/repository/packages/Docker.tsx b/www/src/components/repository/packages/Docker.tsx
index 219a3597e..9e2765797 100644
--- a/www/src/components/repository/packages/Docker.tsx
+++ b/www/src/components/repository/packages/Docker.tsx
@@ -67,7 +67,6 @@ function PrivateControl({ dockerRepo }: any) {
- // @ts-expect-error
mutation({ variables: { attributes: { public: !dockerRepo.public } } })
}
>
diff --git a/www/src/components/shell/hooks/useOnboarded.ts b/www/src/components/shell/hooks/useOnboarded.ts
index be44a4011..1e4ae401c 100644
--- a/www/src/components/shell/hooks/useOnboarded.ts
+++ b/www/src/components/shell/hooks/useOnboarded.ts
@@ -29,7 +29,7 @@ function useOnboarded() {
cache.writeFragment({
data: data?.updateUser,
fragment: UserFragment,
- }),
+ }) ?? {},
},
})
},
diff --git a/www/src/generated/graphql.ts b/www/src/generated/graphql.ts
index 312ea7e96..ac13b3482 100644
--- a/www/src/generated/graphql.ts
+++ b/www/src/generated/graphql.ts
export type Incremental<T> = T | { [P in keyof T]?: P extends ' $fragmentName' | '__typename' ? T[P] : never };
const defaultOptions = {} as const;
/** All built-in and custom scalars, mapped to their actual values */
export type Scalars = {
- ID: { input: string | number; output: string; }
+ ID: { input: string; output: string; }
String: { input: string; output: string; }
Boolean: { input: boolean; output: boolean; }
Int: { input: number; output: number; }
@@ -381,6 +381,10 @@ export type ClosureItem = {
  terraform?: Maybe<Terraform>;
};
+export enum CloudProvider {
+ Aws = 'AWS'
+}
+
export type CloudShell = {
__typename?: 'CloudShell';
aesKey: Scalars['String']['output'];
@@ -552,6 +556,78 @@ export type ConsentRequest = {
  skip?: Maybe<Scalars['Boolean']['output']>;
};
+export type ConsoleConfigurationUpdateAttributes = {
+  encryptionKey?: InputMaybe<Scalars['String']['input']>;
+};
+
+export type ConsoleInstance = {
+  __typename?: 'ConsoleInstance';
+  /** the cloud provider hosting this instance */
+  cloud: CloudProvider;
+  console?: Maybe<Cluster>;
+  /** the time this instance was deleted on */
+  deletedAt?: Maybe<Scalars['DateTime']['output']>;
+  id: Scalars['ID']['output'];
+  insertedAt?: Maybe<Scalars['DateTime']['output']>;
+  /** the name of this instance (globally unique) */
+  name: Scalars['String']['output'];
+  owner?: Maybe<User>;
+  /** the region this instance is hosted in */
+  region: Scalars['String']['output'];
+  /** the heuristic size of this instance */
+  size: ConsoleSize;
+  /** the provisioning status of this instance, liveness is fetched through the console field */
+  status: ConsoleInstanceStatus;
+  /** the subdomain this instance lives under */
+  subdomain: Scalars['String']['output'];
+  updatedAt?: Maybe<Scalars['DateTime']['output']>;
+  /** full console url of this instance */
+  url: Scalars['String']['output'];
+};
+
+export type ConsoleInstanceAttributes = {
+ /** the cloud provider to deploy to */
+ cloud: CloudProvider;
+ /** the name of this instance (globally unique) */
+ name: Scalars['String']['input'];
+ /** the region to deploy to (provider specific) */
+ region: Scalars['String']['input'];
+ /** a heuristic size of this instance */
+ size: ConsoleSize;
+};
+
+export type ConsoleInstanceConnection = {
+ __typename?: 'ConsoleInstanceConnection';
+  edges?: Maybe<Array<Maybe<ConsoleInstanceEdge>>>;
+ pageInfo: PageInfo;
+};
+
+export type ConsoleInstanceEdge = {
+ __typename?: 'ConsoleInstanceEdge';
+  cursor?: Maybe<Scalars['String']['output']>;
+  node?: Maybe<ConsoleInstance>;
+};
+
+export enum ConsoleInstanceStatus {
+ DatabaseCreated = 'DATABASE_CREATED',
+ DatabaseDeleted = 'DATABASE_DELETED',
+ DeploymentCreated = 'DEPLOYMENT_CREATED',
+ DeploymentDeleted = 'DEPLOYMENT_DELETED',
+ Pending = 'PENDING',
+ Provisioned = 'PROVISIONED'
+}
+
+export type ConsoleInstanceUpdateAttributes = {
+  configuration?: InputMaybe<ConsoleConfigurationUpdateAttributes>;
+  size?: InputMaybe<ConsoleSize>;
+};
+
+export enum ConsoleSize {
+ Large = 'LARGE',
+ Medium = 'MEDIUM',
+ Small = 'SMALL'
+}
+
export type ContextAttributes = {
  buckets?: InputMaybe<Array<InputMaybe<Scalars['String']['input']>>>;
configuration: Scalars['Map']['input'];
@@ -2699,6 +2775,7 @@ export type RootMutationType = {
  createCluster?: Maybe<Cluster>;
  /** adds a dependency for this cluster to gate future upgrades */
  createClusterDependency?: Maybe<ClusterDependency>;
+  createConsoleInstance?: Maybe<ConsoleInstance>;
  createCrd?: Maybe<Crd>;
  createDemoProject?: Maybe<DemoProject>;
  createDnsRecord?: Maybe<DnsRecord>;
@@ -2743,6 +2820,7 @@ export type RootMutationType = {
  deleteCluster?: Maybe<Cluster>;
  /** deletes a dependency for this cluster and potentially disables promotions entirely */
  deleteClusterDependency?: Maybe<ClusterDependency>;
+  deleteConsoleInstance?: Maybe<ConsoleInstance>;
  deleteDemoProject?: Maybe<DemoProject>;
  deleteDnsRecord?: Maybe<DnsRecord>;
  deleteDomain?: Maybe<DnsDomain>;
@@ -2815,6 +2893,7 @@ export type RootMutationType = {
  updateAccount?: Maybe<Account>;
  updateChart?: Maybe<Chart>;
  updateChartInstallation?: Maybe<ChartInstallation>;
+  updateConsoleInstance?: Maybe<ConsoleInstance>;
  updateDockerRepository?: Maybe<DockerRepository>;
  updateDomain?: Maybe<DnsDomain>;
  updateGroup?: Maybe<Group>;
@@ -2889,6 +2968,11 @@ export type RootMutationTypeCreateClusterDependencyArgs = {
};
+export type RootMutationTypeCreateConsoleInstanceArgs = {
+ attributes: ConsoleInstanceAttributes;
+};
+
+
export type RootMutationTypeCreateCrdArgs = {
attributes: CrdAttributes;
  chartId?: InputMaybe<Scalars['ID']['input']>;
@@ -3116,6 +3200,11 @@ export type RootMutationTypeDeleteClusterDependencyArgs = {
};
+export type RootMutationTypeDeleteConsoleInstanceArgs = {
+ id: Scalars['ID']['input'];
+};
+
+
export type RootMutationTypeDeleteDnsRecordArgs = {
name: Scalars['String']['input'];
type: DnsRecordType;
@@ -3455,6 +3544,12 @@ export type RootMutationTypeUpdateChartInstallationArgs = {
};
+export type RootMutationTypeUpdateConsoleInstanceArgs = {
+ attributes: ConsoleInstanceUpdateAttributes;
+ id: Scalars['ID']['input'];
+};
+
+
export type RootMutationTypeUpdateDockerRepositoryArgs = {
attributes: DockerRepositoryAttributes;
id: Scalars['ID']['input'];
@@ -3627,6 +3722,8 @@ export type RootQueryType = {
/** Get a list of clusters owned by the current account. */
  clusters?: Maybe<ClusterConnection>;
  configuration?: Maybe<PluralConfiguration>;
+  consoleInstance?: Maybe<ConsoleInstance>;
+  consoleInstances?: Maybe<ConsoleInstanceConnection>;
  deferredUpdates?: Maybe<DeferredUpdateConnection>;
  demoProject?: Maybe<DemoProject>;
  dnsDomain?: Maybe<DnsDomain>;
@@ -3771,6 +3868,19 @@ export type RootQueryTypeClustersArgs = {
};
+export type RootQueryTypeConsoleInstanceArgs = {
+ id: Scalars['ID']['input'];
+};
+
+
+export type RootQueryTypeConsoleInstancesArgs = {
+  after?: InputMaybe<Scalars['String']['input']>;
+  before?: InputMaybe<Scalars['String']['input']>;
+  first?: InputMaybe<Scalars['Int']['input']>;
+  last?: InputMaybe<Scalars['Int']['input']>;
+};
+
+
export type RootQueryTypeDeferredUpdatesArgs = {
  after?: InputMaybe<Scalars['String']['input']>;
  before?: InputMaybe<Scalars['String']['input']>;
@@ -7430,8 +7540,13 @@ export function useListArtifactsLazyQuery(baseOptions?: Apollo.LazyQueryHookOpti
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<ListArtifactsQuery, ListArtifactsQueryVariables>(ListArtifactsDocument, options);
}
+export function useListArtifactsSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<ListArtifactsQuery, ListArtifactsQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<ListArtifactsQuery, ListArtifactsQueryVariables>(ListArtifactsDocument, options);
+ }
export type ListArtifactsQueryHookResult = ReturnType<typeof useListArtifactsQuery>;
export type ListArtifactsLazyQueryHookResult = ReturnType<typeof useListArtifactsLazyQuery>;
+export type ListArtifactsSuspenseQueryHookResult = ReturnType<typeof useListArtifactsSuspenseQuery>;
export type ListArtifactsQueryResult = Apollo.QueryResult<ListArtifactsQuery, ListArtifactsQueryVariables>;
export const CreateArtifactDocument = gql`
mutation CreateArtifact($repoName: String!, $name: String!, $readme: String!, $artifactType: String!, $platform: String!, $blob: UploadOrUrl!, $arch: String) {
@@ -7511,8 +7626,13 @@ export function useGetChartsLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions<
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<GetChartsQuery, GetChartsQueryVariables>(GetChartsDocument, options);
}
+export function useGetChartsSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<GetChartsQuery, GetChartsQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<GetChartsQuery, GetChartsQueryVariables>(GetChartsDocument, options);
+ }
export type GetChartsQueryHookResult = ReturnType<typeof useGetChartsQuery>;
export type GetChartsLazyQueryHookResult = ReturnType<typeof useGetChartsLazyQuery>;
+export type GetChartsSuspenseQueryHookResult = ReturnType<typeof useGetChartsSuspenseQuery>;
export type GetChartsQueryResult = Apollo.QueryResult<GetChartsQuery, GetChartsQueryVariables>;
export const GetVersionsDocument = gql`
query GetVersions($id: ID!) {
@@ -7550,8 +7670,13 @@ export function useGetVersionsLazyQuery(baseOptions?: Apollo.LazyQueryHookOption
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<GetVersionsQuery, GetVersionsQueryVariables>(GetVersionsDocument, options);
}
+export function useGetVersionsSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<GetVersionsQuery, GetVersionsQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<GetVersionsQuery, GetVersionsQueryVariables>(GetVersionsDocument, options);
+ }
export type GetVersionsQueryHookResult = ReturnType<typeof useGetVersionsQuery>;
export type GetVersionsLazyQueryHookResult = ReturnType<typeof useGetVersionsLazyQuery>;
+export type GetVersionsSuspenseQueryHookResult = ReturnType<typeof useGetVersionsSuspenseQuery>;
export type GetVersionsQueryResult = Apollo.QueryResult<GetVersionsQuery, GetVersionsQueryVariables>;
export const GetChartInstallationsDocument = gql`
query GetChartInstallations($id: ID!) {
@@ -7589,8 +7714,13 @@ export function useGetChartInstallationsLazyQuery(baseOptions?: Apollo.LazyQuery
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<GetChartInstallationsQuery, GetChartInstallationsQueryVariables>(GetChartInstallationsDocument, options);
}
+export function useGetChartInstallationsSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<GetChartInstallationsQuery, GetChartInstallationsQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<GetChartInstallationsQuery, GetChartInstallationsQueryVariables>(GetChartInstallationsDocument, options);
+ }
export type GetChartInstallationsQueryHookResult = ReturnType<typeof useGetChartInstallationsQuery>;
export type GetChartInstallationsLazyQueryHookResult = ReturnType<typeof useGetChartInstallationsLazyQuery>;
+export type GetChartInstallationsSuspenseQueryHookResult = ReturnType<typeof useGetChartInstallationsSuspenseQuery>;
export type GetChartInstallationsQueryResult = Apollo.QueryResult<GetChartInstallationsQuery, GetChartInstallationsQueryVariables>;
export const GetPackageInstallationsDocument = gql`
query GetPackageInstallations($id: ID!) {
@@ -7636,8 +7766,13 @@ export function useGetPackageInstallationsLazyQuery(baseOptions?: Apollo.LazyQue
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<GetPackageInstallationsQuery, GetPackageInstallationsQueryVariables>(GetPackageInstallationsDocument, options);
}
+export function useGetPackageInstallationsSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<GetPackageInstallationsQuery, GetPackageInstallationsQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<GetPackageInstallationsQuery, GetPackageInstallationsQueryVariables>(GetPackageInstallationsDocument, options);
+ }
export type GetPackageInstallationsQueryHookResult = ReturnType<typeof useGetPackageInstallationsQuery>;
export type GetPackageInstallationsLazyQueryHookResult = ReturnType<typeof useGetPackageInstallationsLazyQuery>;
+export type GetPackageInstallationsSuspenseQueryHookResult = ReturnType<typeof useGetPackageInstallationsSuspenseQuery>;
export type GetPackageInstallationsQueryResult = Apollo.QueryResult<GetPackageInstallationsQuery, GetPackageInstallationsQueryVariables>;
export const CreateCrdDocument = gql`
mutation CreateCrd($chartName: ChartName!, $name: String!, $blob: UploadOrUrl!) {
@@ -7785,8 +7920,13 @@ export function useClustersLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions<
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<ClustersQuery, ClustersQueryVariables>(ClustersDocument, options);
}
+export function useClustersSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<ClustersQuery, ClustersQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<ClustersQuery, ClustersQueryVariables>(ClustersDocument, options);
+ }
export type ClustersQueryHookResult = ReturnType<typeof useClustersQuery>;
export type ClustersLazyQueryHookResult = ReturnType<typeof useClustersLazyQuery>;
+export type ClustersSuspenseQueryHookResult = ReturnType<typeof useClustersSuspenseQuery>;
export type ClustersQueryResult = Apollo.QueryResult<ClustersQuery, ClustersQueryVariables>;
export const GetDnsRecordsDocument = gql`
query GetDnsRecords($cluster: String!, $provider: Provider!) {
@@ -7825,8 +7965,13 @@ export function useGetDnsRecordsLazyQuery(baseOptions?: Apollo.LazyQueryHookOpti
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<GetDnsRecordsQuery, GetDnsRecordsQueryVariables>(GetDnsRecordsDocument, options);
}
+export function useGetDnsRecordsSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<GetDnsRecordsQuery, GetDnsRecordsQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<GetDnsRecordsQuery, GetDnsRecordsQueryVariables>(GetDnsRecordsDocument, options);
+ }
export type GetDnsRecordsQueryHookResult = ReturnType<typeof useGetDnsRecordsQuery>;
export type GetDnsRecordsLazyQueryHookResult = ReturnType<typeof useGetDnsRecordsLazyQuery>;
+export type GetDnsRecordsSuspenseQueryHookResult = ReturnType<typeof useGetDnsRecordsSuspenseQuery>;
export type GetDnsRecordsQueryResult = Apollo.QueryResult<GetDnsRecordsQuery, GetDnsRecordsQueryVariables>;
export const CreateDnsRecordDocument = gql`
mutation CreateDnsRecord($cluster: String!, $provider: Provider!, $attributes: DnsRecordAttributes!) {
@@ -7971,8 +8116,13 @@ export function useGroupMembersLazyQuery(baseOptions?: Apollo.LazyQueryHookOptio
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<GroupMembersQuery, GroupMembersQueryVariables>(GroupMembersDocument, options);
}
+export function useGroupMembersSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<GroupMembersQuery, GroupMembersQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<GroupMembersQuery, GroupMembersQueryVariables>(GroupMembersDocument, options);
+ }
export type GroupMembersQueryHookResult = ReturnType<typeof useGroupMembersQuery>;
export type GroupMembersLazyQueryHookResult = ReturnType<typeof useGroupMembersLazyQuery>;
+export type GroupMembersSuspenseQueryHookResult = ReturnType<typeof useGroupMembersSuspenseQuery>;
export type GroupMembersQueryResult = Apollo.QueryResult<GroupMembersQuery, GroupMembersQueryVariables>;
export const CreateGroupMemberDocument = gql`
mutation CreateGroupMember($groupId: ID!, $userId: ID!) {
@@ -8183,8 +8333,13 @@ export function useGroupsLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions<
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<GroupsQuery, GroupsQueryVariables>(GroupsDocument, options);
}
+export function useGroupsSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<GroupsQuery, GroupsQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<GroupsQuery, GroupsQueryVariables>(GroupsDocument, options);
+ }
export type GroupsQueryHookResult = ReturnType<typeof useGroupsQuery>;
export type GroupsLazyQueryHookResult = ReturnType<typeof useGroupsLazyQuery>;
+export type GroupsSuspenseQueryHookResult = ReturnType<typeof useGroupsSuspenseQuery>;
export type GroupsQueryResult = Apollo.QueryResult<GroupsQuery, GroupsQueryVariables>;
export const SignupInviteDocument = gql`
mutation SignupInvite($attributes: UserAttributes!, $inviteId: String!) {
@@ -8297,8 +8452,13 @@ export function useInviteLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions<
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<InviteQuery, InviteQueryVariables>(InviteDocument, options);
}
+export function useInviteSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<InviteQuery, InviteQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<InviteQuery, InviteQueryVariables>(InviteDocument, options);
+ }
export type InviteQueryHookResult = ReturnType<typeof useInviteQuery>;
export type InviteLazyQueryHookResult = ReturnType<typeof useInviteLazyQuery>;
+export type InviteSuspenseQueryHookResult = ReturnType<typeof useInviteSuspenseQuery>;
export type InviteQueryResult = Apollo.QueryResult<InviteQuery, InviteQueryVariables>;
export const CreateInviteDocument = gql`
mutation CreateInvite($attributes: InviteAttributes!) {
@@ -8368,8 +8528,13 @@ export function useKeyBackupsLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<KeyBackupsQuery, KeyBackupsQueryVariables>(KeyBackupsDocument, options);
}
+export function useKeyBackupsSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<KeyBackupsQuery, KeyBackupsQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<KeyBackupsQuery, KeyBackupsQueryVariables>(KeyBackupsDocument, options);
+ }
export type KeyBackupsQueryHookResult = ReturnType<typeof useKeyBackupsQuery>;
export type KeyBackupsLazyQueryHookResult = ReturnType<typeof useKeyBackupsLazyQuery>;
+export type KeyBackupsSuspenseQueryHookResult = ReturnType<typeof useKeyBackupsSuspenseQuery>;
export type KeyBackupsQueryResult = Apollo.QueryResult<KeyBackupsQuery, KeyBackupsQueryVariables>;
export const KeyBackupDocument = gql`
query KeyBackup($name: String!) {
@@ -8403,8 +8568,13 @@ export function useKeyBackupLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions<
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<KeyBackupQuery, KeyBackupQueryVariables>(KeyBackupDocument, options);
}
+export function useKeyBackupSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<KeyBackupQuery, KeyBackupQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<KeyBackupQuery, KeyBackupQueryVariables>(KeyBackupDocument, options);
+ }
export type KeyBackupQueryHookResult = ReturnType<typeof useKeyBackupQuery>;
export type KeyBackupLazyQueryHookResult = ReturnType<typeof useKeyBackupLazyQuery>;
+export type KeyBackupSuspenseQueryHookResult = ReturnType<typeof useKeyBackupSuspenseQuery>;
export type KeyBackupQueryResult = Apollo.QueryResult<KeyBackupQuery, KeyBackupQueryVariables>;
export const DeleteKeyBackupDocument = gql`
mutation DeleteKeyBackup($name: String!) {
@@ -8503,8 +8673,13 @@ export function useSubscriptionLazyQuery(baseOptions?: Apollo.LazyQueryHookOptio
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<SubscriptionQuery, SubscriptionQueryVariables>(SubscriptionDocument, options);
}
+export function useSubscriptionSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<SubscriptionQuery, SubscriptionQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<SubscriptionQuery, SubscriptionQueryVariables>(SubscriptionDocument, options);
+ }
export type SubscriptionQueryHookResult = ReturnType<typeof useSubscriptionQuery>;
export type SubscriptionLazyQueryHookResult = ReturnType<typeof useSubscriptionLazyQuery>;
+export type SubscriptionSuspenseQueryHookResult = ReturnType<typeof useSubscriptionSuspenseQuery>;
export type SubscriptionQueryResult = Apollo.QueryResult<SubscriptionQuery, SubscriptionQueryVariables>;
export const UpdateAccountBillingDocument = gql`
mutation UpdateAccountBilling($attributes: AccountAttributes!) {
@@ -8655,8 +8830,13 @@ export function useCardsLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions<
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<CardsQuery, CardsQueryVariables>(CardsDocument, options);
}
+export function useCardsSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<CardsQuery, CardsQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<CardsQuery, CardsQueryVariables>(CardsDocument, options);
+ }
export type CardsQueryHookResult = ReturnType<typeof useCardsQuery>;
export type CardsLazyQueryHookResult = ReturnType<typeof useCardsLazyQuery>;
+export type CardsSuspenseQueryHookResult = ReturnType<typeof useCardsSuspenseQuery>;
export type CardsQueryResult = Apollo.QueryResult<CardsQuery, CardsQueryVariables>;
export const SetupIntentDocument = gql`
mutation SetupIntent($address: AddressAttributes!) {
@@ -8790,8 +8970,13 @@ export function useInvoicesLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions<
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<InvoicesQuery, InvoicesQueryVariables>(InvoicesDocument, options);
}
+export function useInvoicesSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<InvoicesQuery, InvoicesQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<InvoicesQuery, InvoicesQueryVariables>(InvoicesDocument, options);
+ }
export type InvoicesQueryHookResult = ReturnType<typeof useInvoicesQuery>;
export type InvoicesLazyQueryHookResult = ReturnType<typeof useInvoicesLazyQuery>;
+export type InvoicesSuspenseQueryHookResult = ReturnType<typeof useInvoicesSuspenseQuery>;
export type InvoicesQueryResult = Apollo.QueryResult<InvoicesQuery, InvoicesQueryVariables>;
export const GetRecipeDocument = gql`
query GetRecipe($repo: String, $name: String) {
@@ -8829,8 +9014,13 @@ export function useGetRecipeLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions<
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<GetRecipeQuery, GetRecipeQueryVariables>(GetRecipeDocument, options);
}
+export function useGetRecipeSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<GetRecipeQuery, GetRecipeQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<GetRecipeQuery, GetRecipeQueryVariables>(GetRecipeDocument, options);
+ }
export type GetRecipeQueryHookResult = ReturnType<typeof useGetRecipeQuery>;
export type GetRecipeLazyQueryHookResult = ReturnType<typeof useGetRecipeLazyQuery>;
+export type GetRecipeSuspenseQueryHookResult = ReturnType<typeof useGetRecipeSuspenseQuery>;
export type GetRecipeQueryResult = Apollo.QueryResult<GetRecipeQuery, GetRecipeQueryVariables>;
export const ListRecipesDocument = gql`
query ListRecipes($repositoryName: String, $provider: Provider) {
@@ -8869,8 +9059,13 @@ export function useListRecipesLazyQuery(baseOptions?: Apollo.LazyQueryHookOption
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<ListRecipesQuery, ListRecipesQueryVariables>(ListRecipesDocument, options);
}
+export function useListRecipesSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<ListRecipesQuery, ListRecipesQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<ListRecipesQuery, ListRecipesQueryVariables>(ListRecipesDocument, options);
+ }
export type ListRecipesQueryHookResult = ReturnType<typeof useListRecipesQuery>;
export type ListRecipesLazyQueryHookResult = ReturnType<typeof useListRecipesLazyQuery>;
+export type ListRecipesSuspenseQueryHookResult = ReturnType<typeof useListRecipesSuspenseQuery>;
export type ListRecipesQueryResult = Apollo.QueryResult<ListRecipesQuery, ListRecipesQueryVariables>;
export const CreateRecipeDocument = gql`
mutation CreateRecipe($name: String!, $attributes: RecipeAttributes!) {
@@ -9005,8 +9200,13 @@ export function useGetStackLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions<
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<GetStackQuery, GetStackQueryVariables>(GetStackDocument, options);
}
+export function useGetStackSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<GetStackQuery, GetStackQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<GetStackQuery, GetStackQueryVariables>(GetStackDocument, options);
+ }
export type GetStackQueryHookResult = ReturnType<typeof useGetStackQuery>;
export type GetStackLazyQueryHookResult = ReturnType<typeof useGetStackLazyQuery>;
+export type GetStackSuspenseQueryHookResult = ReturnType<typeof useGetStackSuspenseQuery>;
export type GetStackQueryResult = Apollo.QueryResult<GetStackQuery, GetStackQueryVariables>;
export const ListStacksDocument = gql`
query ListStacks($featured: Boolean, $cursor: String) {
@@ -9045,8 +9245,13 @@ export function useListStacksLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<ListStacksQuery, ListStacksQueryVariables>(ListStacksDocument, options);
}
+export function useListStacksSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<ListStacksQuery, ListStacksQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<ListStacksQuery, ListStacksQueryVariables>(ListStacksDocument, options);
+ }
export type ListStacksQueryHookResult = ReturnType<typeof useListStacksQuery>;
export type ListStacksLazyQueryHookResult = ReturnType<typeof useListStacksLazyQuery>;
+export type ListStacksSuspenseQueryHookResult = ReturnType<typeof useListStacksSuspenseQuery>;
export type ListStacksQueryResult = Apollo.QueryResult<ListStacksQuery, ListStacksQueryVariables>;
export const RepositoryDocument = gql`
query Repository($id: ID, $name: String) {
@@ -9116,8 +9321,13 @@ export function useRepositoryLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions
const options = {...defaultOptions, ...baseOptions}
return Apollo.useLazyQuery<RepositoryQuery, RepositoryQueryVariables>(RepositoryDocument, options);
}
+export function useRepositorySuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<RepositoryQuery, RepositoryQueryVariables>) {
+ const options = {...defaultOptions, ...baseOptions}
+ return Apollo.useSuspenseQuery<RepositoryQuery, RepositoryQueryVariables>(RepositoryDocument, options);
+ }
export type RepositoryQueryHookResult = ReturnType<typeof useRepositoryQuery>;
export type RepositoryLazyQueryHookResult = ReturnType<typeof useRepositoryLazyQuery>;
+export type RepositorySuspenseQueryHookResult = ReturnType<typeof useRepositorySuspenseQuery>;
export type RepositoryQueryResult = Apollo.QueryResult<RepositoryQuery, RepositoryQueryVariables>;
export const CreateResourceDefinitionDocument = gql`
mutation CreateResourceDefinition($name: String!, $input: ResourceDefinitionAttributes!) {
@@ -9399,8 +9609,13 @@ export function useMarketplaceRepositoriesLazyQuery(baseOptions?: Apollo.LazyQue
const options = {...defaultOptions, ...baseOptions}
  return Apollo.useLazyQuery<MarketplaceRepositoriesQuery, MarketplaceRepositoriesQueryVariables>(MarketplaceRepositoriesDocument, options);
}
+export function useMarketplaceRepositoriesSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<MarketplaceRepositoriesQuery, MarketplaceRepositoriesQueryVariables>) {
+  const options = {...defaultOptions, ...baseOptions}
+  return Apollo.useSuspenseQuery<MarketplaceRepositoriesQuery, MarketplaceRepositoriesQueryVariables>(MarketplaceRepositoriesDocument, options);
+  }
export type MarketplaceRepositoriesQueryHookResult = ReturnType<typeof useMarketplaceRepositoriesQuery>;
export type MarketplaceRepositoriesLazyQueryHookResult = ReturnType<typeof useMarketplaceRepositoriesLazyQuery>;
+export type MarketplaceRepositoriesSuspenseQueryHookResult = ReturnType<typeof useMarketplaceRepositoriesSuspenseQuery>;
export type MarketplaceRepositoriesQueryResult = Apollo.QueryResult<MarketplaceRepositoriesQuery, MarketplaceRepositoriesQueryVariables>;
export const ScaffoldsDocument = gql`
query Scaffolds($app: String!, $pub: String!, $cat: Category!, $ing: Boolean, $pg: Boolean) {
@@ -9445,8 +9660,13 @@ export function useScaffoldsLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions<
const options = {...defaultOptions, ...baseOptions}
  return Apollo.useLazyQuery<ScaffoldsQuery, ScaffoldsQueryVariables>(ScaffoldsDocument, options);
}
+export function useScaffoldsSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<ScaffoldsQuery, ScaffoldsQueryVariables>) {
+  const options = {...defaultOptions, ...baseOptions}
+  return Apollo.useSuspenseQuery<ScaffoldsQuery, ScaffoldsQueryVariables>(ScaffoldsDocument, options);
+  }
export type ScaffoldsQueryHookResult = ReturnType<typeof useScaffoldsQuery>;
export type ScaffoldsLazyQueryHookResult = ReturnType<typeof useScaffoldsLazyQuery>;
+export type ScaffoldsSuspenseQueryHookResult = ReturnType<typeof useScaffoldsSuspenseQuery>;
export type ScaffoldsQueryResult = Apollo.QueryResult<ScaffoldsQuery, ScaffoldsQueryVariables>;
export const DeleteRepositoryDocument = gql`
mutation DeleteRepository($id: ID!) {
@@ -9542,8 +9762,13 @@ export function useGetTfProvidersLazyQuery(baseOptions?: Apollo.LazyQueryHookOpt
const options = {...defaultOptions, ...baseOptions}
  return Apollo.useLazyQuery<GetTfProvidersQuery, GetTfProvidersQueryVariables>(GetTfProvidersDocument, options);
}
+export function useGetTfProvidersSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<GetTfProvidersQuery, GetTfProvidersQueryVariables>) {
+  const options = {...defaultOptions, ...baseOptions}
+  return Apollo.useSuspenseQuery<GetTfProvidersQuery, GetTfProvidersQueryVariables>(GetTfProvidersDocument, options);
+  }
export type GetTfProvidersQueryHookResult = ReturnType<typeof useGetTfProvidersQuery>;
export type GetTfProvidersLazyQueryHookResult = ReturnType<typeof useGetTfProvidersLazyQuery>;
+export type GetTfProvidersSuspenseQueryHookResult = ReturnType<typeof useGetTfProvidersSuspenseQuery>;
export type GetTfProvidersQueryResult = Apollo.QueryResult<GetTfProvidersQuery, GetTfProvidersQueryVariables>;
export const GetTfProviderScaffoldDocument = gql`
query GetTfProviderScaffold($name: Provider!, $vsn: String) {
@@ -9579,8 +9804,13 @@ export function useGetTfProviderScaffoldLazyQuery(baseOptions?: Apollo.LazyQuery
const options = {...defaultOptions, ...baseOptions}
  return Apollo.useLazyQuery<GetTfProviderScaffoldQuery, GetTfProviderScaffoldQueryVariables>(GetTfProviderScaffoldDocument, options);
}
+export function useGetTfProviderScaffoldSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<GetTfProviderScaffoldQuery, GetTfProviderScaffoldQueryVariables>) {
+  const options = {...defaultOptions, ...baseOptions}
+  return Apollo.useSuspenseQuery<GetTfProviderScaffoldQuery, GetTfProviderScaffoldQueryVariables>(GetTfProviderScaffoldDocument, options);
+  }
export type GetTfProviderScaffoldQueryHookResult = ReturnType<typeof useGetTfProviderScaffoldQuery>;
export type GetTfProviderScaffoldLazyQueryHookResult = ReturnType<typeof useGetTfProviderScaffoldLazyQuery>;
+export type GetTfProviderScaffoldSuspenseQueryHookResult = ReturnType<typeof useGetTfProviderScaffoldSuspenseQuery>;
export type GetTfProviderScaffoldQueryResult = Apollo.QueryResult<GetTfProviderScaffoldQuery, GetTfProviderScaffoldQueryVariables>;
export const GetShellDocument = gql`
query GetShell {
@@ -9613,8 +9843,13 @@ export function useGetShellLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions<G
  const options = {...defaultOptions, ...baseOptions}
  return Apollo.useLazyQuery<GetShellQuery, GetShellQueryVariables>(GetShellDocument, options);
}
+export function useGetShellSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<GetShellQuery, GetShellQueryVariables>) {
+  const options = {...defaultOptions, ...baseOptions}
+  return Apollo.useSuspenseQuery<GetShellQuery, GetShellQueryVariables>(GetShellDocument, options);
+  }
export type GetShellQueryHookResult = ReturnType<typeof useGetShellQuery>;
export type GetShellLazyQueryHookResult = ReturnType<typeof useGetShellLazyQuery>;
+export type GetShellSuspenseQueryHookResult = ReturnType<typeof useGetShellSuspenseQuery>;
export type GetShellQueryResult = Apollo.QueryResult<GetShellQuery, GetShellQueryVariables>;
export const DeleteShellDocument = gql`
mutation DeleteShell {
@@ -9684,8 +9919,13 @@ export function useGetTerraformLazyQuery(baseOptions?: Apollo.LazyQueryHookOptio
const options = {...defaultOptions, ...baseOptions}
  return Apollo.useLazyQuery<GetTerraformQuery, GetTerraformQueryVariables>(GetTerraformDocument, options);
}
+export function useGetTerraformSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<GetTerraformQuery, GetTerraformQueryVariables>) {
+  const options = {...defaultOptions, ...baseOptions}
+  return Apollo.useSuspenseQuery<GetTerraformQuery, GetTerraformQueryVariables>(GetTerraformDocument, options);
+  }
export type GetTerraformQueryHookResult = ReturnType<typeof useGetTerraformQuery>;
export type GetTerraformLazyQueryHookResult = ReturnType<typeof useGetTerraformLazyQuery>;
+export type GetTerraformSuspenseQueryHookResult = ReturnType<typeof useGetTerraformSuspenseQuery>;
export type GetTerraformQueryResult = Apollo.QueryResult<GetTerraformQuery, GetTerraformQueryVariables>;
export const GetTerraformInstallationsDocument = gql`
query GetTerraformInstallations($id: ID!) {
@@ -9723,8 +9963,13 @@ export function useGetTerraformInstallationsLazyQuery(baseOptions?: Apollo.LazyQ
const options = {...defaultOptions, ...baseOptions}
  return Apollo.useLazyQuery<GetTerraformInstallationsQuery, GetTerraformInstallationsQueryVariables>(GetTerraformInstallationsDocument, options);
}
+export function useGetTerraformInstallationsSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<GetTerraformInstallationsQuery, GetTerraformInstallationsQueryVariables>) {
+  const options = {...defaultOptions, ...baseOptions}
+  return Apollo.useSuspenseQuery<GetTerraformInstallationsQuery, GetTerraformInstallationsQueryVariables>(GetTerraformInstallationsDocument, options);
+  }
export type GetTerraformInstallationsQueryHookResult = ReturnType<typeof useGetTerraformInstallationsQuery>;
export type GetTerraformInstallationsLazyQueryHookResult = ReturnType<typeof useGetTerraformInstallationsLazyQuery>;
+export type GetTerraformInstallationsSuspenseQueryHookResult = ReturnType<typeof useGetTerraformInstallationsSuspenseQuery>;
export type GetTerraformInstallationsQueryResult = Apollo.QueryResult<GetTerraformInstallationsQuery, GetTerraformInstallationsQueryVariables>;
export const UploadTerraformDocument = gql`
mutation UploadTerraform($repoName: String!, $name: String!, $uploadOrUrl: UploadOrUrl!) {
@@ -9998,8 +10243,13 @@ export function useMeLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions<MeQuery
  const options = {...defaultOptions, ...baseOptions}
  return Apollo.useLazyQuery<MeQuery, MeQueryVariables>(MeDocument, options);
}
+export function useMeSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<MeQuery, MeQueryVariables>) {
+  const options = {...defaultOptions, ...baseOptions}
+  return Apollo.useSuspenseQuery<MeQuery, MeQueryVariables>(MeDocument, options);
+  }
export type MeQueryHookResult = ReturnType<typeof useMeQuery>;
export type MeLazyQueryHookResult = ReturnType<typeof useMeLazyQuery>;
+export type MeSuspenseQueryHookResult = ReturnType<typeof useMeSuspenseQuery>;
export type MeQueryResult = Apollo.QueryResult<MeQuery, MeQueryVariables>;
export const GetLoginMethodDocument = gql`
query GetLoginMethod($email: String!) {
@@ -10034,8 +10284,13 @@ export function useGetLoginMethodLazyQuery(baseOptions?: Apollo.LazyQueryHookOpt
const options = {...defaultOptions, ...baseOptions}
  return Apollo.useLazyQuery<GetLoginMethodQuery, GetLoginMethodQueryVariables>(GetLoginMethodDocument, options);
}
+export function useGetLoginMethodSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<GetLoginMethodQuery, GetLoginMethodQueryVariables>) {
+  const options = {...defaultOptions, ...baseOptions}
+  return Apollo.useSuspenseQuery<GetLoginMethodQuery, GetLoginMethodQueryVariables>(GetLoginMethodDocument, options);
+  }
export type GetLoginMethodQueryHookResult = ReturnType<typeof useGetLoginMethodQuery>;
export type GetLoginMethodLazyQueryHookResult = ReturnType<typeof useGetLoginMethodLazyQuery>;
+export type GetLoginMethodSuspenseQueryHookResult = ReturnType<typeof useGetLoginMethodSuspenseQuery>;
export type GetLoginMethodQueryResult = Apollo.QueryResult<GetLoginMethodQuery, GetLoginMethodQueryVariables>;
export const ListTokensDocument = gql`
query ListTokens {
@@ -10072,8 +10327,13 @@ export function useListTokensLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions
const options = {...defaultOptions, ...baseOptions}
  return Apollo.useLazyQuery<ListTokensQuery, ListTokensQueryVariables>(ListTokensDocument, options);
}
+export function useListTokensSuspenseQuery(baseOptions?: Apollo.SuspenseQueryHookOptions<ListTokensQuery, ListTokensQueryVariables>) {