From e4eb23bf5bbf284fd422ab0c30a2fd3b46059cf9 Mon Sep 17 00:00:00 2001
From: "Tony Ibbs (Tibs)"
Date: Thu, 10 Nov 2022 10:40:45 +0000
Subject: [PATCH] Change developer.aiven.io to docs.aiven.io

---
 SEARCH.md                                                  | 2 +-
 conf.py                                                    | 2 +-
 docs/platform/concepts.rst                                 | 2 +-
 .../howto/rotating-grafana-service-credentials.rst         | 2 +-
 docs/products/kafka/howto/connect-with-command-line.rst    | 4 ++--
 docs/products/kafka/howto/connect-with-go.rst              | 2 +-
 docs/products/kafka/howto/connect-with-java.rst            | 2 +-
 docs/products/kafka/howto/connect-with-nodejs.rst          | 2 +-
 docs/products/kafka/howto/connect-with-python.rst          | 6 +++---
 docs/products/kafka/howto/flink-with-aiven-for-kafka.rst   | 2 +-
 docs/products/kafka/howto/ksql-docker.rst                  | 2 +-
 .../kafka-connect/concepts/list-of-connector-plugins.rst   | 2 +-
 .../kafka-connect/howto/debezium-source-connector-pg.rst   | 2 +-
 docs/products/mysql/concepts/mysql-backups.rst             | 2 +-
 docs/products/opensearch/howto/connect-with-nodejs.rst     | 2 +-
 .../howto/opensearch-aggregations-and-nodejs.rst           | 2 +-
 docs/products/postgresql/howto/migrate-using-bucardo.rst   | 2 +-
 .../postgresql/reference/high-cpu-load-of-pgbouncer.rst    | 2 +-
 docs/tools/terraform/get-started.rst                       | 4 ++--
 .../reference/cookbook/grafana-m3db-postgresql-recipe.rst  | 8 ++++----
 .../reference/cookbook/kafka-connect-terraform-recipe.rst  | 4 ++--
 .../reference/cookbook/kafka-custom-conf-recipe.rst        | 6 +++---
 .../reference/cookbook/kafka-debezium-postgres-source.rst  | 8 ++++----
 .../reference/cookbook/kafka-flink-integration-recipe.rst  | 6 +++---
 .../reference/cookbook/kafka-karapace-recipe.rst           | 2 +-
 .../reference/cookbook/kafka-mirrormaker-recipe.rst        | 4 ++--
 .../terraform/reference/cookbook/kafka-mongodb-recipe.rst  | 8 ++++----
 .../terraform/reference/cookbook/m3db-m3agg-recipe.rst     | 8 ++++----
 .../reference/cookbook/multicloud-postgresql-recipe.rst    | 4 ++--
 .../cookbook/postgresql-custom-configs-recipe.rst          | 4 ++--
 .../reference/cookbook/postgresql-read-replica-recipe.rst  | 6 +++---
 page_stats.py                                              | 2 +-
 robots.txt                                                 | 2 +-
 33 files changed, 59 insertions(+), 59 deletions(-)

diff --git a/SEARCH.md b/SEARCH.md
index 21c54ffc2a..b02244f61a 100644
--- a/SEARCH.md
+++ b/SEARCH.md
@@ -49,7 +49,7 @@ In addition to these `url` field should be provided with every document but it i
 
 ## Search function
 
-The OpenSearch® index is used through the Netlify function in [netlify/functions/search/search.js](netlify/functions/search/search.js). To call the function, make a GET request to the function URL and append your search term to the `query` parameter, like this: [https://developer.aiven.io/.netlify/functions/search?query=redis](https://developer.aiven.io/.netlify/functions/search?query=redis).
+The OpenSearch® index is used through the Netlify function in [netlify/functions/search/search.js](netlify/functions/search/search.js). To call the function, make a GET request to the function URL and append your search term to the `query` parameter, like this: [https://docs.aiven.io/.netlify/functions/search?query=redis](https://docs.aiven.io/.netlify/functions/search?query=redis).
 
 The query uses this overall approach:
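Illustrative aside: the SEARCH.md hunk above describes calling the Netlify search function by appending a search term to the `query` parameter of a GET request. A minimal sketch of such a call, assuming the third-party `requests` package and a JSON response (both assumptions, not stated in SEARCH.md), could look like this:

    # Sketch only: assumes the `requests` package is installed and that the
    # endpoint described in SEARCH.md returns JSON.
    import requests

    SEARCH_URL = "https://docs.aiven.io/.netlify/functions/search"

    def search_docs(term: str):
        """Send a GET request with the search term in the `query` parameter."""
        response = requests.get(SEARCH_URL, params={"query": term}, timeout=10)
        response.raise_for_status()
        return response.json()

    if __name__ == "__main__":
        print(search_docs("redis"))  # same example query as in SEARCH.md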
diff --git a/conf.py b/conf.py
index e0043a922d..99e52a4a2a 100644
--- a/conf.py
+++ b/conf.py
@@ -69,7 +69,7 @@ html_baseurl = 'https://docs.aiven.io'
 
 # Since we have `language='en'` set (further down) the URLs in the sitemap will
 # default to "{version}{lang}{link}", producing things like
-# https://developer.aiven.io/en/docs/platform/howto/create_authentication_token.html
+# https://docs.aiven.io/en/docs/platform/howto/create_authentication_token.html
 # That doesn't work because we do not produce pages with the `/en` in the URL.
 # We need to be explicit that we don't want {version} or {language} in the URLs
 sitemap_url_scheme = "{link}"
diff --git a/docs/platform/concepts.rst b/docs/platform/concepts.rst
index 4ae91079d2..f422705306 100644
--- a/docs/platform/concepts.rst
+++ b/docs/platform/concepts.rst
@@ -76,7 +76,7 @@ Learn about some of the key concepts for working with Aiven platform:
 
 .. We would like to use a :doc: role for this, but at 2022-08, vale will spell-check the URL if we do so, and complain about 'timeseries'
 
-.. _`Choosing a time series database`: https://developer.aiven.io/docs/platform/concepts/choosing-timeseries-database
+.. _`Choosing a time series database`: https://docs.aiven.io/docs/platform/concepts/choosing-timeseries-database
diff --git a/docs/products/grafana/howto/rotating-grafana-service-credentials.rst b/docs/products/grafana/howto/rotating-grafana-service-credentials.rst
index ee922999d5..5417fb130c 100644
--- a/docs/products/grafana/howto/rotating-grafana-service-credentials.rst
+++ b/docs/products/grafana/howto/rotating-grafana-service-credentials.rst
@@ -4,7 +4,7 @@ Rotating Grafana® service credentials
 In the interests of security, it is best practice to rotate credentials
 from time-to-time. For Grafana®, a few steps need to be performed manually
 to do this. You will need to have access to a web browser,
-and to have installed ``avn``, the `Aiven CLI tool `_.
+and to have installed ``avn``, the `Aiven CLI tool `_.
 
 1. In the web browser, go to the `Aiven Console `_ page for your Grafana service.
diff --git a/docs/products/kafka/howto/connect-with-command-line.rst b/docs/products/kafka/howto/connect-with-command-line.rst
index 0e1285a980..12f101f7c0 100644
--- a/docs/products/kafka/howto/connect-with-command-line.rst
+++ b/docs/products/kafka/howto/connect-with-command-line.rst
@@ -34,7 +34,7 @@ Variable Description
 .. _`the guide to set up properties to use the Apache Kafka toolbox`: toolbox_
 .. _`for Apache Kafka toolbox`: toolbox_
-.. _toolbox: https://developer.aiven.io/docs/products/kafka/howto/kafka-tools-config-file.html
+.. _toolbox: https://docs.aiven.io/docs/products/kafka/howto/kafka-tools-config-file.html
 
 In the command lines below, values in ``{`` and ``}`` are to be replaced - so ``{PORT}`` would be replaced by the appropriate port number, for instance ``12345``.
@@ -65,7 +65,7 @@ With ``kafka-avro-console-producer`` you can include the schema by connecting to
 1. The ``schema.registry.url`` value must be a full URL, typically starting with ``https://``
 2. Aiven's `Karapace `_ is an acceptable schema registry for this purpose.
-   See `Use Karapace with Aiven for Apache Kafka® `_ for how to enable it for your Aiven for Kafka service. The ``SCHEMA_REGISTRY_`` values for the command line can be found on the service Overview page, on the **Schema registry** tab.
+   See `Use Karapace with Aiven for Apache Kafka® `_ for how to enable it for your Aiven for Kafka service. The ``SCHEMA_REGISTRY_`` values for the command line can be found on the service Overview page, on the **Schema registry** tab.
 
 .. code::
diff --git a/docs/products/kafka/howto/connect-with-go.rst b/docs/products/kafka/howto/connect-with-go.rst
index 841af592a1..89f2e9a421 100644
--- a/docs/products/kafka/howto/connect-with-go.rst
+++ b/docs/products/kafka/howto/connect-with-go.rst
@@ -24,7 +24,7 @@ Go to the *Overview* page of your Aiven for Apache Kafka service.
 * If you are going to connect using SASL authentication:
 
-   #. Follow the instructions at `Use SASL Authentication with Apache Kafka® `_ to enable SASL.
+   #. Follow the instructions at `Use SASL Authentication with Apache Kafka® `_ to enable SASL.
 
    #. In the *Connection Information* section
diff --git a/docs/products/kafka/howto/connect-with-java.rst b/docs/products/kafka/howto/connect-with-java.rst
index e71d051197..9b2e6eb40f 100644
--- a/docs/products/kafka/howto/connect-with-java.rst
+++ b/docs/products/kafka/howto/connect-with-java.rst
@@ -25,7 +25,7 @@ Go to the *Overview* page of your Aiven for Apache Kafka service.
 * If you are going to connect using SASL authentication:
 
-   #. Follow the instructions at `Use SASL Authentication with Apache Kafka® `_ to enable SASL.
+   #. Follow the instructions at `Use SASL Authentication with Apache Kafka® `_ to enable SASL.
 
    #. In the *Connection Information* section
diff --git a/docs/products/kafka/howto/connect-with-nodejs.rst b/docs/products/kafka/howto/connect-with-nodejs.rst
index b1e0b2c911..e74a4b825f 100644
--- a/docs/products/kafka/howto/connect-with-nodejs.rst
+++ b/docs/products/kafka/howto/connect-with-nodejs.rst
@@ -21,7 +21,7 @@ Go to the *Overview* page of your Aiven for Apache Kafka service.
 * If you are going to connect using SASL authentication:
 
-   #. Follow the instructions at `Use SASL Authentication with Apache Kafka® `_ to enable SASL.
+   #. Follow the instructions at `Use SASL Authentication with Apache Kafka® `_ to enable SASL.
 
    #. In the *Connection Information* section
diff --git a/docs/products/kafka/howto/connect-with-python.rst b/docs/products/kafka/howto/connect-with-python.rst
index d04aeabf1a..4cab46a23f 100644
--- a/docs/products/kafka/howto/connect-with-python.rst
+++ b/docs/products/kafka/howto/connect-with-python.rst
@@ -11,7 +11,7 @@ These examples show how to connect to an Aiven for Apache Kafka® service using
 .. note:: The examples given here provide different options for the different
    authentication methods. For more information on the supported methods, see `our article on Kafka
-   authentication types `_.
+   authentication types `_.
 
 Pre-requisites
 --------------
@@ -35,7 +35,7 @@ Go to the *Overview* page of your Aiven for Apache Kafka service.
 * If you are going to connect using SASL authentication:
 
-   #. Follow the instructions at `Use SASL Authentication with Apache Kafka® `_ to enable SASL.
+   #. Follow the instructions at `Use SASL Authentication with Apache Kafka® `_ to enable SASL.
 
    #. In the *Connection Information* section
@@ -48,7 +48,7 @@ Note that the *CA Certificate* ``ca.pem`` file has the same contents by either r
 In the below examples, we just pass the name of the certificate files, but in actual use, the full path should be used.
 
-You can also use the `Aiven command line tool `_ to download the files. See the documentation for `avn service user-creds-download `_
+You can also use the `Aiven command line tool `_ to download the files. See the documentation for `avn service user-creds-download `_
 
 Variables
diff --git a/docs/products/kafka/howto/flink-with-aiven-for-kafka.rst b/docs/products/kafka/howto/flink-with-aiven-for-kafka.rst
index efa2af808a..ce4a6fd919 100644
--- a/docs/products/kafka/howto/flink-with-aiven-for-kafka.rst
+++ b/docs/products/kafka/howto/flink-with-aiven-for-kafka.rst
@@ -15,7 +15,7 @@ The example in this article shows you how to create a simple Java Flink job that
 Prerequisites
 -------------
 
-You need an Aiven for Apache Kafka service up and running with two topics, named ``test-flink-input`` and ``test-flink-output``, already `created `_.
+You need an Aiven for Apache Kafka service up and running with two topics, named ``test-flink-input`` and ``test-flink-output``, already `created `_.
 Furthermore, for the example, you need to collect the following information about the Aiven for Apache Kafka service:
 
 * ``APACHE_KAFKA_HOST``: The hostname of the Apache Kafka service
diff --git a/docs/products/kafka/howto/ksql-docker.rst b/docs/products/kafka/howto/ksql-docker.rst
index a168e7a813..42f221aaff 100644
--- a/docs/products/kafka/howto/ksql-docker.rst
+++ b/docs/products/kafka/howto/ksql-docker.rst
@@ -3,7 +3,7 @@ Use ksqlDB with Aiven for Apache Kafka®
 Aiven provides a managed Apache Kafka® solution together with a number of auxiliary services like Apache Kafka Connect, Kafka REST and Schema Registry via `Karapace `_. A managed `ksqlDB `_ service in Aiven is, however, not supported. If you want to define streaming data pipelines with SQL, you have two options:
 
-* Use `Aiven for Apache Flink® `_
+* Use `Aiven for Apache Flink® `_
 
 * Run a self-hosted ksqlDB cluster.
diff --git a/docs/products/kafka/kafka-connect/concepts/list-of-connector-plugins.rst b/docs/products/kafka/kafka-connect/concepts/list-of-connector-plugins.rst
index 1b87ea2e85..9d987c921e 100644
--- a/docs/products/kafka/kafka-connect/concepts/list-of-connector-plugins.rst
+++ b/docs/products/kafka/kafka-connect/concepts/list-of-connector-plugins.rst
@@ -40,7 +40,7 @@ Sink connectors enable the integration of data from an existing Apache Kafka top
 * :doc:`Aiven for Apache Kafka® S3 Sink Connector <../howto/s3-sink-connector-aiven>`
 
-* `Confluent Amazon S3 Sink `__
+* `Confluent Amazon S3 Sink `__
 
 * `Couchbase® `__
diff --git a/docs/products/kafka/kafka-connect/howto/debezium-source-connector-pg.rst b/docs/products/kafka/kafka-connect/howto/debezium-source-connector-pg.rst
index 942932bc32..aedeab074e 100644
--- a/docs/products/kafka/kafka-connect/howto/debezium-source-connector-pg.rst
+++ b/docs/products/kafka/kafka-connect/howto/debezium-source-connector-pg.rst
@@ -31,7 +31,7 @@ Furthermore you need to collect the following information about the source Postg
 * ``PLUGIN_NAME``: The `logical decoding plugin `_, possible values are ``decoderbufs``, ``wal2json`` and ``pgoutput``
 * ``PG_TABLES``: The list of database tables to be included in Apache Kafka; the list must be in the form of ``schema_name1.table_name1,schema_name2.table_name2``
 * ``PG_PUBLICATION_NAME``: The name of the `PostgreSQL logical replication publication `_, if left empty, ``debezium`` is used as default
-* ``PG_SLOT_NAME``: name of the `PostgreSQL replication slot `_, if left empty, ``debezium`` is be used as default
+* ``PG_SLOT_NAME``: name of the `PostgreSQL replication slot `_, if left empty, ``debezium`` is used as default
 * ``APACHE_KAFKA_HOST``: The hostname of the Apache Kafka service, only needed when using Avro as data format
 * ``SCHEMA_REGISTRY_PORT``: The Apache Kafka's schema registry port, only needed when using Avro as data format
 * ``SCHEMA_REGISTRY_USER``: The Apache Kafka's schema registry username, only needed when using Avro as data format
diff --git a/docs/products/mysql/concepts/mysql-backups.rst b/docs/products/mysql/concepts/mysql-backups.rst
index ea7c6641b3..a8dde7b26a 100644
--- a/docs/products/mysql/concepts/mysql-backups.rst
+++ b/docs/products/mysql/concepts/mysql-backups.rst
@@ -22,4 +22,4 @@ More resources
 --------------
 
 - Our blog post: `MyHoard, your solution to MySQL backups and restoration `_
-- Read about `Aiven cloud security and data encryption `_
+- Read about `Aiven cloud security and data encryption `_
diff --git a/docs/products/opensearch/howto/connect-with-nodejs.rst b/docs/products/opensearch/howto/connect-with-nodejs.rst
index bf68ac9913..798f8c8bff 100644
--- a/docs/products/opensearch/howto/connect-with-nodejs.rst
+++ b/docs/products/opensearch/howto/connect-with-nodejs.rst
@@ -3,7 +3,7 @@ Connect to OpenSearch® cluster with NodeJS
 The most convenient way to work with the cluster when using NodeJS is to rely on `OpenSearch® JavaScript client `_. Follow its ``README`` file for installation instructions.
 
-To connect to the cluster, you'll need ``service_uri``, which you can find either in the service overview in the `Aiven console `_ or get through the Aiven command line interface `service command `_. ``service_uri`` contains credentials, therefore should be treated with care.
+To connect to the cluster, you'll need ``service_uri``, which you can find either in the service overview in the `Aiven console `_ or get through the Aiven command line interface `service command `_. ``service_uri`` contains credentials, therefore should be treated with care.
 
 We strongly recommend using environment variables for credential information. A good way to do this is to use ``dotenv``. You will find installation and usage instructions `on its library's project page `_, but in short, you need to create ``.env`` file in the project and assign ``SERVICE_URI`` inside of this file.
diff --git a/docs/products/opensearch/howto/opensearch-aggregations-and-nodejs.rst b/docs/products/opensearch/howto/opensearch-aggregations-and-nodejs.rst
index 4670ab0bac..82bab2ac10 100644
--- a/docs/products/opensearch/howto/opensearch-aggregations-and-nodejs.rst
+++ b/docs/products/opensearch/howto/opensearch-aggregations-and-nodejs.rst
@@ -286,7 +286,7 @@ Calculate percentiles for `calories`:
     }
 }
 
-From the returned result you can see that 50% of recipes have less than 331 calories. Interestingly, only one percent of the meals is more than 3256 calories. You must be curious what falls within that last percentile ;) Now that we know the value to look for, we can use `a range query `_ to find the recipes. Set the minimum value, but keep the maximum empty to allow no bounds:
+From the returned result you can see that 50% of recipes have less than 331 calories. Interestingly, only one percent of the meals is more than 3256 calories. You must be curious what falls within that last percentile ;) Now that we know the value to look for, we can use `a range query `_ to find the recipes. Set the minimum value, but keep the maximum empty to allow no bounds:
 
 ::
diff --git a/docs/products/postgresql/howto/migrate-using-bucardo.rst b/docs/products/postgresql/howto/migrate-using-bucardo.rst
index 119f9a268b..527eeae4ae 100644
--- a/docs/products/postgresql/howto/migrate-using-bucardo.rst
+++ b/docs/products/postgresql/howto/migrate-using-bucardo.rst
@@ -36,7 +36,7 @@ To migrate your data using Bucardo:
    instructions `__ on the Bucardo site.
 
-#. | Install the ``aiven_extras`` `extension `_ to your current database.
+#. | Install the ``aiven_extras`` `extension `_ to your current database.
    | Bucardo requires the superuser role to set the ``session_replication_role`` parameter.
      Aiven uses the open source ``aiven_extras`` extension to allow you to run ``superuser``
diff --git a/docs/products/postgresql/reference/high-cpu-load-of-pgbouncer.rst b/docs/products/postgresql/reference/high-cpu-load-of-pgbouncer.rst
index 673926665c..b8adb9b7dc 100644
--- a/docs/products/postgresql/reference/high-cpu-load-of-pgbouncer.rst
+++ b/docs/products/postgresql/reference/high-cpu-load-of-pgbouncer.rst
@@ -1,7 +1,7 @@
 High CPU load when using PgBouncer
 ==================================
 
-PgBouncer is a lightweight connection pooler for PostgreSQL®. For the setup and configurations of PgBouncer please refer to this `help article. `_
+PgBouncer is a lightweight connection pooler for PostgreSQL®. For the setup and configurations of PgBouncer please refer to this `help article. `_
 
 During the usage of PgBouncer pooling, you may see a high percentage of CPU load by PgBouncer, which indicates a problem with the usage pattern. Historically, the reason for the high CPU usage in PgBouncer is most likely the high incoming SSL connection rate. SSL handshakes cost quite a bit when the user run the query and then disconnect/reconnect again for the next query.
diff --git a/docs/tools/terraform/get-started.rst b/docs/tools/terraform/get-started.rst
index 4204a87478..2a83e9f7bd 100644
--- a/docs/tools/terraform/get-started.rst
+++ b/docs/tools/terraform/get-started.rst
@@ -7,13 +7,13 @@ Prepare the dependencies
 ''''''''''''''''''''''''
 
 - `Download and install Terraform `_
 - `Sign up `_ for Aiven if you haven't already
-- `Generate an authentication token `_
+- `Generate an authentication token `_
 
 .. Tip::
 
     Make sure that you have either the *Administrator* or *Operator* role when creating the API token. When you create a project, you automatically receive the *Administrator* access.
 
-    For more details, refer to the `Project members and roles page `_.
+    For more details, refer to the `Project members and roles page `_.
 
 Configure your project and services
 '''''''''''''''''''''''''''''''''''
diff --git a/docs/tools/terraform/reference/cookbook/grafana-m3db-postgresql-recipe.rst b/docs/tools/terraform/reference/cookbook/grafana-m3db-postgresql-recipe.rst
index 38249ad92c..d835848d88 100644
--- a/docs/tools/terraform/reference/cookbook/grafana-m3db-postgresql-recipe.rst
+++ b/docs/tools/terraform/reference/cookbook/grafana-m3db-postgresql-recipe.rst
@@ -179,8 +179,8 @@ More resources
 
 You might find these related resources useful too:
 
-- `Configuration options for Aiven for Grafana `_
-- `Configuration options for Aiven for PostgreSQL `_
-- `Configuration options for Aiven for M3DB `_
-- `Set up your first Aiven Terraform project `_
+- `Configuration options for Aiven for Grafana `_
+- `Configuration options for Aiven for PostgreSQL `_
+- `Configuration options for Aiven for M3DB `_
+- `Set up your first Aiven Terraform project `_
 - `Metrics and graphs with M3 and Grafana `_
diff --git a/docs/tools/terraform/reference/cookbook/kafka-connect-terraform-recipe.rst b/docs/tools/terraform/reference/cookbook/kafka-connect-terraform-recipe.rst
index 113040d591..1e0d52dc2a 100644
--- a/docs/tools/terraform/reference/cookbook/kafka-connect-terraform-recipe.rst
+++ b/docs/tools/terraform/reference/cookbook/kafka-connect-terraform-recipe.rst
@@ -201,6 +201,6 @@ More resources
 
 You might find these related resources useful too:
 
-- `Configuration options for Kafka `_
-- `Configuration options for OpenSearch `_
+- `Configuration options for Kafka `_
+- `Configuration options for OpenSearch `_
 
diff --git a/docs/tools/terraform/reference/cookbook/kafka-custom-conf-recipe.rst b/docs/tools/terraform/reference/cookbook/kafka-custom-conf-recipe.rst
index fefa16f077..b30ca6a5f5 100644
--- a/docs/tools/terraform/reference/cookbook/kafka-custom-conf-recipe.rst
+++ b/docs/tools/terraform/reference/cookbook/kafka-custom-conf-recipe.rst
@@ -196,7 +196,7 @@ More resources
 
 Keep in mind that some parameters and configurations will vary for your case. Some related resources are provided below:
 
-- `Configuration options for Aiven for Apache Kafka `_
-- `Aiven for Apache Kafka access control lists permission mapping `_
+- `Configuration options for Aiven for Apache Kafka `_
+- `Aiven for Apache Kafka access control lists permission mapping `_
 - `How to Manage Aiven for Apache Kafka Parameters `_
-- `Set up your first Aiven Terraform project `_
+- `Set up your first Aiven Terraform project `_
diff --git a/docs/tools/terraform/reference/cookbook/kafka-debezium-postgres-source.rst b/docs/tools/terraform/reference/cookbook/kafka-debezium-postgres-source.rst
index 61f84b8691..6ced6fa95b 100644
--- a/docs/tools/terraform/reference/cookbook/kafka-debezium-postgres-source.rst
+++ b/docs/tools/terraform/reference/cookbook/kafka-debezium-postgres-source.rst
@@ -1,7 +1,7 @@
 Debezium source connector - PostgreSQL® to Apache Kafka® across clouds
 ======================================================================
 
-The `Aiven Terraform Provider `_ is a great choice for provisioning an Aiven for Apache Kafka® cluster with Kafka Connect enabled and the `Debezium source connector for PostgreSQL® `_ configured.
+The `Aiven Terraform Provider `_ is a great choice for provisioning an Aiven for Apache Kafka® cluster with Kafka Connect enabled and the `Debezium source connector for PostgreSQL® `_ configured.
 
 Let's check out the following diagram to understand the setup.
@@ -221,6 +221,6 @@ More resources
 
 Keep in mind that some parameters and configurations will vary for your case. A reference to some of the advanced Apache Kafka configurations and other related resources:
 
-- `List of advanced Apache Kafka configurations `_
-- `Create a Debezium source connector `_
-- `List of available Apache Kafka® Connect connectors `_
+- `List of advanced Apache Kafka configurations `_
+- `Create a Debezium source connector `_
+- `List of available Apache Kafka® Connect connectors `_
diff --git a/docs/tools/terraform/reference/cookbook/kafka-flink-integration-recipe.rst b/docs/tools/terraform/reference/cookbook/kafka-flink-integration-recipe.rst
index 8ecc300578..19935f151d 100644
--- a/docs/tools/terraform/reference/cookbook/kafka-flink-integration-recipe.rst
+++ b/docs/tools/terraform/reference/cookbook/kafka-flink-integration-recipe.rst
@@ -203,6 +203,6 @@ More resources
 The parameters and configurations will vary for your case. Please refer below for Apache Kafka and Apache Flink advanced parameters, a related blog, and how to get started with Aiven Terraform Provider:
 
 - `Build a Streaming SQL Pipeline with Apache Flink® and Apache Kafka® `_
-- `Set up your first Aiven Terraform project `_
-- `Advanced parameters for Aiven for Apache Kafka® `_
-- `Advanced parameters for Aiven for Apache Flink® `_
+- `Set up your first Aiven Terraform project `_
+- `Advanced parameters for Aiven for Apache Kafka® `_
+- `Advanced parameters for Aiven for Apache Flink® `_
diff --git a/docs/tools/terraform/reference/cookbook/kafka-karapace-recipe.rst b/docs/tools/terraform/reference/cookbook/kafka-karapace-recipe.rst
index 16a37b5271..6418e8ad60 100644
--- a/docs/tools/terraform/reference/cookbook/kafka-karapace-recipe.rst
+++ b/docs/tools/terraform/reference/cookbook/kafka-karapace-recipe.rst
@@ -4,7 +4,7 @@ Apache Kafka® with Karapace Schema Registry
 This example shows how to setup `Karapace `_ - an open source HTTP API interface and schema registry, with Aiven for Apache Kafka® using `Aiven Terraform Provider `_. You'll also enable the auto creation of Apache Kafka topics which will allow you to send message to topics that didn't exist already on the Apache Kafka cluster. In order to work directly with Kafka by producing and consuming messages over HTTP, the REST API feature will be enabled.
 
-To learn more, check out `Create Apache Kafka® topics automatically `_ page.
+To learn more, check out `Create Apache Kafka® topics automatically `_ page.
 
 .. mermaid::
diff --git a/docs/tools/terraform/reference/cookbook/kafka-mirrormaker-recipe.rst b/docs/tools/terraform/reference/cookbook/kafka-mirrormaker-recipe.rst
index 2cb241aa2e..1399821940 100644
--- a/docs/tools/terraform/reference/cookbook/kafka-mirrormaker-recipe.rst
+++ b/docs/tools/terraform/reference/cookbook/kafka-mirrormaker-recipe.rst
@@ -242,6 +242,6 @@ More resources
 
 Keep in mind that some parameters and configurations will vary for your case. A reference to some of the advanced Apache Kafka configurations and other related resources:
 
-- `Configuration options for Aiven for Apache Kafka `_
-- `Aiven for Apache Kafka® MirrorMaker 2 Terminology `_
+- `Configuration options for Aiven for Apache Kafka `_
+- `Aiven for Apache Kafka® MirrorMaker 2 Terminology `_
 - `5 reasons why you should be using MirrorMaker 2.0 for data replication `_
diff --git a/docs/tools/terraform/reference/cookbook/kafka-mongodb-recipe.rst b/docs/tools/terraform/reference/cookbook/kafka-mongodb-recipe.rst
index 98b1f75c17..f84046d081 100644
--- a/docs/tools/terraform/reference/cookbook/kafka-mongodb-recipe.rst
+++ b/docs/tools/terraform/reference/cookbook/kafka-mongodb-recipe.rst
@@ -1,7 +1,7 @@
 Apache Kafka® with MongoDB source connector
 ===========================================
 
-The `Aiven Terraform Provider `_ is a great choice for provisioning an Aiven for Apache Kafka® cluster with Kafka Connect enabled and the `MongoDB source connector `_ configured.
+The `Aiven Terraform Provider `_ is a great choice for provisioning an Aiven for Apache Kafka® cluster with Kafka Connect enabled and the `MongoDB source connector `_ configured.
 
 Let's check out the following diagram to understand the setup.
 
@@ -160,6 +160,6 @@ More resources
 
 Keep in mind that some parameters and configurations will vary for your case. A reference to some of the advanced Apache Kafka configurations and other related resources:
 
-- `List of advanced Apache Kafka configurations `_
-- `Create a MongoDB source connector `_
-- `List of available Apache Kafka® Connect connectors `_
+- `List of advanced Apache Kafka configurations `_
+- `Create a MongoDB source connector `_
+- `List of available Apache Kafka® Connect connectors `_
diff --git a/docs/tools/terraform/reference/cookbook/m3db-m3agg-recipe.rst b/docs/tools/terraform/reference/cookbook/m3db-m3agg-recipe.rst
index 9aee4237c8..30acdb6b5d 100644
--- a/docs/tools/terraform/reference/cookbook/m3db-m3agg-recipe.rst
+++ b/docs/tools/terraform/reference/cookbook/m3db-m3agg-recipe.rst
@@ -1,7 +1,7 @@
 Setup M3-related services using Aiven Terraform Provider
 ========================================================
 
-`Aiven for M3DB `_ is a powerful time-series database that can be used when handling very large volumes of metrics and scalability is a concern. `Aiven for M3 Aggregator `_ can store your data at various resolutions for different workloads at scale.
+`Aiven for M3DB `_ is a powerful time-series database that can be used when handling very large volumes of metrics and scalability is a concern. `Aiven for M3 Aggregator `_ can store your data at various resolutions for different workloads at scale.
 
 Together, they are a perfect choice to aggregate, store, and query large time-series data like internet of things (IoT) sensor readings. This example shows how to use the `Aiven Terraform Provider `_ to create an Aiven for M3 service, an Aiven for M3 Aggregator service, and the related service integration programmatically.
@@ -188,7 +188,7 @@ More resources
 
 You might find these related resources useful too:
 
-- `Configuration options for Aiven for M3DB `_
-- `Configuration options for Aiven for M3 Aggregator `_
-- `Set up your first Aiven Terraform project `_
+- `Configuration options for Aiven for M3DB `_
+- `Configuration options for Aiven for M3 Aggregator `_
+- `Set up your first Aiven Terraform project `_
 - `Metrics and graphs with M3 and Grafana® `_
diff --git a/docs/tools/terraform/reference/cookbook/multicloud-postgresql-recipe.rst b/docs/tools/terraform/reference/cookbook/multicloud-postgresql-recipe.rst
index f1d8c843d2..5cf4893243 100644
--- a/docs/tools/terraform/reference/cookbook/multicloud-postgresql-recipe.rst
+++ b/docs/tools/terraform/reference/cookbook/multicloud-postgresql-recipe.rst
@@ -131,6 +131,6 @@ More resources
 
 You might find these related resources useful too:
 
-- `Configuration options for PostgreSQL `_
-- `Set up your first Aiven Terraform project `_
+- `Configuration options for PostgreSQL `_
+- `Set up your first Aiven Terraform project `_
 - `Benefits and challenges of multi-cloud `_
diff --git a/docs/tools/terraform/reference/cookbook/postgresql-custom-configs-recipe.rst b/docs/tools/terraform/reference/cookbook/postgresql-custom-configs-recipe.rst
index d48c5791dc..83af8e1e11 100644
--- a/docs/tools/terraform/reference/cookbook/postgresql-custom-configs-recipe.rst
+++ b/docs/tools/terraform/reference/cookbook/postgresql-custom-configs-recipe.rst
@@ -160,6 +160,6 @@ More resources
 To learn how to get started with Aiven Terraform Provider and specific PostgreSQL configurations for you use case, check out the following resources:
 
 - `What is PostgreSQL®? `_
-- `Configuration options for PostgreSQL `_
+- `Configuration options for PostgreSQL `_
 - `PostgreSQL Resource Consumption `_
-- `Set up your first Aiven Terraform project `_
+- `Set up your first Aiven Terraform project `_
diff --git a/docs/tools/terraform/reference/cookbook/postgresql-read-replica-recipe.rst b/docs/tools/terraform/reference/cookbook/postgresql-read-replica-recipe.rst
index 696ca5bdbc..f28c08a431 100644
--- a/docs/tools/terraform/reference/cookbook/postgresql-read-replica-recipe.rst
+++ b/docs/tools/terraform/reference/cookbook/postgresql-read-replica-recipe.rst
@@ -166,6 +166,6 @@ More resources
 To learn how to get started with Aiven Terraform Provider and specific PostgreSQL configurations for you use case, check out the following resources:
 
 - `What is PostgreSQL®? `_
-- `Configuration options for PostgreSQL `_
-- `Create and use read-only replicas `_
-- `Set up your first Aiven Terraform project `_
+- `Configuration options for PostgreSQL `_
+- `Create and use read-only replicas `_
+- `Set up your first Aiven Terraform project `_
diff --git a/page_stats.py b/page_stats.py
index a9cb12a7ed..a61e77a3a1 100644
--- a/page_stats.py
+++ b/page_stats.py
@@ -47,7 +47,7 @@ def index_pages(es, base_url):
 
 if __name__ == "__main__":
 
-    tree = sitemap_tree_for_homepage('https://developer.aiven.io')
+    tree = sitemap_tree_for_homepage('https://docs.aiven.io')
 
     devportal_pages = sum(1 for _ in tree.all_pages())
     print("Aiven Developer sitemap page count: {:d}".format(devportal_pages))
diff --git a/robots.txt b/robots.txt
index 40f0dd5db4..5c7a8fd199 100644
--- a/robots.txt
+++ b/robots.txt
@@ -1,4 +1,4 @@
 User-agent: *
 
-Sitemap: https://developer.aiven.io/sitemap.xml
+Sitemap: https://docs.aiven.io/sitemap.xml
 
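Illustrative aside: the page_stats.py hunk above repoints the sitemap crawl at the new domain. A standalone sketch of that check, assuming `sitemap_tree_for_homepage` comes from the third-party ultimate-sitemap-parser package (an assumption based on the function name; page_stats.py's own imports are not shown in this patch):

    # Sketch only: `pip install ultimate-sitemap-parser` is assumed.
    from usp.tree import sitemap_tree_for_homepage

    def count_sitemap_pages(homepage: str) -> int:
        """Walk the sitemaps advertised by the homepage and count the pages."""
        tree = sitemap_tree_for_homepage(homepage)
        return sum(1 for _ in tree.all_pages())

    if __name__ == "__main__":
        # After this patch the crawl starts from docs.aiven.io.
        print("Aiven Developer sitemap page count:", count_sitemap_pages("https://docs.aiven.io"))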
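Illustrative aside: since the patch retires developer.aiven.io across the repository, a reviewer may also want a quick check that no references were missed. A hypothetical helper using only the standard library (the file-extension filter and repository root are assumptions, not part of the patch):

    # Sketch only: lists files that still mention the old domain.
    from pathlib import Path

    OLD_DOMAIN = "developer.aiven.io"
    EXTENSIONS = {".rst", ".md", ".py", ".txt"}

    def find_stale_references(repo_root: str = "."):
        """Return paths of tracked text files that still contain the old domain."""
        stale = []
        for path in Path(repo_root).rglob("*"):
            if path.is_file() and path.suffix in EXTENSIONS:
                if OLD_DOMAIN in path.read_text(encoding="utf-8", errors="ignore"):
                    stale.append(path)
        return stale

    if __name__ == "__main__":
        for path in find_stale_references():
            print(path)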