From bb544239f0ca3bcd076cf2e93116b2747e289e10 Mon Sep 17 00:00:00 2001
From: Ajantha Bhat
Date: Tue, 30 Jul 2024 23:25:02 +0530
Subject: [PATCH] Update README.md (#15)

matching the catalog name with the below spark sql example of "use polaris"
---
 README.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index c8494d910..44d6c7a46 100644
--- a/README.md
+++ b/README.md
@@ -169,10 +169,10 @@ $ export PRINCIPAL_TOKEN=ver:1-hint:1036-ETMsDgAAAY/GPANareallyverylongstringtha
 
 $ curl -i -X PUT -H "Authorization: Bearer $PRINCIPAL_TOKEN" -H 'Accept: application/json' -H 'Content-Type: application/json' \
   http://${POLARIS_HOST:-localhost}:8181/api/v1/catalogs \
-  -d '{"name": "snowflake", "id": 100, "type": "INTERNAL", "readOnly": false}'
+  -d '{"name": "polaris", "id": 100, "type": "INTERNAL", "readOnly": false}'
 ```
 
-This creates a catalog called `snowflake`. From here, you can use Spark to create namespaces, tables, etc.
+This creates a catalog called `polaris`. From here, you can use Spark to create namespaces, tables, etc.
 
 You must run the following as the first query in your spark-sql shell to actually use Polaris:
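
The commit message refers to a spark-sql example further down in the README ("use polaris") that is not part of this hunk. As a minimal sketch of what that first query looks like after the rename, assuming the Spark session has a catalog registered under the name `polaris`:

```sql
-- Hypothetical first statement in the spark-sql shell after the catalog is
-- renamed from "snowflake" to "polaris"; assumes the Spark session was started
-- with a catalog configured under the name `polaris` (not shown in this hunk).
use polaris;
```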