diff --git a/README.md b/README.md
index 213f6ce0bf..991cd6de4f 100644
--- a/README.md
+++ b/README.md
@@ -2,10 +2,6 @@ Cortex is an open source platform that takes machine learning models—trained with nearly any framework—and turns them into production web APIs in one command.
-
-
-[install](https://www.cortex.dev/install) • [docs](https://www.cortex.dev) • [examples](https://github.com/cortexlabs/cortex/tree/0.8/examples) • [we're hiring](https://angel.co/cortex-labs-inc/jobs) • [email us](mailto:hello@cortex.dev) • [chat with us](https://gitter.im/cortexlabs/cortex)

-
 ![Demo](https://cortex-public.s3-us-west-2.amazonaws.com/demo/gif/v0.8.gif)
@@ -20,7 +16,7 @@ Below, we'll walk through how to use Cortex to deploy OpenAI's GPT-2 model as a

 ### Step 1: Configure your deployment

-Define a `deployment` and an `api` resource. A `deployment` specifies a set of APIs that are deployed together. An `api` makes a model available as a web service that can serve real-time predictions. The configuration below will download the model from the `cortex-examples` S3 bucket. You can run the code that generated the model [here](https://colab.research.google.com/github/cortexlabs/cortex/blob/0.8/examples/text-generator/gpt-2.ipynb).
+Define a `deployment` and an `api` resource. A `deployment` specifies a set of APIs that are deployed together. An `api` makes a model available as a web service that can serve real-time predictions. The configuration below will download the model from the `cortex-examples` S3 bucket. You can run the code that generated the model [here](https://colab.research.google.com/github/cortexlabs/cortex/blob/0.9/examples/text-generator/gpt-2.ipynb).

 ```yaml
 # cortex.yaml
@@ -101,11 +97,11 @@ Any questions? [chat with us](https://gitter.im/cortexlabs/cortex).

 ## More examples

-- [Iris classification](https://github.com/cortexlabs/cortex/tree/0.8/examples/iris-classifier)
+- [Iris classification](https://github.com/cortexlabs/cortex/tree/0.9/examples/iris-classifier)

-- [Sentiment analysis](https://github.com/cortexlabs/cortex/tree/0.8/examples/sentiment-analysis) with BERT
+- [Sentiment analysis](https://github.com/cortexlabs/cortex/tree/0.9/examples/sentiment-analysis) with BERT

-- [Image classification](https://github.com/cortexlabs/cortex/tree/0.8/examples/image-classifier) with Inception v3 and AlexNet
+- [Image classification](https://github.com/cortexlabs/cortex/tree/0.9/examples/image-classifier) with Inception v3 and AlexNet
diff --git a/build/build-image.sh b/build/build-image.sh
index 7e8ce0353f..2c638d8e05 100755
--- a/build/build-image.sh
+++ b/build/build-image.sh
@@ -19,7 +19,7 @@ set -euo pipefail

 ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")"/.. >/dev/null && pwd)"

-CORTEX_VERSION=master
+CORTEX_VERSION=0.9.0

 dir=$1
 image=$2
diff --git a/build/cli.sh b/build/cli.sh
index fb515bbb11..5b835d163a 100755
--- a/build/cli.sh
+++ b/build/cli.sh
@@ -19,7 +19,7 @@ set -euo pipefail

 ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")"/.. >/dev/null && pwd)"

-CORTEX_VERSION=master
+CORTEX_VERSION=0.9.0

 arg1=${1:-""}
 upload="false"
diff --git a/build/push-image.sh b/build/push-image.sh
index b502d0d4f5..c5b70bd4c8 100755
--- a/build/push-image.sh
+++ b/build/push-image.sh
@@ -17,7 +17,7 @@

 set -euo pipefail

-CORTEX_VERSION=master
+CORTEX_VERSION=0.9.0

 image=$1
diff --git a/cortex.sh b/cortex.sh
index 2fa8b88fb8..eec656e544 100755
--- a/cortex.sh
+++ b/cortex.sh
@@ -84,7 +84,7 @@ set -u
 ### CONFIGURATION ###
 #####################

-export CORTEX_VERSION_BRANCH_STABLE=master
+export CORTEX_VERSION_BRANCH_STABLE=0.9.0

 export CORTEX_CONFIG="${CORTEX_CONFIG:-""}"
 if [ "$CORTEX_CONFIG" != "" ]; then
diff --git a/docs/cluster/config.md b/docs/cluster/config.md
index 0e5d08c36c..0f9b9c9288 100644
--- a/docs/cluster/config.md
+++ b/docs/cluster/config.md
@@ -37,23 +37,23 @@ export CORTEX_NODES_MIN=2
 export CORTEX_NODES_MAX=5

 # Image paths
-export CORTEX_IMAGE_MANAGER="cortexlabs/manager:master"
-export CORTEX_IMAGE_FLUENTD="cortexlabs/fluentd:master"
-export CORTEX_IMAGE_STATSD="cortexlabs/statsd:master"
-export CORTEX_IMAGE_OPERATOR="cortexlabs/operator:master"
-export CORTEX_IMAGE_TF_SERVE="cortexlabs/tf-serve:master"
-export CORTEX_IMAGE_TF_API="cortexlabs/tf-api:master"
-export CORTEX_IMAGE_TF_SERVE_GPU="cortexlabs/tf-serve-gpu:master"
-export CORTEX_IMAGE_ONNX_SERVE="cortexlabs/onnx-serve:master"
-export CORTEX_IMAGE_ONNX_SERVE_GPU="cortexlabs/onnx-serve-gpu:master"
-export CORTEX_IMAGE_CLUSTER_AUTOSCALER="cortexlabs/cluster-autoscaler:master"
-export CORTEX_IMAGE_NVIDIA="cortexlabs/nvidia:master"
-export CORTEX_IMAGE_METRICS_SERVER="cortexlabs/metrics-server:master"
-export CORTEX_IMAGE_ISTIO_PROXY="cortexlabs/istio-proxy:master"
-export CORTEX_IMAGE_ISTIO_PILOT="cortexlabs/istio-pilot:master"
-export CORTEX_IMAGE_ISTIO_CITADEL="cortexlabs/istio-citadel:master"
-export CORTEX_IMAGE_ISTIO_GALLEY="cortexlabs/istio-galley:master"
-export CORTEX_IMAGE_DOWNLOADER="cortexlabs/downloader:master"
+export CORTEX_IMAGE_MANAGER="cortexlabs/manager:0.9.0"
+export CORTEX_IMAGE_FLUENTD="cortexlabs/fluentd:0.9.0"
+export CORTEX_IMAGE_STATSD="cortexlabs/statsd:0.9.0"
+export CORTEX_IMAGE_OPERATOR="cortexlabs/operator:0.9.0"
+export CORTEX_IMAGE_TF_SERVE="cortexlabs/tf-serve:0.9.0"
+export CORTEX_IMAGE_TF_API="cortexlabs/tf-api:0.9.0"
+export CORTEX_IMAGE_TF_SERVE_GPU="cortexlabs/tf-serve-gpu:0.9.0"
+export CORTEX_IMAGE_ONNX_SERVE="cortexlabs/onnx-serve:0.9.0"
+export CORTEX_IMAGE_ONNX_SERVE_GPU="cortexlabs/onnx-serve-gpu:0.9.0"
+export CORTEX_IMAGE_CLUSTER_AUTOSCALER="cortexlabs/cluster-autoscaler:0.9.0"
+export CORTEX_IMAGE_NVIDIA="cortexlabs/nvidia:0.9.0"
+export CORTEX_IMAGE_METRICS_SERVER="cortexlabs/metrics-server:0.9.0"
+export CORTEX_IMAGE_ISTIO_PROXY="cortexlabs/istio-proxy:0.9.0"
+export CORTEX_IMAGE_ISTIO_PILOT="cortexlabs/istio-pilot:0.9.0"
+export CORTEX_IMAGE_ISTIO_CITADEL="cortexlabs/istio-citadel:0.9.0"
+export CORTEX_IMAGE_ISTIO_GALLEY="cortexlabs/istio-galley:0.9.0"
+export CORTEX_IMAGE_DOWNLOADER="cortexlabs/downloader:0.9.0"

 # Flag to enable collecting error reports and usage stats. If flag is not set to either "true" or "false", you will be prompted.
 export CORTEX_ENABLE_TELEMETRY=""
diff --git a/docs/cluster/install.md b/docs/cluster/install.md
index 3446c3360e..7036ed7035 100644
--- a/docs/cluster/install.md
+++ b/docs/cluster/install.md
@@ -13,7 +13,7 @@ See [cluster configuration](config.md) to customize your installation.

 ```bash
 # Download
-curl -O https://raw.githubusercontent.com/cortexlabs/cortex/master/cortex.sh
+curl -O https://raw.githubusercontent.com/cortexlabs/cortex/0.9/cortex.sh

 # Change permissions
 chmod +x cortex.sh
@@ -42,7 +42,7 @@ This will create resources in your AWS account which aren't included in the free

 ```bash
 # Clone the Cortex repository
-git clone -b master https://github.com/cortexlabs/cortex.git
+git clone -b 0.9 https://github.com/cortexlabs/cortex.git

 # Navigate to the iris classification example
 cd cortex/examples/iris-classifier
diff --git a/docs/cluster/python-client.md b/docs/cluster/python-client.md
index a69ec9ddd1..cda94ec64c 100644
--- a/docs/cluster/python-client.md
+++ b/docs/cluster/python-client.md
@@ -4,7 +4,7 @@ The Python client can be used to programmatically deploy models to a Cortex Clus

 ```bash
-pip install git+https://github.com/cortexlabs/cortex.git@master#egg=cortex\&subdirectory=pkg/workloads/cortex/client
+pip install git+https://github.com/cortexlabs/cortex.git@v0.9.0#egg=cortex\&subdirectory=pkg/workloads/cortex/client
 ```

 The Python client needs to be initialized with AWS credentials and an operator URL for your Cortex cluster. You can find the operator URL by running `./cortex.sh endpoints`.
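As context for the pin above, this is a minimal sketch of initializing the Python client against a 0.9.0 cluster. The constructor arguments mirror the `__init__` signature changed later in this diff (`aws_access_key_id`, `aws_secret_access_key`, `operator_url`); the `Client` class name and import path are assumptions for illustration, not values confirmed by this diff.

```python
# Sketch only: assumes the package exposes cortex.client.Client; the constructor
# arguments match the __init__ signature shown in
# pkg/workloads/cortex/client/cortex/client.py later in this diff.
import os

from cortex.client import Client  # assumed import path

cortex = Client(
    aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
    aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
    operator_url="https://<operator-endpoint>",  # placeholder; from ./cortex.sh endpoints
)
# Requests made through this client carry the CortexAPIVersion "0.9.0" header,
# so the installed client should match the cluster version.
```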
diff --git a/docs/cluster/uninstall.md b/docs/cluster/uninstall.md
index 35ffad1c83..9b96f69be7 100644
--- a/docs/cluster/uninstall.md
+++ b/docs/cluster/uninstall.md
@@ -11,7 +11,7 @@

 ```bash
 # Download
-curl -O https://raw.githubusercontent.com/cortexlabs/cortex/master/cortex.sh
+curl -O https://raw.githubusercontent.com/cortexlabs/cortex/0.9/cortex.sh

 # Change permissions
 chmod +x cortex.sh
diff --git a/docs/cluster/update.md b/docs/cluster/update.md
index 787af119a8..1bed4f4d0b 100644
--- a/docs/cluster/update.md
+++ b/docs/cluster/update.md
@@ -13,7 +13,7 @@ See [cluster configuration](config.md) to customize your installation.

 ```bash
 # Download
-curl -O https://raw.githubusercontent.com/cortexlabs/cortex/master/cortex.sh
+curl -O https://raw.githubusercontent.com/cortexlabs/cortex/0.9/cortex.sh

 # Change permissions
 chmod +x cortex.sh
diff --git a/docs/deployments/packaging-models.md b/docs/deployments/packaging-models.md
index 8eea6f9715..602a174266 100644
--- a/docs/deployments/packaging-models.md
+++ b/docs/deployments/packaging-models.md
@@ -3,7 +3,7 @@

 ## TensorFlow

-Export your trained model and upload the export directory, or checkpoint directory containing the export directory, which is usually the case if you used `estimator.train_and_evaluate`. An example is shown below (here is the [complete example](https://github.com/cortexlabs/cortex/blob/master/examples/sentiment-analysis)):
+Export your trained model and upload the export directory, or checkpoint directory containing the export directory, which is usually the case if you used `estimator.train_and_evaluate`. An example is shown below (here is the [complete example](https://github.com/cortexlabs/cortex/blob/0.9/examples/sentiment-analysis)):

 ```Python
 import tensorflow as tf
@@ -80,10 +80,10 @@ with open("sklearn.onnx", "wb") as f:

 Here are complete examples of converting models from some of the common ML frameworks to ONNX:

-* [PyTorch](https://github.com/cortexlabs/cortex/blob/master/examples/iris-classifier/models/pytorch_model.py)
-* [Sklearn](https://github.com/cortexlabs/cortex/blob/master/examples/iris-classifier/models/sklearn_model.py)
-* [XGBoost](https://github.com/cortexlabs/cortex/blob/master/examples/iris-classifier/models/xgboost_model.py)
-* [Keras](https://github.com/cortexlabs/cortex/blob/master/examples/iris-classifier/models/keras_model.py)
+* [PyTorch](https://github.com/cortexlabs/cortex/blob/0.9/examples/iris-classifier/models/pytorch_model.py)
+* [Sklearn](https://github.com/cortexlabs/cortex/blob/0.9/examples/iris-classifier/models/sklearn_model.py)
+* [XGBoost](https://github.com/cortexlabs/cortex/blob/0.9/examples/iris-classifier/models/xgboost_model.py)
+* [Keras](https://github.com/cortexlabs/cortex/blob/0.9/examples/iris-classifier/models/keras_model.py)

 Upload your trained model in ONNX format to Amazon S3 using the AWS web console or CLI:
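As context for the ONNX conversion links updated above (the S3 upload command referenced by the trailing context line is outside this hunk), here is an illustrative sketch of converting a scikit-learn model to ONNX with `skl2onnx` and writing the `sklearn.onnx` file named in the hunk context. This is a generic example, not the repository's `sklearn_model.py`.

```python
# Illustrative sketch (not the repository's sklearn_model.py).
# Requires: pip install scikit-learn skl2onnx
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from skl2onnx import convert_sklearn
from skl2onnx.common.data_types import FloatTensorType

X, y = load_iris(return_X_y=True)
model = LogisticRegression(max_iter=200).fit(X, y)

# Declare the input signature: a float tensor with 4 features per sample.
onnx_model = convert_sklearn(model, initial_types=[("input", FloatTensorType([None, 4]))])

with open("sklearn.onnx", "wb") as f:
    f.write(onnx_model.SerializeToString())
```

The resulting `sklearn.onnx` file is what gets uploaded to S3 in the step that follows in the original document.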
diff --git a/docs/summary.md b/docs/summary.md
index 4649525f92..d6c8c87d3f 100644
--- a/docs/summary.md
+++ b/docs/summary.md
@@ -3,7 +3,7 @@
 * [Deploy machine learning models in production](../README.md)
 * [Install](cluster/install.md)
 * [GitHub](https://github.com/cortexlabs/cortex)
-* [Examples](https://github.com/cortexlabs/cortex/tree/master/examples)
+* [Examples](https://github.com/cortexlabs/cortex/tree/0.9/examples)
 * [We're hiring](https://angel.co/cortex-labs-inc/jobs)
 * [Email us](mailto:hello@cortex.dev)
 * [Chat with us](https://gitter.im/cortexlabs/cortex)
diff --git a/examples/image-classifier/README.md b/examples/image-classifier/README.md
index e46b5d8256..7d16c86139 100644
--- a/examples/image-classifier/README.md
+++ b/examples/image-classifier/README.md
@@ -20,8 +20,8 @@ A `deployment` specifies a set of resources that are deployed as a single unit.

 You can run the code that generated the exported models used in this example folder here:

-- [Pytorch Alexnet](https://colab.research.google.com/github/cortexlabs/cortex/blob/master/examples/image-classifier/alexnet.ipynb)
-- [TensorFlow Inception V3](https://colab.research.google.com/github/cortexlabs/cortex/blob/master/examples/image-classifier/inception.ipynb)
+- [Pytorch Alexnet](https://colab.research.google.com/github/cortexlabs/cortex/blob/0.9/examples/image-classifier/alexnet.ipynb)
+- [TensorFlow Inception V3](https://colab.research.google.com/github/cortexlabs/cortex/blob/0.9/examples/image-classifier/inception.ipynb)

 ## Add request handling
diff --git a/examples/image-classifier/alexnet.ipynb b/examples/image-classifier/alexnet.ipynb
index 825f2e5857..096ce14b25 100644
--- a/examples/image-classifier/alexnet.ipynb
+++ b/examples/image-classifier/alexnet.ipynb
@@ -166,7 +166,7 @@
       },
       "source": [
         "\n",
-        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/master/examples/image-classifier) for how to deploy the model as an API."
+        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/0.9/examples/image-classifier) for how to deploy the model as an API."
       ]
     }
   ]
diff --git a/examples/image-classifier/inception.ipynb b/examples/image-classifier/inception.ipynb
index f3796e9e74..64ee5ecf8f 100644
--- a/examples/image-classifier/inception.ipynb
+++ b/examples/image-classifier/inception.ipynb
@@ -202,7 +202,7 @@
       },
       "source": [
         "\n",
-        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/master/examples/image-classifier) for how to deploy the model as an API."
+        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/0.9/examples/image-classifier) for how to deploy the model as an API."
       ]
     }
   ]
diff --git a/examples/iris-classifier/README.md b/examples/iris-classifier/README.md
index 69e9177ce0..a097309caf 100644
--- a/examples/iris-classifier/README.md
+++ b/examples/iris-classifier/README.md
@@ -20,11 +20,11 @@ Define a `deployment` and an `api` resource in `cortex.yaml`. A `deployment` spe

 You can run the code that generated the exported models used in this folder example here:

-- [TensorFlow](https://colab.research.google.com/github/cortexlabs/cortex/blob/master/examples/iris-classifier/models/tensorflow.ipynb)
-- [Pytorch](https://colab.research.google.com/github/cortexlabs/cortex/blob/master/examples/iris-classifier/models/pytorch.ipynb)
-- [Keras](https://colab.research.google.com/github/cortexlabs/cortex/blob/master/examples/iris-classifier/models/keras.ipynb)
-- [XGBoost](https://colab.research.google.com/github/cortexlabs/cortex/blob/master/examples/iris-classifier/models/xgboost.ipynb)
-- [sklearn](https://colab.research.google.com/github/cortexlabs/cortex/blob/master/examples/iris-classifier/models/sklearn.ipynb)
+- [TensorFlow](https://colab.research.google.com/github/cortexlabs/cortex/blob/0.9/examples/iris-classifier/models/tensorflow.ipynb)
+- [Pytorch](https://colab.research.google.com/github/cortexlabs/cortex/blob/0.9/examples/iris-classifier/models/pytorch.ipynb)
+- [Keras](https://colab.research.google.com/github/cortexlabs/cortex/blob/0.9/examples/iris-classifier/models/keras.ipynb)
+- [XGBoost](https://colab.research.google.com/github/cortexlabs/cortex/blob/0.9/examples/iris-classifier/models/xgboost.ipynb)
+- [sklearn](https://colab.research.google.com/github/cortexlabs/cortex/blob/0.9/examples/iris-classifier/models/sklearn.ipynb)

 ## Add request handling
diff --git a/examples/iris-classifier/models/keras.ipynb b/examples/iris-classifier/models/keras.ipynb
index 326ac11ff1..7f9401937a 100644
--- a/examples/iris-classifier/models/keras.ipynb
+++ b/examples/iris-classifier/models/keras.ipynb
@@ -242,7 +242,7 @@
       },
       "source": [
         "\n",
-        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/master/examples/iris-classifier) for how to deploy the model as an API."
+        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/0.9/examples/iris-classifier) for how to deploy the model as an API."
       ]
     }
   ]
diff --git a/examples/iris-classifier/models/pytorch.ipynb b/examples/iris-classifier/models/pytorch.ipynb
index 2e16433452..ffee7aab92 100644
--- a/examples/iris-classifier/models/pytorch.ipynb
+++ b/examples/iris-classifier/models/pytorch.ipynb
@@ -274,7 +274,7 @@
       },
       "source": [
         "\n",
-        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/master/examples/iris-classifier) for how to deploy the model as an API."
+        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/0.9/examples/iris-classifier) for how to deploy the model as an API."
       ]
     }
   ]
diff --git a/examples/iris-classifier/models/sklearn.ipynb b/examples/iris-classifier/models/sklearn.ipynb
index f4d7fbba47..f66816348f 100644
--- a/examples/iris-classifier/models/sklearn.ipynb
+++ b/examples/iris-classifier/models/sklearn.ipynb
@@ -293,7 +293,7 @@
       },
       "source": [
         "\n",
-        "We also need to upload the mean and standard deviation, so that the [pre-inference request handler](https://github.com/cortexlabs/cortex/blob/master/examples/iris-classifier/handlers/sklearn.py) can normalize the data before making real-time predictions."
+        "We also need to upload the mean and standard deviation, so that the [pre-inference request handler](https://github.com/cortexlabs/cortex/blob/0.9/examples/iris-classifier/handlers/sklearn.py) can normalize the data before making real-time predictions."
       ]
     },
     {
@@ -347,7 +347,7 @@
       },
       "source": [
         "\n",
-        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/master/examples/iris-classifier) for how to deploy the model as an API."
+        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/0.9/examples/iris-classifier) for how to deploy the model as an API."
       ]
     }
   ]
diff --git a/examples/iris-classifier/models/tensorflow.ipynb b/examples/iris-classifier/models/tensorflow.ipynb
index 3b1877af64..f6e8ad97f6 100644
--- a/examples/iris-classifier/models/tensorflow.ipynb
+++ b/examples/iris-classifier/models/tensorflow.ipynb
@@ -287,7 +287,7 @@
       },
       "source": [
         "\n",
-        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/master/examples/iris-classifier) for how to deploy the model as an API."
+        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/0.9/examples/iris-classifier) for how to deploy the model as an API."
       ]
     }
   ]
diff --git a/examples/iris-classifier/models/xgboost.ipynb b/examples/iris-classifier/models/xgboost.ipynb
index 3243d8b5af..8643288abe 100644
--- a/examples/iris-classifier/models/xgboost.ipynb
+++ b/examples/iris-classifier/models/xgboost.ipynb
@@ -235,7 +235,7 @@
       },
       "source": [
         "\n",
-        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/master/examples/iris-classifier) for how to deploy the model as an API."
+        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/0.9/examples/iris-classifier) for how to deploy the model as an API."
       ]
     }
   ]
diff --git a/examples/sentiment-analysis/README.md b/examples/sentiment-analysis/README.md
index 5302e4d3c7..17522eafa0 100644
--- a/examples/sentiment-analysis/README.md
+++ b/examples/sentiment-analysis/README.md
@@ -19,7 +19,7 @@ A `deployment` specifies a set of resources that are deployed as a single unit.
 ```

-You can run the code that generated the exported BERT model [here](https://colab.research.google.com/github/cortexlabs/cortex/blob/master/examples/sentiment-analysis/bert.ipynb).
+You can run the code that generated the exported BERT model [here](https://colab.research.google.com/github/cortexlabs/cortex/blob/0.9/examples/sentiment-analysis/bert.ipynb).

 ## Add request handling
diff --git a/examples/sentiment-analysis/bert.ipynb b/examples/sentiment-analysis/bert.ipynb
index e28ac9f453..9bcad8b280 100644
--- a/examples/sentiment-analysis/bert.ipynb
+++ b/examples/sentiment-analysis/bert.ipynb
@@ -998,7 +998,7 @@
       },
       "source": [
         "\n",
-        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/master/examples/sentiment-analysis) for how to deploy the model as an API."
+        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/0.9/examples/sentiment-analysis) for how to deploy the model as an API."
       ]
     }
   ]
diff --git a/examples/text-generator/README.md b/examples/text-generator/README.md
index 3593fe2a44..9d3d787fb4 100644
--- a/examples/text-generator/README.md
+++ b/examples/text-generator/README.md
@@ -20,7 +20,7 @@ A `deployment` specifies a set of resources that are deployed as a single unit.
 ```

-You can run the code that generated the exported GPT-2 model [here](https://colab.research.google.com/github/cortexlabs/cortex/blob/master/examples/text-generator/gpt-2.ipynb).
+You can run the code that generated the exported GPT-2 model [here](https://colab.research.google.com/github/cortexlabs/cortex/blob/0.9/examples/text-generator/gpt-2.ipynb).

 ## Add request handling
diff --git a/examples/text-generator/gpt-2.ipynb b/examples/text-generator/gpt-2.ipynb
index 8d4d7fb92a..9af287e1c0 100644
--- a/examples/text-generator/gpt-2.ipynb
+++ b/examples/text-generator/gpt-2.ipynb
@@ -344,7 +344,7 @@
       },
       "source": [
         "\n",
-        "We also need to upload `vocab.bpe` and `encoder.json`, so that the [encoder](https://github.com/cortexlabs/cortex/blob/master/examples/text-generator/encoder.py) in the [pre-inference request handler](https://github.com/cortexlabs/cortex/blob/master/examples/text-generator/handler.py) can encode the input text before making a request to the model."
+        "We also need to upload `vocab.bpe` and `encoder.json`, so that the [encoder](https://github.com/cortexlabs/cortex/blob/0.9/examples/text-generator/encoder.py) in the [pre-inference request handler](https://github.com/cortexlabs/cortex/blob/0.9/examples/text-generator/handler.py) can encode the input text before making a request to the model."
       ]
     },
     {
@@ -374,7 +374,7 @@
       },
       "source": [
         "\n",
-        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/master/examples/text-generator) for how to deploy the model as an API."
+        "That's it! See the [example on GitHub](https://github.com/cortexlabs/cortex/tree/0.9/examples/text-generator) for how to deploy the model as an API."
       ]
     }
   ]
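To make the role of `vocab.bpe` and `encoder.json` concrete, here is a rough, hypothetical sketch of a pre-inference handler that uses them. The `pre_inference(sample, metadata)` signature, the `get_encoder` helper, and the `"context"` input name are assumptions for illustration only; the real handler lives at `examples/text-generator/handler.py` and is not shown in this diff.

```python
# Hypothetical sketch only -- the actual handler is examples/text-generator/handler.py.
from encoder import get_encoder  # assumed helper from the example's encoder.py

encoder = get_encoder()  # assumed to load vocab.bpe and encoder.json from S3 or disk


def pre_inference(sample, metadata):
    # Convert the raw request text into the token IDs the GPT-2 graph expects.
    tokens = encoder.encode(sample["text"])
    return {"context": [tokens]}  # "context" is an assumed model input name
```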
diff --git a/pkg/consts/consts.go b/pkg/consts/consts.go
index 3ed04b5cab..5cc8587083 100644
--- a/pkg/consts/consts.go
+++ b/pkg/consts/consts.go
@@ -17,7 +17,7 @@ limitations under the License.

 package consts

 var (
-	CortexVersion = "master" // CORTEX_VERSION
+	CortexVersion = "0.9.0" // CORTEX_VERSION

 	ContextCacheDir   = "/mnt/context"
 	EmptyDirMountPath = "/mnt"
diff --git a/pkg/workloads/cortex/client/cortex/client.py b/pkg/workloads/cortex/client/cortex/client.py
index 61563cff7b..75fe4b4caf 100644
--- a/pkg/workloads/cortex/client/cortex/client.py
+++ b/pkg/workloads/cortex/client/cortex/client.py
@@ -44,7 +44,7 @@ def __init__(self, aws_access_key_id, aws_secret_access_key, operator_url):
         self.aws_access_key_id = aws_access_key_id
         self.aws_secret_access_key = aws_secret_access_key
         self.headers = {
-            "CortexAPIVersion": "master",  # CORTEX_VERSION
+            "CortexAPIVersion": "0.9.0",  # CORTEX_VERSION
             "Authorization": "CortexAWS {}|{}".format(
                 self.aws_access_key_id, self.aws_secret_access_key
             ),
diff --git a/pkg/workloads/cortex/client/setup.py b/pkg/workloads/cortex/client/setup.py
index d2bf48c1ae..faa5daf660 100644
--- a/pkg/workloads/cortex/client/setup.py
+++ b/pkg/workloads/cortex/client/setup.py
@@ -16,7 +16,7 @@

 setup(
     name="cortex",
-    version="master",  # CORTEX_VERSION
+    version="0.9.0",  # CORTEX_VERSION
     description="",
     author="Cortex Labs",
     author_email="dev@cortexlabs.com",
diff --git a/pkg/workloads/cortex/consts.py b/pkg/workloads/cortex/consts.py
index 91c4a779fe..d2332cf1e6 100644
--- a/pkg/workloads/cortex/consts.py
+++ b/pkg/workloads/cortex/consts.py
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-CORTEX_VERSION = "master"
+CORTEX_VERSION = "0.9.0"
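For reference, the headers changed in `client.py` above can be reproduced with a plain HTTP call, presumably so the operator can check that the client and cluster are on the same version. A minimal sketch follows; the operator URL and endpoint path are placeholders rather than values taken from this diff.

```python
# Sketch only: header values mirror the client.py hunk above; the operator URL
# and the endpoint path are placeholders, not taken from this diff.
import requests

operator_url = "https://<operator-endpoint>"  # placeholder; from ./cortex.sh endpoints
headers = {
    "CortexAPIVersion": "0.9.0",  # CORTEX_VERSION
    "Authorization": "CortexAWS {}|{}".format("<aws_access_key_id>", "<aws_secret_access_key>"),
}

response = requests.get(operator_url + "/<endpoint>", headers=headers)  # placeholder path
print(response.status_code)
```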