From 2830bcadc9a085765020838b6a08f99ab963aef2 Mon Sep 17 00:00:00 2001
From: David Eliahu
Date: Fri, 1 Nov 2019 11:28:39 -0700
Subject: [PATCH] Update stable version to 0.9.2

---
 cortex.sh                     |  2 +-
 docs/cluster/config.md        | 34 +++++++++++++++++-----------------
 docs/cluster/python-client.md |  2 +-
 3 files changed, 19 insertions(+), 19 deletions(-)

diff --git a/cortex.sh b/cortex.sh
index 2103a39cca..b1b3a9f9dd 100755
--- a/cortex.sh
+++ b/cortex.sh
@@ -84,7 +84,7 @@ set -u
 ### CONFIGURATION ###
 #####################
 
-export CORTEX_VERSION_BRANCH_STABLE=0.9.1
+export CORTEX_VERSION_BRANCH_STABLE=0.9.2
 
 export CORTEX_CONFIG="${CORTEX_CONFIG:-""}"
 if [ "$CORTEX_CONFIG" != "" ]; then
diff --git a/docs/cluster/config.md b/docs/cluster/config.md
index 1e0cc0a466..a9d2313d68 100644
--- a/docs/cluster/config.md
+++ b/docs/cluster/config.md
@@ -37,23 +37,23 @@ export CORTEX_NODES_MIN=2
 export CORTEX_NODES_MAX=5
 
 # Image paths
-export CORTEX_IMAGE_MANAGER="cortexlabs/manager:0.9.1"
-export CORTEX_IMAGE_FLUENTD="cortexlabs/fluentd:0.9.1"
-export CORTEX_IMAGE_STATSD="cortexlabs/statsd:0.9.1"
-export CORTEX_IMAGE_OPERATOR="cortexlabs/operator:0.9.1"
-export CORTEX_IMAGE_TF_SERVE="cortexlabs/tf-serve:0.9.1"
-export CORTEX_IMAGE_TF_API="cortexlabs/tf-api:0.9.1"
-export CORTEX_IMAGE_TF_SERVE_GPU="cortexlabs/tf-serve-gpu:0.9.1"
-export CORTEX_IMAGE_ONNX_SERVE="cortexlabs/onnx-serve:0.9.1"
-export CORTEX_IMAGE_ONNX_SERVE_GPU="cortexlabs/onnx-serve-gpu:0.9.1"
-export CORTEX_IMAGE_CLUSTER_AUTOSCALER="cortexlabs/cluster-autoscaler:0.9.1"
-export CORTEX_IMAGE_NVIDIA="cortexlabs/nvidia:0.9.1"
-export CORTEX_IMAGE_METRICS_SERVER="cortexlabs/metrics-server:0.9.1"
-export CORTEX_IMAGE_ISTIO_PROXY="cortexlabs/istio-proxy:0.9.1"
-export CORTEX_IMAGE_ISTIO_PILOT="cortexlabs/istio-pilot:0.9.1"
-export CORTEX_IMAGE_ISTIO_CITADEL="cortexlabs/istio-citadel:0.9.1"
-export CORTEX_IMAGE_ISTIO_GALLEY="cortexlabs/istio-galley:0.9.1"
-export CORTEX_IMAGE_DOWNLOADER="cortexlabs/downloader:0.9.1"
+export CORTEX_IMAGE_MANAGER="cortexlabs/manager:0.9.2"
+export CORTEX_IMAGE_FLUENTD="cortexlabs/fluentd:0.9.2"
+export CORTEX_IMAGE_STATSD="cortexlabs/statsd:0.9.2"
+export CORTEX_IMAGE_OPERATOR="cortexlabs/operator:0.9.2"
+export CORTEX_IMAGE_TF_SERVE="cortexlabs/tf-serve:0.9.2"
+export CORTEX_IMAGE_TF_API="cortexlabs/tf-api:0.9.2"
+export CORTEX_IMAGE_TF_SERVE_GPU="cortexlabs/tf-serve-gpu:0.9.2"
+export CORTEX_IMAGE_ONNX_SERVE="cortexlabs/onnx-serve:0.9.2"
+export CORTEX_IMAGE_ONNX_SERVE_GPU="cortexlabs/onnx-serve-gpu:0.9.2"
+export CORTEX_IMAGE_CLUSTER_AUTOSCALER="cortexlabs/cluster-autoscaler:0.9.2"
+export CORTEX_IMAGE_NVIDIA="cortexlabs/nvidia:0.9.2"
+export CORTEX_IMAGE_METRICS_SERVER="cortexlabs/metrics-server:0.9.2"
+export CORTEX_IMAGE_ISTIO_PROXY="cortexlabs/istio-proxy:0.9.2"
+export CORTEX_IMAGE_ISTIO_PILOT="cortexlabs/istio-pilot:0.9.2"
+export CORTEX_IMAGE_ISTIO_CITADEL="cortexlabs/istio-citadel:0.9.2"
+export CORTEX_IMAGE_ISTIO_GALLEY="cortexlabs/istio-galley:0.9.2"
+export CORTEX_IMAGE_DOWNLOADER="cortexlabs/downloader:0.9.2"
 
 # Flag to enable collecting error reports and usage stats. If flag is not set to either "true" or "false", you will be prompted.
 export CORTEX_ENABLE_TELEMETRY=""
diff --git a/docs/cluster/python-client.md b/docs/cluster/python-client.md
index 43c2f160c8..73cdd622fd 100644
--- a/docs/cluster/python-client.md
+++ b/docs/cluster/python-client.md
@@ -4,7 +4,7 @@ The Python client can be used to programmatically deploy models to a Cortex Clus
 
 ```bash
-pip install git+https://github.com/cortexlabs/cortex.git@v0.9.1#egg=cortex\&subdirectory=pkg/workloads/cortex/client
+pip install git+https://github.com/cortexlabs/cortex.git@v0.9.2#egg=cortex\&subdirectory=pkg/workloads/cortex/client
 ```
 
 The Python client needs to be initialized with AWS credentials and an operator URL for your Cortex cluster. You can find the operator URL by running `./cortex.sh info`.
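
Note on the `docs/cluster/python-client.md` hunk above: the doc states that the client is initialized with AWS credentials and the operator URL reported by `./cortex.sh info`. A minimal sketch of that initialization is below; the `cortex.Client` constructor and its keyword arguments are assumptions for illustration only (the actual interface lives in `pkg/workloads/cortex/client`), and the credential and URL values are placeholders.

```python
# Sketch only -- the Client constructor and its argument names are assumptions,
# not the confirmed 0.9.2 API; see pkg/workloads/cortex/client for the real interface.
import cortex

client = cortex.Client(
    aws_access_key_id="<AWS_ACCESS_KEY_ID>",          # placeholder credential
    aws_secret_access_key="<AWS_SECRET_ACCESS_KEY>",  # placeholder credential
    operator_url="<OPERATOR_URL>",                    # value printed by `./cortex.sh info`
)
```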