diff --git a/cortex.sh b/cortex.sh
index 2103a39cca..b1b3a9f9dd 100755
--- a/cortex.sh
+++ b/cortex.sh
@@ -84,7 +84,7 @@ set -u
 ### CONFIGURATION ###
 #####################
 
-export CORTEX_VERSION_BRANCH_STABLE=0.9.1
+export CORTEX_VERSION_BRANCH_STABLE=0.9.2
 
 export CORTEX_CONFIG="${CORTEX_CONFIG:-""}"
 if [ "$CORTEX_CONFIG" != "" ]; then
diff --git a/docs/cluster/config.md b/docs/cluster/config.md
index 1e0cc0a466..a9d2313d68 100644
--- a/docs/cluster/config.md
+++ b/docs/cluster/config.md
@@ -37,23 +37,23 @@ export CORTEX_NODES_MIN=2
 export CORTEX_NODES_MAX=5
 
 # Image paths
-export CORTEX_IMAGE_MANAGER="cortexlabs/manager:0.9.1"
-export CORTEX_IMAGE_FLUENTD="cortexlabs/fluentd:0.9.1"
-export CORTEX_IMAGE_STATSD="cortexlabs/statsd:0.9.1"
-export CORTEX_IMAGE_OPERATOR="cortexlabs/operator:0.9.1"
-export CORTEX_IMAGE_TF_SERVE="cortexlabs/tf-serve:0.9.1"
-export CORTEX_IMAGE_TF_API="cortexlabs/tf-api:0.9.1"
-export CORTEX_IMAGE_TF_SERVE_GPU="cortexlabs/tf-serve-gpu:0.9.1"
-export CORTEX_IMAGE_ONNX_SERVE="cortexlabs/onnx-serve:0.9.1"
-export CORTEX_IMAGE_ONNX_SERVE_GPU="cortexlabs/onnx-serve-gpu:0.9.1"
-export CORTEX_IMAGE_CLUSTER_AUTOSCALER="cortexlabs/cluster-autoscaler:0.9.1"
-export CORTEX_IMAGE_NVIDIA="cortexlabs/nvidia:0.9.1"
-export CORTEX_IMAGE_METRICS_SERVER="cortexlabs/metrics-server:0.9.1"
-export CORTEX_IMAGE_ISTIO_PROXY="cortexlabs/istio-proxy:0.9.1"
-export CORTEX_IMAGE_ISTIO_PILOT="cortexlabs/istio-pilot:0.9.1"
-export CORTEX_IMAGE_ISTIO_CITADEL="cortexlabs/istio-citadel:0.9.1"
-export CORTEX_IMAGE_ISTIO_GALLEY="cortexlabs/istio-galley:0.9.1"
-export CORTEX_IMAGE_DOWNLOADER="cortexlabs/downloader:0.9.1"
+export CORTEX_IMAGE_MANAGER="cortexlabs/manager:0.9.2"
+export CORTEX_IMAGE_FLUENTD="cortexlabs/fluentd:0.9.2"
+export CORTEX_IMAGE_STATSD="cortexlabs/statsd:0.9.2"
+export CORTEX_IMAGE_OPERATOR="cortexlabs/operator:0.9.2"
+export CORTEX_IMAGE_TF_SERVE="cortexlabs/tf-serve:0.9.2"
+export CORTEX_IMAGE_TF_API="cortexlabs/tf-api:0.9.2"
+export CORTEX_IMAGE_TF_SERVE_GPU="cortexlabs/tf-serve-gpu:0.9.2"
+export CORTEX_IMAGE_ONNX_SERVE="cortexlabs/onnx-serve:0.9.2"
+export CORTEX_IMAGE_ONNX_SERVE_GPU="cortexlabs/onnx-serve-gpu:0.9.2"
+export CORTEX_IMAGE_CLUSTER_AUTOSCALER="cortexlabs/cluster-autoscaler:0.9.2"
+export CORTEX_IMAGE_NVIDIA="cortexlabs/nvidia:0.9.2"
+export CORTEX_IMAGE_METRICS_SERVER="cortexlabs/metrics-server:0.9.2"
+export CORTEX_IMAGE_ISTIO_PROXY="cortexlabs/istio-proxy:0.9.2"
+export CORTEX_IMAGE_ISTIO_PILOT="cortexlabs/istio-pilot:0.9.2"
+export CORTEX_IMAGE_ISTIO_CITADEL="cortexlabs/istio-citadel:0.9.2"
+export CORTEX_IMAGE_ISTIO_GALLEY="cortexlabs/istio-galley:0.9.2"
+export CORTEX_IMAGE_DOWNLOADER="cortexlabs/downloader:0.9.2"
 
 # Flag to enable collecting error reports and usage stats. If flag is not set to either "true" or "false", you will be prompted.
 export CORTEX_ENABLE_TELEMETRY=""
diff --git a/docs/cluster/python-client.md b/docs/cluster/python-client.md
index 43c2f160c8..73cdd622fd 100644
--- a/docs/cluster/python-client.md
+++ b/docs/cluster/python-client.md
@@ -4,7 +4,7 @@ The Python client can be used to programmatically deploy models to a Cortex Clus
 
 ```bash
-pip install git+https://github.com/cortexlabs/cortex.git@v0.9.1#egg=cortex\&subdirectory=pkg/workloads/cortex/client
+pip install git+https://github.com/cortexlabs/cortex.git@v0.9.2#egg=cortex\&subdirectory=pkg/workloads/cortex/client
 ```
 
 The Python client needs to be initialized with AWS credentials and an operator URL for your Cortex cluster. You can find the operator URL by running `./cortex.sh info`.