Skip to content

Commit

Permalink
Merge pull request #100 from ibm-granite/inference_service_dir_refactor
Browse files Browse the repository at this point in the history
Inference service directory refactor
  • Loading branch information
wgifford authored Aug 2, 2024
2 parents b77b3dd + 88a1748 commit bd5ed5f
Show file tree
Hide file tree
Showing 16 changed files with 37 additions and 56 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/inference-services-test.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python

name: Inference Service Tests
name: Tsfminference Service Tests

on:
push:
Expand Down Expand Up @@ -32,7 +32,7 @@ jobs:
pip install poetry
cd services/inference
poetry install -n --with dev
- name: Test local server inference service with pytest
- name: Test local server tsfminference service with pytest
run: |
source .venv/bin/activate
cd services/inference
Expand Down
2 changes: 1 addition & 1 deletion services/inference/.dockerignore
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
**
!tsfmservices
!tsfminference
!poetry.lock
!pyproject.toml

28 changes: 14 additions & 14 deletions services/inference/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,11 @@ RUN microdnf -y update && \

ENV POETRY_VIRTUALENVS_IN_PROJECT=1

COPY tsfmservices* /tsfmservices/tsfmservices
COPY pyproject.toml /tsfmservices/
COPY poetry.lock /tsfmservices/
WORKDIR /tsfmservices
RUN mkdir /inference
COPY tsfminference/* /inference/tsfminference/
COPY pyproject.toml /inference/
COPY poetry.lock /inference/
WORKDIR /inference
RUN pip3.11 install poetry && poetry install

FROM registry.access.redhat.com/ubi9/ubi-minimal:latest AS deploy
Expand All @@ -22,20 +23,19 @@ RUN microdnf -y update && \
shadow-utils python3.11 && \
microdnf clean all

WORKDIR /tsfmservices
WORKDIR /inference

COPY --from=builder /tsfmservices/ /tsfmservices/
COPY --from=builder /inference /inference

ENV VIRTUAL_ENV=/tsfmservices/.venv
ENV VIRTUAL_ENV=/inference/.venv
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
ENV HF_HOME=/tmp

RUN groupadd --system tsfmservices --gid 1001 && \
adduser --system --uid 1001 --gid 0 --groups tsfmservices \
--create-home --home-dir /tsfmservices --shell /sbin/nologin \
--comment "TSFMServices User" tsfmservices
RUN groupadd --system tsfminference --gid 1001 && \
adduser --system --uid 1001 --gid 0 --groups tsfminference \
--create-home --home-dir /inference --shell /sbin/nologin \
--comment "tsfminference User" tsfminference

USER tsfmservices
USER tsfminference

# CMD ["python", "-m", "tsfmservices.inference.main"]
CMD ["python", "-m", "uvicorn","tsfmservices.inference.main:app", "--host", "0.0.0.0", "--port", "8000" ]
CMD ["python", "-m", "uvicorn","tsfminference.main:app", "--host", "0.0.0.0", "--port", "8000" ]
10 changes: 5 additions & 5 deletions services/inference/Makefile
Original file line number Diff line number Diff line change
@@ -1,18 +1,18 @@
CONTAINER_BUILDER ?= docker

# starts the inference service (used mainly for test cases)
# starts the tsfminference service (used mainly for test cases)
start_service_local:
python -m tsfmservices.inference.main &
python -m tsfminference.main &
sleep 10
stop_service_local:
pkill -f 'python.*tsfmservices.*'
pkill -f 'python.*tsfminference.*'
sleep 10

image:
$(CONTAINER_BUILDER) build -t tsfmservices -f Dockerfile .
$(CONTAINER_BUILDER) build -t tsfminference -f Dockerfile .

start_service_image: image
$(CONTAINER_BUILDER) run -p 8000:8000 -d --rm --name tsfmserver tsfmservices
$(CONTAINER_BUILDER) run -p 8000:8000 -d --rm --name tsfmserver tsfminference
sleep 10
stop_service_image:
$(CONTAINER_BUILDER) stop tsfmserver
Expand Down
6 changes: 3 additions & 3 deletions services/inference/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -45,11 +45,11 @@ CONTAINER_BUILDER=<docker|podman> make image
# e.g, CONTAINER_BUILDER=docker make image
```

After a successful build you should have a local image named `tsfmservices:latest`
After a successful build you should have a local image named `tsfminference:latest`

```sh
(py311) ➜ tsfm-services git:(revised-build-system) ✗ docker images | grep tsfmservices | head -n 1
tsfmservices latest df592dcb0533 46 seconds ago 1.49GB
(py311) ➜ tsfm-services git:(revised-build-system) ✗ docker images | grep tsfminference | head -n 1
tsfminference latest df592dcb0533 46 seconds ago 1.49GB
# some of the numeric and hash values on your machine could be different
```

Expand Down
6 changes: 3 additions & 3 deletions services/inference/pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,18 +1,18 @@
[tool.poetry]
name = "tsfmservices"
name = "tsfminference"
version = "0.0.1"
description = "Service layer for TSFM granite models."
authors = ["IBM"]
license = "https://github.com/ibm-granite/granite-tsfm/blob/main/LICENSE"
packages = [{ include = "tsfmservices/**/*.py" }]
packages = [{ include = "tsfminference/**/*.py" }]

[tool.poetry.dependencies]
# including 3.9 causes poetry lock to run forever
python = ">=3.10,<3.12"
numpy = { version = "<2" }
tsfm_public = { git = "https://github.com/IBM/tsfm.git", tag = "v0.2.5" }

# trying to pick up cpu version for inference
# trying to pick up cpu version for inference
# to make image smaller
torch = { version = ">2,<3", source = "pytorch" }

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,23 +26,9 @@
"%(levelname)s:p-%(process)d:t-%(thread)d:%(filename)s:%(funcName)s:%(message)s",
)

# Do we have redis?

try:
# Third Party
import redis
import rq

HAVE_REDIS_BACKEND = True
except ImportError:
HAVE_REDIS_BACKEND = False

TSFM_USE_KFTO_ASYNC_BACKEND = int(os.getenv("TSFM_USE_KFTO_ASYNC_BACKEND", "0")) == 1
USE_REDIS_ASYNC_BACKEND = HAVE_REDIS_BACKEND and not TSFM_USE_KFTO_ASYNC_BACKEND

TSFM_ALLOW_LOAD_FROM_HF_HUB = int(os.getenv("TSFM_ALLOW_LOAD_FROM_HF_HUB", "1")) == 1

TSFM_CONFIG_FILE = os.getenv(
"TSFM_CONFIG_FILE",
os.path.realpath(os.path.join(os.path.dirname(__file__), "config", "default_config.yml")),
os.path.realpath(os.path.join(os.path.dirname(__file__), "default_config.yml")),
)
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Copyright contributors to the TSFM project
#
"""Inference Runtime"""
"""Tsfminference Runtime"""

import datetime
import logging
Expand All @@ -11,11 +11,11 @@
from fastapi import APIRouter, HTTPException

from tsfm_public import TimeSeriesForecastingPipeline, TimeSeriesPreprocessor
from tsfmservices import TSFM_ALLOW_LOAD_FROM_HF_HUB

from ..common.constants import API_VERSION
from ..common.util import load_config, load_model, register_config
from . import TSFM_ALLOW_LOAD_FROM_HF_HUB
from .constants import API_VERSION
from .payloads import ForecastingInferenceInput, PredictOutput
from .util import load_config, load_model, register_config


LOGGER = logging.getLogger(__file__)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,13 @@
import yaml
from fastapi import FastAPI

from tsfmservices import (
from . import (
TSFM_CONFIG_FILE,
TSFM_PYTHON_LOGGING_FORMAT,
TSFM_PYTHON_LOGGING_LEVEL,
)
from tsfmservices.common.constants import API_VERSION
from tsfmservices.inference import InferenceRuntime
from .constants import API_VERSION
from .inference import InferenceRuntime


logging.basicConfig(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Copyright contributors to the TSFM project
#
"""Payload definitions for inference"""
"""Payload definitions for tsfminference"""

from typing import Annotated, Any, Dict, List, Optional

Expand Down
File renamed without changes.
2 changes: 0 additions & 2 deletions services/inference/tsfmservices/common/__init__.py

This file was deleted.

3 changes: 0 additions & 3 deletions services/inference/tsfmservices/inference/__init__.py

This file was deleted.

0 comments on commit bd5ed5f

Please sign in to comment.