
add fastapi support to Helsinki component
heinpa committed Aug 22, 2024
1 parent 447f0ca commit 532d286
Showing 5 changed files with 89 additions and 45 deletions.
27 changes: 22 additions & 5 deletions qanary-component-MT-Python-HelsinkiNLP/boot.sh
@@ -1,7 +1,26 @@
#!/bin/sh
export $(grep -v "^#" < .env)

# check required parameters
declare -a required_vars=(
"SPRING_BOOT_ADMIN_URL"
"SERVER_HOST"
"SERVER_PORT"
"SPRING_BOOT_ADMIN_USERNAME"
"SPRING_BOOT_ADMIN_PASSWORD"
"SERVICE_NAME_COMPONENT"
"SERVICE_DESCRIPTION_COMPONENT"
# TODO: other?
)

export $(grep -v '^#' .env | xargs)
for param in ${required_vars[@]};
do
if [[ -z ${!param} ]]; then
echo "Required variable \"$param\" is not set!"
echo "The required variables are: ${required_vars[@]}"
exit 4
fi
done

echo Downloading the models

@@ -10,8 +29,6 @@ python -c "from utils.model_utils import load_models_and_tokenizers; SUPPORTED_L
echo Downloading the model finished

echo The port number is: $SERVER_PORT
echo The host is: $SERVER_HOST
echo The Qanary pipeline URL is: $SPRING_BOOT_ADMIN_URL
if [ -n $SERVER_PORT ]
then
exec gunicorn -b :$SERVER_PORT --access-logfile - --error-logfile - run:app # refer to the gunicorn documentation for more options
fi
exec uvicorn run:app --host 0.0.0.0 --port $SERVER_PORT --log-level warning
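
For local testing, the startup that boot.sh performs with uvicorn can also be reproduced programmatically. The following is a minimal sketch (not part of the commit); it assumes run.py exposes the FastAPI instance as app, as implied by the run:app target above, and uses 8080 only as a placeholder default when SERVER_PORT is unset.

# minimal sketch: start the FastAPI app the same way boot.sh does with uvicorn
# (assumes run.py exposes the FastAPI instance as "app"; 8080 is a placeholder default)
import os

import uvicorn

if __name__ == "__main__":
    port = int(os.environ.get("SERVER_PORT", "8080"))
    uvicorn.run("run:app", host="0.0.0.0", port=port, log_level="warning")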
36 changes: 26 additions & 10 deletions qanary-component-MT-Python-HelsinkiNLP/component/__init__.py
@@ -1,5 +1,6 @@
from component.mt_helsinki_nlp import mt_helsinki_nlp_bp
from flask import Flask
from component import mt_helsinki_nlp
from fastapi import FastAPI
from fastapi.responses import RedirectResponse, Response, JSONResponse

version = "0.2.0"

@@ -8,20 +9,35 @@

# endpoint for health information of the service required for Spring Boot Admin server callback
healthendpoint = "/health"

aboutendpoint = "/about"
translateendpoint = "/translate"
# TODO: add languages endpoint?

# initialize Flask app and add the externalized service information
app = Flask(__name__)
app.register_blueprint(mt_helsinki_nlp_bp)
app = FastAPI(docs_url="/swagger-ui.html")
app.include_router(mt_helsinki_nlp.router)


@app.get("/")
async def main():
return RedirectResponse("/about")


@app.route(healthendpoint, methods=['GET'])
@app.get(healthendpoint)
def health():
"""required health endpoint for callback of Spring Boot Admin server"""
return "alive"
return Response("alive", media_type="text/plain")

@app.route(aboutendpoint, methods=['GET'])
@app.get(aboutendpoint)
def about():
"""required about endpoint for callback of Spring Boot Admin server"""
return "about"
"""required about endpoint for callback of Srping Boot Admin server"""
return Response("Translates questions into English", media_type="text/plain")

@app.get(translateendpoint+"_to_one", description="", tags=["Translate"])
def translate_to_one(text: str, source_lang: str, target_lang: str):
return JSONResponse(mt_helsinki_nlp.translate_to_one(text, source_lang, target_lang))

@app.get(translateendpoint+"_to_all", description="", tags=["Translate"])
def translate_to_all(text: str, source_lang: str):
return JSONResponse(mt_helsinki_nlp.translate_to_all(text, source_lang))
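
The two new GET endpoints can be exercised with a plain HTTP client once the component is running. A minimal sketch, assuming the service listens on localhost:8080 (placeholder) and that German-to-English is among the configured translation_options:

# minimal sketch: query the new /translate_to_one and /translate_to_all endpoints
# (localhost:8080 and the "de" -> "en" language pair are assumptions)
import requests

BASE = "http://localhost:8080"

one = requests.get(f"{BASE}/translate_to_one",
                   params={"text": "Wie alt ist das Universum?",
                           "source_lang": "de", "target_lang": "en"})
print(one.json())        # e.g. {"en": "..."}

all_langs = requests.get(f"{BASE}/translate_to_all",
                         params={"text": "Wie alt ist das Universum?",
                                 "source_lang": "de"})
print(all_langs.json())  # list of {target_lang: translation} entries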

66 changes: 38 additions & 28 deletions qanary-component-MT-Python-HelsinkiNLP/component/mt_helsinki_nlp.py
@@ -1,14 +1,15 @@
import logging
import os
from flask import Blueprint, jsonify, request
from qanary_helpers.qanary_queries import get_text_question_in_graph, insert_into_triplestore
from qanary_helpers.language_queries import get_translated_texts_in_triplestore, get_texts_with_detected_language_in_triplestore, question_text_with_language, create_annotation_of_question_language, create_annotation_of_question_translation
from utils.model_utils import load_models_and_tokenizers
from utils.lang_utils import translation_options
from fastapi import APIRouter, Request
from fastapi.responses import JSONResponse


logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.INFO)
mt_helsinki_nlp_bp = Blueprint('mt_helsinki_nlp_bp', __name__, template_folder='templates')
router = APIRouter()

SERVICE_NAME_COMPONENT = os.environ['SERVICE_NAME_COMPONENT']

@@ -30,6 +31,21 @@ def translate_input(text: str, source_lang: str, target_lang: str) -> str:
return translation


def translate_to_one(text: str, source_lang: str, target_lang: str):
translation = translate_input(text, source_lang, target_lang)
return {target_lang: translation}


def translate_to_all(text: str, source_lang: str):
translations = list()
for target_lang in translation_options[source_lang]:
translation = translate_input(text, source_lang, target_lang)
translations.append({
target_lang: translation
})
return translations


def find_source_texts_in_triplestore(triplestore_endpoint: str, graph_uri: str, lang: str) -> list[question_text_with_language]:
source_texts = []

@@ -52,15 +68,16 @@ def find_source_texts_in_triplestore(triplestore_endpoint: str, graph_uri: str,
return source_texts


@mt_helsinki_nlp_bp.route("/annotatequestion", methods=['POST'])
def qanary_service():
@router.post("/annotatequestion", desctiption="", tags=["Qanary"])
async def qanary_service(request: Request):
"""the POST endpoint required for a Qanary service"""

# Retrieve basic information about the current question process
request_json = await request.json()

triplestore_endpoint = request.json["values"]["urn:qanary#endpoint"]
triplestore_ingraph = request.json["values"]["urn:qanary#inGraph"]
triplestore_outgraph = request.json["values"]["urn:qanary#outGraph"]
triplestore_endpoint = request_json["values"]["urn:qanary#endpoint"]
triplestore_ingraph = request_json["values"]["urn:qanary#inGraph"]
triplestore_outgraph = request_json["values"]["urn:qanary#outGraph"]
logging.info("endpoint: %s, inGraph: %s, outGraph: %s" % (triplestore_endpoint, triplestore_ingraph, triplestore_outgraph))


@@ -91,32 +108,25 @@ def qanary_service():
)
insert_into_triplestore(triplestore_endpoint, SPARQLqueryAnnotationOfQuestionTranslation)

return jsonify(request.get_json())
return JSONResponse(request_json)


@mt_helsinki_nlp_bp.route("/translate_to_one_language", methods=['GET'])
def translate_to_one_language(question: str, source_language: str, target_language: str):
if (source_language in translation_options.keys()) and (target_language in translation_options.get(source_language, [])):
translation = translate_input(question, source_language, target_language)
return jsonify(translation)
def translate_to_one(text: str, source_lang: str, target_lang: str):
if (source_lang in translation_options.keys()) and (target_lang in translation_options.get(source_lang, [])):
translation = translate_input(text, source_lang, target_lang)
return {target_lang: translation}
else:
raise RuntimeError("Unsupported source and/or target language! Valid options: {to}".format(to=translation_options))


@mt_helsinki_nlp_bp.route("/translate_to_all_languages", methods=['GET'])
def translate_to_all_languages(question: str, source_language: str):
if source_language in translation_options.keys():
translations = dict()
for target_language in translation_options[source_language]:
translations[target_language] = translate_input(question, source_language, target_language)
return jsonify(translations)
def translate_to_all(text: str, source_lang: str):
if source_lang in translation_options.keys():
translations = list()
for target_lang in translation_options[source_lang]:
translation = translate_input(text, source_lang, target_lang)
translations.append({
target_lang: translation
})
return translations
else:
raise RuntimeError("Unsupported source language! Valid options: {to}".format(to=translation_options))


@mt_helsinki_nlp_bp.route("/", methods=['GET'])
def index():
"""an examplary GET endpoint returning "hello world (String)"""

logging.info("host_url: %s" % (request.host_url,))
return "Hi! \n This is Python MT Helsinki NLP component"
3 changes: 2 additions & 1 deletion qanary-component-MT-Python-HelsinkiNLP/requirements.txt
@@ -1,4 +1,4 @@
Flask==3.0.3
fastapi==0.109.1
pytest==8.3.2
pytest-env==1.1.3
qanary_helpers==0.2.2
@@ -7,3 +7,4 @@ SPARQLWrapper==2.0.0
torch==2.4.0
transformers==4.44.0
qanary-helpers==0.2.2
uvicorn==0.30.1
2 changes: 1 addition & 1 deletion qanary-component-MT-Python-NLLB/component/mt_nllb.py
@@ -74,7 +74,7 @@ def find_source_texts_in_triplestore(triplestore_endpoint: str, graph_uri: str,
return source_texts


@router.post("/annotatequestion", methods=["POST"])
@router.post("/annotatequestion")
async def qanary_service(request: Request):
"""the POST endpoint required for a Qanary service"""

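The change above drops the methods=["POST"] argument because FastAPI's APIRouter.post() already fixes the HTTP method through the decorator name; unlike Flask's route(), it does not accept a methods parameter. A methods list is only used with the generic api_route() decorator, as in this small sketch (the paths are illustrative):

# small sketch: with APIRouter the decorator name selects the HTTP method;
# a methods list is only passed to the generic api_route() form
from fastapi import APIRouter

router = APIRouter()

@router.post("/annotatequestion")                                  # form used in this commit
async def qanary_service():
    return {"ok": True}

@router.api_route("/annotatequestion-generic", methods=["POST"])   # generic alternative
async def qanary_service_generic():
    return {"ok": True}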
