Add OpenAI client access to OPEA microservice UT cases #653

Merged 12 commits on Sep 11, 2024
12 changes: 12 additions & 0 deletions tests/embeddings/test_embeddings_tei_langchain.sh
@@ -46,6 +46,16 @@ function validate_microservice() {
    fi
}

function validate_microservice_with_openai() {
    tei_service_port=5001
    python3 ${WORKPATH}/tests/utils/validate_svc_with_openai.py "$ip_address" "$tei_service_port" "embedding"
    if [ $? -ne 0 ]; then
        docker logs test-comps-embedding-tei-endpoint
        docker logs test-comps-embedding-tei-server
        exit 1
    fi
}

function stop_docker() {
    cid=$(docker ps -aq --filter "name=test-comps-embedding-*")
    if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi
@@ -59,6 +69,8 @@ function main() {
    start_service

    validate_microservice
    pip install openai
    validate_microservice_with_openai

    stop_docker
    echo y | docker system prune
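For reference, the new helper drives the service through the same OpenAI-compatible REST route that the client library calls. A rough manual equivalent with curl, assuming the embedding service is up and reachable on localhost at port 5001 (illustrative address, not taken from the PR), would be:

curl -s http://localhost:5001/v1/embeddings \
    -H "Content-Type: application/json" \
    -d '{"model": "tei", "input": "What is Deep Learning?"}'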
19 changes: 16 additions & 3 deletions tests/llms/test_llms_text-generation_tgi.sh
@@ -60,6 +60,16 @@ function validate_microservice() {
    fi
}

function validate_microservice_with_openai() {
    llm_service_port=5005
    python3 ${WORKPATH}/tests/utils/validate_svc_with_openai.py "$ip_address" "$llm_service_port" "llm"
    if [ $? -ne 0 ]; then
        docker logs test-comps-llm-tgi-endpoint >> ${LOG_PATH}/llm-tgi.log
        docker logs test-comps-llm-tgi-server >> ${LOG_PATH}/llm-tgi-server.log
        exit 1
    fi
}

function stop_docker() {
    cid=$(docker ps -aq --filter "name=test-comps-llm-tgi*")
    if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi
@@ -70,15 +80,18 @@ function main() {
    stop_docker
    build_docker_images

    pip install openai

    llm_models=(
        Intel/neural-chat-7b-v3-3
-       meta-llama/Llama-2-7b-chat-hf
-       meta-llama/Meta-Llama-3-8B-Instruct
-       microsoft/Phi-3-mini-4k-instruct
+       # meta-llama/Llama-2-7b-chat-hf
+       # meta-llama/Meta-Llama-3-8B-Instruct
+       # microsoft/Phi-3-mini-4k-instruct
    )
    for model in "${llm_models[@]}"; do
        start_service "${model}"
        validate_microservice
        validate_microservice_with_openai
        stop_docker
    done
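Likewise for the LLM case, a sketch of the equivalent manual request against the chat-completions route, assuming the TGI-backed service listens on localhost at port 5005 (illustrative address, not taken from the PR):

curl -s http://localhost:5005/v1/chat/completions \
    -H "Content-Type: application/json" \
    -d '{"model": "tgi", "messages": [{"role": "user", "content": "What is Deep Learning?"}], "max_tokens": 128}'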

42 changes: 42 additions & 0 deletions tests/utils/validate_svc_with_openai.py
@@ -0,0 +1,42 @@
#!/usr/bin/env python3
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import os
import sys

import openai


def validate_svc(ip_address, service_port, service_type):
    # The local services do not verify the key; fall back to a dummy value when unset.
    openai.api_key = os.getenv("OPENAI_API_KEY", "empty")

    endpoint = f"http://{ip_address}:{service_port}"
    client = openai.OpenAI(
        api_key=openai.api_key,
        base_url=endpoint + "/v1",
    )

    if service_type == "llm":
        # Chat completions expect a list of role/content messages, not a bare string.
        response = client.chat.completions.create(
            model="tgi",
            messages=[{"role": "user", "content": "What is Deep Learning?"}],
            max_tokens=128,
        )
        result = response.choices[0].message.content.strip()
        passed = "Deep Learning is" in result
    elif service_type == "embedding":
        response = client.embeddings.create(model="tei", input="What is Deep Learning?")
        result = response.data[0].embedding
        passed = bool(result)
    else:
        print(f"Unknown service type: {service_type}")
        sys.exit(1)

    if passed:
        print("Result correct.")
    else:
        print(f"Result wrong. Received: {result}")
        sys.exit(1)


if __name__ == "__main__":
    if len(sys.argv) != 4:
        print("Usage: python3 validate_svc_with_openai.py <ip_address> <service_port> <service_type>")
        sys.exit(1)
    ip_address = sys.argv[1]
    service_port = sys.argv[2]
    service_type = sys.argv[3]
    validate_svc(ip_address, service_port, service_type)
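The shell tests above invoke this helper with the service IP, port, and service type; a manual run against locally started services (addresses and ports illustrative) would look like:

python3 tests/utils/validate_svc_with_openai.py 127.0.0.1 5001 embedding
python3 tests/utils/validate_svc_with_openai.py 127.0.0.1 5005 llm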