Update llmServer.py for status check
seokho-son authored Feb 27, 2024
1 parent e3d86ed · commit e16bcb0
Showing 1 changed file with 4 additions and 2 deletions.
6 changes: 4 additions & 2 deletions scripts/usecases/llm/llmServer.py
@@ -10,6 +10,7 @@

 # Global variable to indicate model loading status
 model="tiiuae/falcon-7b-instruct"

 model_loaded = False
 llm = None

@@ -29,7 +30,8 @@ async def startup_event():

 @app.get("/status")
 def get_status():
     # Endpoint to return the model loading status
     if not model_loaded:
         return {"model": model, "loaded": model_loaded, "message": "Model is not loaded yet."}
     return {"model": model, "loaded": model_loaded}

 # Common function to generate text based on the prompt
@@ -60,4 +62,4 @@ async def query_post(request: Request) -> JSONResponse:

 if __name__ == "__main__":
     uvicorn.run(app, host="0.0.0.0", port=port)

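The updated /status response lets callers poll the server and wait for the model to finish loading before sending generation requests. Below is a minimal client sketch, assuming the server runs on localhost:8000 and exposes its generation endpoint at POST /query with a "prompt" field; none of the port, route path, or request schema is visible in this diff, so treat them as placeholders to adjust against the actual llmServer.py.

# Minimal client sketch for the /status endpoint added in this commit.
# Assumptions (not taken from the diff): base URL, /query route, and the
# "prompt" request field are placeholders.
import time

import requests

BASE_URL = "http://localhost:8000"  # assumed host/port


def wait_until_loaded(timeout_s: float = 300.0, poll_s: float = 5.0) -> bool:
    """Poll /status until the server reports loaded=True or the timeout expires."""
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        try:
            status = requests.get(f"{BASE_URL}/status", timeout=10).json()
            # While loading, the response also carries a "message" field.
            print(status)
            if status.get("loaded"):
                return True
        except requests.RequestException as exc:
            print(f"Server not reachable yet: {exc}")
        time.sleep(poll_s)
    return False


if __name__ == "__main__":
    if wait_until_loaded():
        # Hypothetical request body; the field name used by the query
        # endpoint is not shown in this diff.
        resp = requests.post(f"{BASE_URL}/query", json={"prompt": "Hello"}, timeout=120)
        print(resp.json())
    else:
        print("Model did not finish loading in time.")

Polling /status this way avoids sending a generation request while the Falcon model is still loading, which is exactly the case the added "message" field signals.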