
Commit

update requirements
Signed-off-by: Vladimir Mandic <[email protected]>
vladmandic committed Sep 30, 2024
1 parent ddf56f0 commit 492ee38
Showing 2 changed files with 34 additions and 30 deletions.
56 changes: 30 additions & 26 deletions modules/vqa.py
@@ -3,7 +3,7 @@
 import transformers
 import transformers.dynamic_module_utils
 from PIL import Image
-from modules import shared, devices
+from modules import shared, devices, errors
 
 
 processor = None
@@ -181,31 +181,35 @@ def get_imports(f):
 
 
 def interrogate(vqa_question, vqa_image, vqa_model_req):
-    vqa_model = MODELS.get(vqa_model_req, None)
-    shared.log.debug(f'VQA: model="{vqa_model}" question="{vqa_question}" image={vqa_image}')
-    if vqa_image is None:
-        answer = 'no image provided'
-        return answer
-    if vqa_model_req is None:
-        answer = 'no model selected'
-        return answer
-    if vqa_model is None:
-        answer = f'unknown: model={vqa_model_req} available={MODELS.keys()}'
-        return answer
-    if 'git' in vqa_model.lower():
-        answer = git(vqa_question, vqa_image, vqa_model)
-    elif 'vilt' in vqa_model.lower():
-        answer = vilt(vqa_question, vqa_image, vqa_model)
-    elif 'blip' in vqa_model.lower():
-        answer = blip(vqa_question, vqa_image, vqa_model)
-    elif 'pix' in vqa_model.lower():
-        answer = pix(vqa_question, vqa_image, vqa_model)
-    elif 'moondream2' in vqa_model.lower():
-        answer = moondream(vqa_question, vqa_image, vqa_model)
-    elif 'florence' in vqa_model.lower():
-        answer = florence(vqa_question, vqa_image, vqa_model)
-    else:
-        answer = 'unknown model'
+    try:
+        vqa_model = MODELS.get(vqa_model_req, None)
+        shared.log.debug(f'VQA: model="{vqa_model}" question="{vqa_question}" image={vqa_image}')
+        if vqa_image is None:
+            answer = 'no image provided'
+            return answer
+        if vqa_model_req is None:
+            answer = 'no model selected'
+            return answer
+        if vqa_model is None:
+            answer = f'unknown: model={vqa_model_req} available={MODELS.keys()}'
+            return answer
+        if 'git' in vqa_model.lower():
+            answer = git(vqa_question, vqa_image, vqa_model)
+        elif 'vilt' in vqa_model.lower():
+            answer = vilt(vqa_question, vqa_image, vqa_model)
+        elif 'blip' in vqa_model.lower():
+            answer = blip(vqa_question, vqa_image, vqa_model)
+        elif 'pix' in vqa_model.lower():
+            answer = pix(vqa_question, vqa_image, vqa_model)
+        elif 'moondream2' in vqa_model.lower():
+            answer = moondream(vqa_question, vqa_image, vqa_model)
+        elif 'florence' in vqa_model.lower():
+            answer = florence(vqa_question, vqa_image, vqa_model)
+        else:
+            answer = 'unknown model'
+    except Exception as e:
+        errors.display(e, 'VQA')
+        answer = 'error'
     if model is not None:
         model.to(devices.cpu)
     devices.torch_gc()
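
The functional change in modules/vqa.py is a guard around the backend dispatch plus device cleanup afterwards: any exception raised by a backend is reported through errors.display, the answer falls back to 'error', and the loaded model is still moved off the GPU. Below is a minimal, self-contained sketch of that pattern, not the repository's code: run_backend and report_error are hypothetical stand-ins for the git/vilt/blip/... helpers and errors.display, and it uses try/finally, a slight variation that also performs the cleanup on the early-return path.

# Hypothetical sketch of the guard-and-cleanup pattern introduced by this commit.
from typing import Callable, Optional
import torch

def interrogate_sketch(question: str, image, model: Optional[torch.nn.Module],
                       run_backend: Callable, report_error: Callable) -> str:
    answer = 'error'
    try:
        if image is None:
            return 'no image provided'
        answer = run_backend(question, image)   # may raise: missing weights, OOM, bad input
    except Exception as e:
        report_error(e, 'VQA')                  # mirrors errors.display(e, 'VQA')
    finally:
        if model is not None:
            model.to('cpu')                     # offload weights from the GPU
        if torch.cuda.is_available():
            torch.cuda.empty_cache()            # rough analogue of devices.torch_gc()
    return answer

In the actual module the cleanup runs after the try/except rather than in a finally, so the early return for missing input skips the offload; the behavior on the error path is the same.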
8 changes: 4 additions & 4 deletions requirements.txt
@@ -38,13 +38,13 @@ torchsde==0.2.6
 open-clip-torch
 clip-interrogator==0.6.0
 antlr4-python3-runtime==4.9.3
-requests==2.31.0
-tqdm==4.66.4
+requests==2.32.3
+tqdm==4.66.5
 accelerate==0.34.2
 opencv-contrib-python-headless==4.9.0.80
 einops==0.4.1
 gradio==3.43.2
-huggingface_hub==0.24.6
+huggingface_hub==0.25.1
 numexpr==2.8.8
 numpy==1.26.4
 numba==0.59.1
@@ -53,7 +53,7 @@ scipy
 pandas
 protobuf==4.25.3
 pytorch_lightning==1.9.4
-tokenizers==0.19.1
+tokenizers==0.20.0
 transformers==4.45.1
 urllib3==1.26.19
 Pillow==10.4.0
