Commit

fix ai21 tests
EyalPazz committed Jul 14, 2023
1 parent 82931cf · commit 8ab999e
Showing 2 changed files with 5 additions and 5 deletions.
llm_client/llm_api_client/ai21_client.py (2 changes: 1 addition & 1 deletion)
@@ -22,7 +22,7 @@ def __init__(self, config: LLMAPIClientConfig):
         self._headers[AUTH_HEADER] = BEARER_TOKEN + self._api_key
 
     async def text_completion(self, prompt: str, model: Optional[str] = None, max_tokens: int = 16,
-                              temperature: float = 0.7, top_p: float = 1 ,**kwargs) -> list[str]:
+                              temperature: float = 0.7, top_p: float = 1,**kwargs) -> list[str]:
         model = model or self._default_model
         kwargs[PROMPT_KEY] = prompt
         kwargs["topP"] = kwargs.pop("topP", top_p)
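
Note on the test change below: the expected request body switches "topP" from 0 to 1, which matches the default top_p=1 in the signature above. A minimal sketch of how that default would reach the request JSON, assuming PROMPT_KEY resolves to "prompt" and that maxTokens/temperature are mapped the same way topP is (neither mapping is shown in this hunk):

def _build_body(prompt, max_tokens=16, temperature=0.7, top_p=1, **kwargs):
    # Hypothetical helper for illustration only, not the client's actual code.
    kwargs["prompt"] = prompt                                      # PROMPT_KEY assumed to be "prompt"
    kwargs["topP"] = kwargs.pop("topP", top_p)                     # explicit topP wins, else default top_p=1
    kwargs["maxTokens"] = kwargs.pop("maxTokens", max_tokens)      # assumed to mirror the topP handling
    kwargs["temperature"] = kwargs.pop("temperature", temperature) # assumed to mirror the topP handling
    return kwargs

assert _build_body("These are a few of my favorite")["topP"] == 1                        # default case, as in the sanity test
assert _build_body("These are a few of my favorite", max_tokens=10)["maxTokens"] == 10   # kwargs case, as in the with_kwargs test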
tests/llm_api_client/ai21_client/test_ai21.py (8 changes: 4 additions & 4 deletions)
@@ -30,7 +30,7 @@ async def test_text_completion__sanity(mock_aioresponse, llm_client, url):
                       'friends, entertaining family...you get the point! One of my favorite things to do is plan parties']
     mock_aioresponse.assert_called_once_with(url, method='POST',
                                              headers={AUTH_HEADER: BEARER_TOKEN + llm_client._api_key },
-                                             json={'prompt': 'These are a few of my favorite', "maxTokens" : 16, "temperature" : 0.7, "topP" : 0 },
+                                             json={'prompt': 'These are a few of my favorite', "maxTokens" : 16, "temperature" : 0.7, "topP" : 1 },
                                              raise_for_status=True)


@@ -49,7 +49,7 @@ async def test_text_completion__return_multiple_completions(mock_aioresponse, ll
     ]
     mock_aioresponse.assert_called_once_with(url, method='POST',
                                              headers={AUTH_HEADER: BEARER_TOKEN + llm_client._api_key},
-                                             json={'prompt': 'These are a few of my favorite', "maxTokens" : 16, "temperature" : 0.7, "topP" : 0 },
+                                             json={'prompt': 'These are a few of my favorite', "maxTokens" : 16, "temperature" : 0.7, "topP" : 1 },
                                              raise_for_status=True)


@@ -69,7 +69,7 @@ async def test_text_completion__override_model(mock_aioresponse, llm_client):
                       'friends, entertaining family...you get the point! One of my favorite things to do is plan parties']
     mock_aioresponse.assert_called_once_with(url, method='POST',
                                              headers={AUTH_HEADER: BEARER_TOKEN + llm_client._api_key},
-                                             json={'prompt': 'These are a few of my favorite', "maxTokens" : 16, "temperature" : 0.7, "topP" : 0 },
+                                             json={'prompt': 'These are a few of my favorite', "maxTokens" : 16, "temperature" : 0.7, "topP" : 1 },
                                              raise_for_status=True)


@@ -87,7 +87,7 @@ async def test_text_completion__with_kwargs(mock_aioresponse, llm_client, url):
                       'friends, entertaining family...you get the point! One of my favorite things to do is plan parties']
     mock_aioresponse.assert_called_once_with(url, method='POST',
                                              headers={AUTH_HEADER: BEARER_TOKEN + llm_client._api_key},
-                                             json={'prompt': 'These are a few of my favorite', "maxTokens" : 10, "temperature" : 0.7 ,"topP" : 0},
+                                             json={'prompt': 'These are a few of my favorite', "maxTokens" : 10, "temperature" : 0.7 ,"topP" : 1},
                                              raise_for_status=True)

