Commit

Deprecate openai/chat-gpt in favor of openai/gpt-3.5-turbo-0613 (stan…
yifanmai authored and danielz02 committed Sep 7, 2023
1 parent f7c3aa9 commit 385da35
Showing 9 changed files with 0 additions and 376 deletions.
1 change: 0 additions & 1 deletion requirements-freeze.txt
@@ -127,7 +127,6 @@ regex==2022.9.13
  requests==2.28.1
  responses==0.18.0
  retrying==1.3.4
- revChatGPT==0.1.1
  rouge-score==0.1.2
  rsa==4.9
  s3transfer==0.6.0
1 change: 0 additions & 1 deletion setup.cfg
@@ -62,7 +62,6 @@ install_requires=
  anthropic~=0.2.5
  icetk~=0.0.4 # for ice_tokenizer_client
  openai~=0.27.8
- revChatGPT~=0.1.1
  sentencepiece~=0.1.97 # For palmyra_client and yalm_tokenizer
  tiktoken~=0.3.3 # for openai_client
  tokenizers~=0.13.3 # for aleph_alpha_client
220 changes: 0 additions & 220 deletions src/helm/benchmark/presentation/run_specs_chat_gpt.conf

This file was deleted.
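The deleted conf file pinned a suite of benchmark runs to the deprecated openai/chat-gpt model name. As a purely illustrative sketch (the entry format and helper below are assumptions, not taken from the deleted file), a downstream user carrying similar run-spec entries could migrate them like this:

    # Illustrative only: rewrite run-spec descriptions that still reference the
    # deprecated model name so they point at the replacement model instead.
    DEPRECATED = "openai/chat-gpt"
    REPLACEMENT = "openai/gpt-3.5-turbo-0613"

    def migrate_run_spec(description: str) -> str:
        """Replace the deprecated model name in a run-spec description string."""
        return description.replace(f"model={DEPRECATED}", f"model={REPLACEMENT}")

    if __name__ == "__main__":
        # Hypothetical entry; the real entries lived in run_specs_chat_gpt.conf.
        print(migrate_run_spec("mmlu:subject=anatomy,model=openai/chat-gpt"))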

1 change: 0 additions & 1 deletion src/helm/benchmark/run_expander.py
@@ -369,7 +369,6 @@ def values_dict(self):
  "full_functionality_text": get_model_names_with_tag(FULL_FUNCTIONALITY_TEXT_MODEL_TAG),
  "ai21/j1-jumbo": ["ai21/j1-jumbo"],
  "openai/curie": ["openai/curie"],
- "chat_run": ["openai/chat-gpt", "openai/text-davinci-003"], # Compare ChatGPT to text-davinci-003
  "all": get_all_models(),
  "text_code": get_all_text_models() + get_all_code_models(),
  "text": get_all_text_models(),
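For context, the values_dict entries above map a symbolic group name used during run expansion to a list of concrete model names; dropping the chat_run key removes the only grouping that referenced openai/chat-gpt. A minimal, self-contained sketch of that lookup pattern (names here are illustrative, not the repository's actual expander code):

    # Illustrative sketch of the group-name -> model-names mapping shown in the diff.
    VALUES_DICT = {
        "ai21/j1-jumbo": ["ai21/j1-jumbo"],
        "openai/curie": ["openai/curie"],
        # "chat_run" was removed by this commit along with openai/chat-gpt.
    }

    def expand_models(group: str) -> list:
        """Return the concrete model names for a group key; unknown keys pass through as-is."""
        return VALUES_DICT.get(group, [group])

    print(expand_models("openai/curie"))  # -> ['openai/curie']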
7 changes: 0 additions & 7 deletions src/helm/benchmark/static/schema.yaml
@@ -715,13 +715,6 @@ models:
  creator_organization: OpenAI
  access: limited
  release_date: 2023-06-13
- - name: openai/chat-gpt
-   display_name: ChatGPT
-   description: Sibling model to InstructGPT which interacts in a conversational way. See [OpenAI's announcement](https://openai.com/blog/chatgpt/). The size of the model is unknown.
-   creator_organization: OpenAI
-   access: limited
-   release_date: 2022-11-30
-   todo: true

  # Together
  - name: together/Together-gpt-JT-6B-v1
14 changes: 0 additions & 14 deletions src/helm/proxy/clients/auto_client.py
@@ -90,26 +90,12 @@ def _get_client(self, model: str) -> Client:
  elif organization == "neurips":
      client = HTTPModelClient(cache_config=cache_config)
  elif organization == "openai":
-     from helm.proxy.clients.chat_gpt_client import ChatGPTClient
      from helm.proxy.clients.openai_client import OpenAIClient

-     # TODO: add ChatGPT to the OpenAIClient when it's supported.
-     # We're using a separate client for now since we're using an unofficial Python library.
-     # See https://github.com/acheong08/ChatGPT/wiki/Setup on how to get a valid session token.
-     chat_gpt_client: ChatGPTClient = ChatGPTClient(
-         session_token=self.credentials.get("chatGPTSessionToken", ""),
-         lock_file_path=os.path.join(self.cache_path, "ChatGPT.lock"),
-         # TODO: use `cache_config` above. Since this feature is still experimental,
-         # save queries and responses in a separate collection.
-         cache_config=self._build_cache_config("ChatGPT"),
-         tokenizer_client=self._get_tokenizer_client("huggingface"),
-     )
-
      org_id = self.credentials.get("openaiOrgId", None)
      api_key = self.credentials.get("openaiApiKey", None)
      client = OpenAIClient(
          cache_config=cache_config,
-         chat_gpt_client=chat_gpt_client,
          api_key=api_key,
          org_id=org_id,
      )
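The branch above dispatches on the organization prefix of the requested model name; with the ChatGPT-specific client gone, openai/* models all route through OpenAIClient. A small sketch of that prefix parsing (function name and assertions are illustrative, not the repository's code):

    # Illustrative: derive the organization prefix that _get_client branches on.
    def organization_of(model: str) -> str:
        """Return the organization part of an 'organization/model' name."""
        return model.split("/", 1)[0]

    assert organization_of("openai/gpt-3.5-turbo-0613") == "openai"
    assert organization_of("ai21/j1-jumbo") == "ai21"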