Add ruff rules for pycodestyle Warning (W)
cbornet committed Sep 28, 2024
1 parent 29bf89d commit fae2c9a
Showing 32 changed files with 119 additions and 119 deletions.
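pycodestyle's W rules flag whitespace problems such as W291 (trailing whitespace) and W293 (whitespace on an otherwise blank line). Every hunk in this commit is that kind of mechanical fix: in each changed pair below, the removed (`-`) line ends in trailing whitespace and the added (`+`) line is identical without it. The configuration change that enables the rules is not itself visible in this excerpt; the following is a minimal sketch of what such a change typically looks like in `pyproject.toml`, with an illustrative rule list rather than the repository's actual one:

```toml
# Hypothetical pyproject.toml excerpt -- illustrative, not this repo's actual config.
[tool.ruff.lint]
select = [
    "E",  # pycodestyle errors
    "F",  # pyflakes
    "W",  # pycodestyle warnings (W291, W293, ...) -- the rules this commit enables
]
```

With `W` selected, running `ruff check --select W --fix` produces exactly this kind of whitespace-only diff across the codebase.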
4 changes: 2 additions & 2 deletions libs/core/langchain_core/documents/base.py
@@ -32,9 +32,9 @@ class BaseMedia(Serializable):
id: Optional[str] = None
"""An optional identifier for the document.
-Ideally this should be unique across the document collection and formatted 
+Ideally this should be unique across the document collection and formatted
as a UUID, but this will not be enforced.
.. versionadded:: 0.2.11
"""

14 changes: 7 additions & 7 deletions libs/core/langchain_core/indexing/base.py
@@ -465,26 +465,26 @@ class DeleteResponse(TypedDict, total=False):

num_deleted: int
"""The number of items that were successfully deleted.
If returned, this should only include *actual* deletions.
-If the ID did not exist to begin with, 
+If the ID did not exist to begin with,
it should not be included in this count.
"""

succeeded: Sequence[str]
"""The IDs that were successfully deleted.
If returned, this should only include *actual* deletions.
If the ID did not exist to begin with,
it should not be included in this list.
"""

failed: Sequence[str]
"""The IDs that failed to be deleted.
-Please note that deleting an ID that 
+Please note that deleting an ID that
does not exist is **NOT** considered a failure.
"""

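Taken together, these fields form a small contract for vector-store deletions. A minimal sketch, assuming `DeleteResponse` is importable from `langchain_core.indexing` (it is defined in the `indexing/base.py` file shown here):

```python
from langchain_core.indexing import DeleteResponse

# "doc-1" existed and was deleted; "doc-2" never existed, so per the contract
# above it is neither counted in num_deleted nor reported as a failure.
response: DeleteResponse = {
    "num_deleted": 1,
    "succeeded": ["doc-1"],
    "failed": [],
}
```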
4 changes: 2 additions & 2 deletions libs/core/langchain_core/language_models/base.py
@@ -100,12 +100,12 @@ class BaseLanguageModel(

cache: Union[BaseCache, bool, None] = None
"""Whether to cache the response.
* If true, will use the global cache.
* If false, will not use a cache
* If None, will use the global cache if it's set, otherwise no cache.
* If instance of BaseCache, will use the provided cache.
Caching is not currently supported for streaming methods of models.
"""
verbose: bool = Field(default_factory=_get_verbosity, exclude=True, repr=False)
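A short sketch of how the four `cache` cases interact with the global cache, using the `FakeListLLM` test model that appears later in this same commit (any `BaseLanguageModel` subclass behaves the same way):

```python
from langchain_core.caches import InMemoryCache
from langchain_core.globals import set_llm_cache
from langchain_core.language_models.fake import FakeListLLM

set_llm_cache(InMemoryCache())  # install a global cache

llm_default = FakeListLLM(responses=["hi"])                # cache=None: a global cache is set, so it is used
llm_explicit = FakeListLLM(responses=["hi"], cache=True)   # always use the global cache
llm_uncached = FakeListLLM(responses=["hi"], cache=False)  # never cache
```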
4 changes: 2 additions & 2 deletions libs/core/langchain_core/language_models/chat_models.py
@@ -208,8 +208,8 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):

disable_streaming: Union[bool, Literal["tool_calling"]] = False
"""Whether to disable streaming for this model.
-If streaming is bypassed, then ``stream()/astream()`` will defer to 
+If streaming is bypassed, then ``stream()/astream()`` will defer to
``invoke()/ainvoke()``.
- If True, will always bypass streaming case.
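In practice this means `stream()` on a model with streaming disabled yields the whole response at once. A sketch, assuming the `FakeListChatModel` test model from `langchain_core.language_models.fake_chat_models`:

```python
from langchain_core.language_models.fake_chat_models import FakeListChatModel

model = FakeListChatModel(responses=["hello"], disable_streaming=True)
for chunk in model.stream("hi"):
    # With streaming bypassed, stream() defers to invoke(), so this loop
    # runs once with the full message rather than token by token.
    print(chunk.content)
```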
4 changes: 2 additions & 2 deletions libs/core/langchain_core/language_models/fake.py
@@ -21,12 +21,12 @@ class FakeListLLM(LLM):
# it's only used by sub-classes.
sleep: Optional[float] = None
"""Sleep time in seconds between responses.
Ignored by FakeListLLM, but used by sub-classes.
"""
i: int = 0
"""Internally incremented after every model invocation.
Useful primarily for testing purposes.
"""

4 changes: 2 additions & 2 deletions libs/core/langchain_core/messages/ai.py
@@ -65,7 +65,7 @@ class AIMessage(BaseMessage):

example: bool = False
"""Use to denote that a message is part of an example conversation.
At the moment, this is ignored by most models. Usage is discouraged.
"""

@@ -215,7 +215,7 @@ class AIMessageChunk(AIMessage, BaseMessageChunk):
# to make sure that the chunk variant can be discriminated from the
# non-chunk variant.
type: Literal["AIMessageChunk"] = "AIMessageChunk" # type: ignore
"""The type of the message (used for deserialization).
"""The type of the message (used for deserialization).
Defaults to "AIMessageChunk"."""

tool_call_chunks: list[ToolCallChunk] = []
10 changes: 5 additions & 5 deletions libs/core/langchain_core/messages/base.py
@@ -25,7 +25,7 @@ class BaseMessage(Serializable):

additional_kwargs: dict = Field(default_factory=dict)
"""Reserved for additional payload data associated with the message.
For example, for a message from an AI, this could include tool calls as
encoded by the model provider.
"""
@@ -35,16 +35,16 @@ class BaseMessage(Serializable):

type: str
"""The type of the message. Must be a string that is unique to the message type.
The purpose of this field is to allow for easy identification of the message type
when deserializing messages.
"""

name: Optional[str] = None
"""An optional name for the message.
"""An optional name for the message.
This can be used to provide a human-readable name for the message.
Usage of this field is optional, and whether it's used or not is up to the
model implementation.
"""
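These three fields are easiest to see on a concrete message; a minimal sketch (the `additional_kwargs` payload shown is invented for illustration):

```python
from langchain_core.messages import HumanMessage

msg = HumanMessage(
    content="hello",
    name="alice",  # optional, human-readable; honored only by some model implementations
    additional_kwargs={"provider_extras": {"priority": "high"}},  # invented provider payload
)
msg.type  # 'human' -- the unique string used to identify the type when deserializing
```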
2 changes: 1 addition & 1 deletion libs/core/langchain_core/messages/chat.py
@@ -35,7 +35,7 @@ class ChatMessageChunk(ChatMessage, BaseMessageChunk):
# to make sure that the chunk variant can be discriminated from the
# non-chunk variant.
type: Literal["ChatMessageChunk"] = "ChatMessageChunk" # type: ignore
"""The type of the message (used during serialization).
"""The type of the message (used during serialization).
Defaults to "ChatMessageChunk"."""

@classmethod
2 changes: 1 addition & 1 deletion libs/core/langchain_core/messages/function.py
@@ -42,7 +42,7 @@ class FunctionMessageChunk(FunctionMessage, BaseMessageChunk):
# to make sure that the chunk variant can be discriminated from the
# non-chunk variant.
type: Literal["FunctionMessageChunk"] = "FunctionMessageChunk" # type: ignore[assignment]
"""The type of the message (used for serialization).
"""The type of the message (used for serialization).
Defaults to "FunctionMessageChunk"."""

@classmethod
4 changes: 2 additions & 2 deletions libs/core/langchain_core/messages/human.py
@@ -30,7 +30,7 @@ class HumanMessage(BaseMessage):

example: bool = False
"""Use to denote that a message is part of an example conversation.
At the moment, this is ignored by most models. Usage is discouraged.
Defaults to False.
"""
@@ -66,7 +66,7 @@ class HumanMessageChunk(HumanMessage, BaseMessageChunk):
# to make sure that the chunk variant can be discriminated from the
# non-chunk variant.
type: Literal["HumanMessageChunk"] = "HumanMessageChunk" # type: ignore[assignment]
"""The type of the message (used for serialization).
"""The type of the message (used for serialization).
Defaults to "HumanMessageChunk"."""

@classmethod
2 changes: 1 addition & 1 deletion libs/core/langchain_core/messages/system.py
@@ -60,7 +60,7 @@ class SystemMessageChunk(SystemMessage, BaseMessageChunk):
# to make sure that the chunk variant can be discriminated from the
# non-chunk variant.
type: Literal["SystemMessageChunk"] = "SystemMessageChunk" # type: ignore[assignment]
"""The type of the message (used for serialization).
"""The type of the message (used for serialization).
Defaults to "SystemMessageChunk"."""

@classmethod
8 changes: 4 additions & 4 deletions libs/core/langchain_core/messages/tool.py
@@ -58,11 +58,11 @@ class ToolMessage(BaseMessage):

artifact: Any = None
"""Artifact of the Tool execution which is not meant to be sent to the model.
-Should only be specified if it is different from the message content, e.g. if only 
+Should only be specified if it is different from the message content, e.g. if only
a subset of the full tool output is being passed as message content but the full
output is needed in other parts of the code.
.. versionadded:: 0.2.17
"""

@@ -191,7 +191,7 @@ class ToolCall(TypedDict):
"""The arguments to the tool call."""
id: Optional[str]
"""An identifier associated with the tool call.
An identifier is needed to associate a tool call request with a tool
call result in events when multiple concurrent tool calls are made.
"""
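A sketch of `artifact` and `tool_call_id` working together (the tool output and IDs are invented for illustration):

```python
from langchain_core.messages import ToolMessage

full_rows = [{"city": "Paris", "temp_c": 21.0}, {"city": "Lyon", "temp_c": 24.5}]

msg = ToolMessage(
    content="2 rows matched",    # the compact summary the model actually sees
    artifact=full_rows,          # full tool output, kept for downstream code only
    tool_call_id="call_abc123",  # ties this result back to the originating ToolCall id
)
```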
2 changes: 1 addition & 1 deletion libs/core/langchain_core/output_parsers/json.py
@@ -42,7 +42,7 @@ class JsonOutputParser(BaseCumulativeTransformOutputParser[Any]):
"""

pydantic_object: Annotated[Optional[type[TBaseModel]], SkipValidation()] = None # type: ignore
"""The Pydantic object to use for validation.
"""The Pydantic object to use for validation.
If None, no validation is performed."""

def _diff(self, prev: Optional[Any], next: Any) -> Any:
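A minimal sketch of the parser with and without a schema (the `Person` class and values are invented for illustration; the parser returns plain dicts either way, and the supplied schema also shapes `get_format_instructions()`):

```python
from langchain_core.output_parsers import JsonOutputParser
from pydantic import BaseModel

class Person(BaseModel):  # invented example schema
    name: str
    age: int

parser = JsonOutputParser(pydantic_object=Person)
parser.parse('{"name": "Ada", "age": 36}')  # -> {'name': 'Ada', 'age': 36}
parser.get_format_instructions()            # instructions include Person's JSON schema
```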
6 changes: 3 additions & 3 deletions libs/core/langchain_core/output_parsers/openai_functions.py
@@ -57,9 +57,9 @@ class JsonOutputFunctionsParser(BaseCumulativeTransformOutputParser[Any]):

strict: bool = False
"""Whether to allow non-JSON-compliant strings.
See: https://docs.python.org/3/library/json.html#encoders-and-decoders
Useful when the parsed output may include unicode characters or new lines.
"""

@@ -226,7 +226,7 @@ class Dog(BaseModel):

pydantic_schema: Union[type[BaseModel], dict[str, type[BaseModel]]]
"""The pydantic schema to parse the output with.
If multiple schemas are provided, then the function name will be used to
determine which schema to use.
"""
6 changes: 3 additions & 3 deletions libs/core/langchain_core/output_parsers/openai_tools.py
@@ -142,12 +142,12 @@ class JsonOutputToolsParser(BaseCumulativeTransformOutputParser[Any]):
first_tool_only: bool = False
"""Whether to return only the first tool call.
-If False, the result will be a list of tool calls, or an empty list 
+If False, the result will be a list of tool calls, or an empty list
if no tool calls are found.
If true, and multiple tool calls are found, only the first one will be returned,
-and the other tool calls will be ignored. 
-If no tool calls are found, None will be returned. 
+and the other tool calls will be ignored.
+If no tool calls are found, None will be returned.
"""

def parse_result(self, result: list[Generation], *, partial: bool = False) -> Any:
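The two modes, sketched on a hand-built `AIMessage` (tool names, arguments, and IDs are invented; output shapes abbreviated):

```python
from langchain_core.messages import AIMessage
from langchain_core.output_parsers.openai_tools import JsonOutputToolsParser
from langchain_core.outputs import ChatGeneration

msg = AIMessage(
    content="",
    tool_calls=[
        {"name": "add", "args": {"a": 1, "b": 2}, "id": "call_1"},
        {"name": "mul", "args": {"a": 3, "b": 4}, "id": "call_2"},
    ],
)
generations = [ChatGeneration(message=msg)]

JsonOutputToolsParser().parse_result(generations)
# -> [{'type': 'add', 'args': {...}}, {'type': 'mul', 'args': {...}}]

JsonOutputToolsParser(first_tool_only=True).parse_result(generations)
# -> {'type': 'add', 'args': {...}}   (None if no tool calls are found)
```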
22 changes: 11 additions & 11 deletions libs/core/langchain_core/output_parsers/xml.py
@@ -12,12 +12,12 @@
from langchain_core.runnables.utils import AddableDict

XML_FORMAT_INSTRUCTIONS = """The output should be formatted as a XML file.
-1. Output should conform to the tags below. 
+1. Output should conform to the tags below.
2. If tags are not given, make them on your own.
3. Remember to always open and close all the tags.
As an example, for the tags ["foo", "bar", "baz"]:
1. String "<foo>\n <bar>\n <baz></baz>\n </bar>\n</foo>" is a well-formatted instance of the schema.
1. String "<foo>\n <bar>\n <baz></baz>\n </bar>\n</foo>" is a well-formatted instance of the schema.
2. String "<foo>\n <bar>\n </foo>" is a badly-formatted instance.
3. String "<foo>\n <tag>\n </tag>\n</foo>" is a badly-formatted instance.
@@ -146,23 +146,23 @@ class XMLOutputParser(BaseTransformOutputParser):
)
parser: Literal["defusedxml", "xml"] = "defusedxml"
"""Parser to use for XML parsing. Can be either 'defusedxml' or 'xml'.
-* 'defusedxml' is the default parser and is used to prevent XML vulnerabilities 
+* 'defusedxml' is the default parser and is used to prevent XML vulnerabilities
present in some distributions of Python's standard library xml.
`defusedxml` is a wrapper around the standard library parser that
sets up the parser with secure defaults.
* 'xml' is the standard library parser.
Use `xml` only if you are sure that your distribution of the standard library
-is not vulnerable to XML vulnerabilities. 
+is not vulnerable to XML vulnerabilities.
Please review the following resources for more information:
* https://docs.python.org/3/library/xml.html#xml-vulnerabilities
-* https://github.com/tiran/defusedxml 
+* https://github.com/tiran/defusedxml
The standard library relies on libexpat for parsing XML:
-https://github.com/libexpat/libexpat 
+https://github.com/libexpat/libexpat
"""

def get_format_instructions(self) -> str:
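A sketch of the parser applied to the well-formatted example string from the format instructions above (the default `parser="defusedxml"` setting requires the `defusedxml` package to be installed; output shape abbreviated):

```python
from langchain_core.output_parsers.xml import XMLOutputParser

parser = XMLOutputParser()  # parser="defusedxml" is the secure default
parser.parse("<foo>\n <bar>\n  <baz></baz>\n </bar>\n</foo>")
# -> {'foo': [{'bar': [{'baz': None}]}]}
```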
6 changes: 3 additions & 3 deletions libs/core/langchain_core/outputs/chat_result.py
@@ -20,16 +20,16 @@ class ChatResult(BaseModel):

generations: list[ChatGeneration]
"""List of the chat generations.
Generations is a list to allow for multiple candidate generations for a single
input prompt.
"""
llm_output: Optional[dict] = None
"""For arbitrary LLM provider specific output.
This dictionary is a free-form dictionary that can contain any information that the
provider wants to return. It is not standardized and is provider-specific.
Users should generally avoid relying on this field and instead rely on
accessing relevant information from standardized fields present in
AIMessage.
4 changes: 2 additions & 2 deletions libs/core/langchain_core/outputs/generation.py
@@ -26,8 +26,8 @@ class Generation(Serializable):
"""Generated text output."""

generation_info: Optional[dict[str, Any]] = None
"""Raw response from the provider.
"""Raw response from the provider.
May include things like the reason for finishing or token log probabilities.
"""
type: Literal["Generation"] = "Generation"
12 changes: 6 additions & 6 deletions libs/core/langchain_core/outputs/llm_result.py
@@ -22,25 +22,25 @@ class LLMResult(BaseModel):
list[Union[Generation, ChatGeneration, GenerationChunk, ChatGenerationChunk]]
]
"""Generated outputs.
The first dimension of the list represents completions for different input
prompts.
The second dimension of the list represents different candidate generations
for a given prompt.
When returned from an LLM the type is List[List[Generation]].
When returned from a chat model the type is List[List[ChatGeneration]].
ChatGeneration is a subclass of Generation that has a field for a structured
chat message.
"""
llm_output: Optional[dict] = None
"""For arbitrary LLM provider specific output.
This dictionary is a free-form dictionary that can contain any information that the
provider wants to return. It is not standardized and is provider-specific.
Users should generally avoid relying on this field and instead rely on
accessing relevant information from standardized fields present in
AIMessage.
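The two-dimensional `generations` layout, sketched with invented texts and token counts:

```python
from langchain_core.outputs import Generation, LLMResult

result = LLMResult(
    generations=[  # first dimension: one entry per input prompt
        [Generation(text="Paris"), Generation(text="The capital is Paris.")],    # candidates for prompt 0
        [Generation(text="Berlin"), Generation(text="The capital is Berlin.")],  # candidates for prompt 1
    ],
    llm_output={"token_usage": {"total_tokens": 42}},  # free-form, provider-specific
)
result.generations[0][1].text  # second candidate generation for the first prompt
```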
6 changes: 3 additions & 3 deletions libs/core/langchain_core/prompts/base.py
@@ -45,11 +45,11 @@ class BasePromptTemplate(
"""Base class for all prompt templates, returning a prompt."""

input_variables: list[str]
"""A list of the names of the variables whose values are required as inputs to the
"""A list of the names of the variables whose values are required as inputs to the
prompt."""
optional_variables: list[str] = Field(default=[])
"""optional_variables: A list of the names of the variables for placeholder
-or MessagePlaceholder that are optional. These variables are auto inferred 
+or MessagePlaceholder that are optional. These variables are auto inferred
from the prompt and user need not provide them."""
input_types: typing.Dict[str, Any] = Field(default_factory=dict, exclude=True) # noqa: UP006
"""A dictionary of the types of the variables the prompt template expects.
@@ -58,7 +58,7 @@ class BasePromptTemplate(
"""How to parse the output of calling an LLM on this formatted prompt."""
partial_variables: Mapping[str, Any] = Field(default_factory=dict)
"""A dictionary of the partial variables the prompt template carries.
Partial variables populate the template so that you don't need to
pass them in every time you call the prompt."""
metadata: Optional[typing.Dict[str, Any]] = None # noqa: UP006
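Inferred input variables and partial variables, sketched with the `PromptTemplate` API:

```python
from langchain_core.prompts import PromptTemplate

prompt = PromptTemplate.from_template("Tell me a {adjective} joke about {topic}.")
prompt.input_variables        # ['adjective', 'topic'] -- inferred from the template

shorter = prompt.partial(adjective="short")
shorter.input_variables       # ['topic']; 'adjective' is now carried as a partial variable
shorter.format(topic="ruff")  # 'Tell me a short joke about ruff.'
```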
6 changes: 3 additions & 3 deletions libs/core/langchain_core/prompts/chat.py
@@ -196,12 +196,12 @@ class MessagesPlaceholder(BaseMessagePromptTemplate):
"""Name of variable to use as messages."""

optional: bool = False
"""If True format_messages can be called with no arguments and will return an empty
list. If False then a named argument with name `variable_name` must be passed
"""If True format_messages can be called with no arguments and will return an empty
list. If False then a named argument with name `variable_name` must be passed
in, even if the value is an empty list."""

n_messages: Optional[PositiveInt] = None
"""Maximum number of messages to include. If None, then will include all.
"""Maximum number of messages to include. If None, then will include all.
Defaults to None."""

@classmethod
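The `optional` and `n_messages` fields in use; a sketch with an invented variable name:

```python
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a helpful assistant."),
    MessagesPlaceholder(variable_name="history", optional=True, n_messages=10),
    ("human", "{question}"),
])

# optional=True: 'history' may be omitted entirely...
prompt.invoke({"question": "Hi!"})
# ...and when it is provided, n_messages caps how many of the most recent messages are kept.
```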