Custom Runtime Logger <> FileLogger #2596

Merged · 33 commits · May 16, 2024

Changes from all commits

Commits
e38e660
added logger param for custom logger support
Hk669 May 5, 2024
67d631b
added FileLogger
Hk669 May 5, 2024
8d4d1c2
bump: spell check
Hk669 May 5, 2024
e2bf317
bump: import error
Hk669 May 5, 2024
3b26941
added more log functionalites
Hk669 May 5, 2024
455962c
bump: builtin logger for FileLogger
Hk669 May 6, 2024
5545504
type check and instance level logger
Hk669 May 6, 2024
b008f41
Merge branch 'main' into runtime-log
sonichi May 7, 2024
5af49da
tests added for the fileLogger
Hk669 May 7, 2024
9803bfc
Merge branch 'runtime-log' of https://github.com/microsoft/autogen in…
Hk669 May 7, 2024
4907c49
formatting bump
Hk669 May 7, 2024
cd1c076
updated tests and removed time formatting
Hk669 May 7, 2024
465f190
separate module for the filelogger
Hk669 May 7, 2024
945c6fe
update file logger test
Hk669 May 8, 2024
4fe2b76
added the FileLogger into the notebook
Hk669 May 8, 2024
90691b3
bump json decode error
Hk669 May 8, 2024
067a71c
updated requested changes
Hk669 May 8, 2024
fe52423
Updated tests with AutoGen agents
Hk669 May 9, 2024
fc63f77
bump file
Hk669 May 9, 2024
3cee52a
bump: logger accessed before intializedsolved
Hk669 May 9, 2024
5b2ef37
Updated notebook to guide with a filename
Hk669 May 10, 2024
4481bc3
Merge branch 'main' into runtime-log
Hk669 May 11, 2024
b0ed2f6
added thread_id to the FileLogger
Hk669 May 11, 2024
b8fbd05
Merge branch 'runtime-log' of https://github.com/microsoft/autogen in…
Hk669 May 11, 2024
d8da2e7
bump type check in tests
Hk669 May 11, 2024
ba22f2a
Updated thread_id for each log event
Hk669 May 12, 2024
99d2873
Updated thread_id for each log event
Hk669 May 12, 2024
8b71799
Updated with tempfile
Hk669 May 14, 2024
00f35ed
bump: str cleanup
Hk669 May 14, 2024
afabe32
Merge branch 'main' into runtime-log
Hk669 May 14, 2024
2434bb1
skipping-windows tests
Hk669 May 15, 2024
6f2f09e
Merge branch 'runtime-log' of https://github.com/microsoft/autogen in…
Hk669 May 15, 2024
bd17a5c
Merge branch 'main' into runtime-log
Hk669 May 16, 2024
211 changes: 211 additions & 0 deletions autogen/logger/file_logger.py
@@ -0,0 +1,211 @@
from __future__ import annotations

import json
import logging
import os
import threading
import uuid
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union

from openai import AzureOpenAI, OpenAI
from openai.types.chat import ChatCompletion

from autogen.logger.base_logger import BaseLogger
from autogen.logger.logger_utils import get_current_ts, to_dict

from .base_logger import LLMConfig

if TYPE_CHECKING:
from autogen import Agent, ConversableAgent, OpenAIWrapper

logger = logging.getLogger(__name__)


class FileLogger(BaseLogger):
def __init__(self, config: Dict[str, Any]):
self.config = config
self.session_id = str(uuid.uuid4())

curr_dir = os.getcwd()
self.log_dir = os.path.join(curr_dir, "autogen_logs")
os.makedirs(self.log_dir, exist_ok=True)

self.log_file = os.path.join(self.log_dir, self.config.get("filename", "runtime.log"))
try:
with open(self.log_file, "a"):
pass
except Exception as e:
logger.error(f"[file_logger] Failed to create logging file: {e}")

self.logger = logging.getLogger(__name__)
self.logger.setLevel(logging.INFO)
file_handler = logging.FileHandler(self.log_file)
self.logger.addHandler(file_handler)

def start(self) -> str:
"""Start the logger and return the session_id."""
try:
self.logger.info(f"Started new session with Session ID: {self.session_id}")
except Exception as e:
logger.error(f"[file_logger] Failed to create logging file: {e}")
finally:
return self.session_id

def log_chat_completion(
self,
invocation_id: uuid.UUID,
client_id: int,
wrapper_id: int,
request: Dict[str, Union[float, str, List[Dict[str, str]]]],
response: Union[str, ChatCompletion],
is_cached: int,
cost: float,
start_time: str,
) -> None:
"""
Log a chat completion.
"""
thread_id = threading.get_ident()
try:
log_data = json.dumps(
{
"invocation_id": str(invocation_id),
"client_id": client_id,
"wrapper_id": wrapper_id,
"request": to_dict(request),
"response": str(response),
"is_cached": is_cached,
"cost": cost,
"start_time": start_time,
"end_time": get_current_ts(),
"thread_id": thread_id,
}
)

self.logger.info(log_data)
except Exception as e:
self.logger.error(f"[file_logger] Failed to log chat completion: {e}")

def log_new_agent(self, agent: ConversableAgent, init_args: Dict[str, Any] = {}) -> None:
"""
Log a new agent instance.
"""
thread_id = threading.get_ident()

try:
log_data = json.dumps(
{
"id": id(agent),
"agent_name": agent.name if hasattr(agent, "name") and agent.name is not None else "",
"wrapper_id": to_dict(
agent.client.wrapper_id if hasattr(agent, "client") and agent.client is not None else ""
),
"session_id": self.session_id,
"current_time": get_current_ts(),
"agent_type": type(agent).__name__,
"args": to_dict(init_args),
"thread_id": thread_id,
}
)
self.logger.info(log_data)
except Exception as e:
self.logger.error(f"[file_logger] Failed to log new agent: {e}")

def log_event(self, source: Union[str, Agent], name: str, **kwargs: Dict[str, Any]) -> None:
"""
Log an event from an agent or a string source.
"""
from autogen import Agent

        # Serialize the event kwargs to JSON; objects that cannot be serialized are replaced with a
        # placeholder string containing the type's qualified name (__qualname__) instead of raising an error.
json_args = json.dumps(kwargs, default=lambda o: f"<<non-serializable: {type(o).__qualname__}>>")
thread_id = threading.get_ident()

if isinstance(source, Agent):
try:
log_data = json.dumps(
{
"source_id": id(source),
"source_name": str(source.name) if hasattr(source, "name") else source,
"event_name": name,
"agent_module": source.__module__,
"agent_class": source.__class__.__name__,
"json_state": json_args,
"timestamp": get_current_ts(),
"thread_id": thread_id,
}
)
self.logger.info(log_data)
except Exception as e:
self.logger.error(f"[file_logger] Failed to log event {e}")
else:
try:
log_data = json.dumps(
{
"source_id": id(source),
"source_name": str(source.name) if hasattr(source, "name") else source,
"event_name": name,
"json_state": json_args,
"timestamp": get_current_ts(),
"thread_id": thread_id,
}
)
self.logger.info(log_data)
except Exception as e:
self.logger.error(f"[file_logger] Failed to log event {e}")

def log_new_wrapper(
self, wrapper: OpenAIWrapper, init_args: Dict[str, Union[LLMConfig, List[LLMConfig]]] = {}
) -> None:
"""
Log a new wrapper instance.
"""
thread_id = threading.get_ident()

try:
log_data = json.dumps(
{
"wrapper_id": id(wrapper),
"session_id": self.session_id,
"json_state": json.dumps(init_args),
"timestamp": get_current_ts(),
"thread_id": thread_id,
}
)
self.logger.info(log_data)
except Exception as e:
self.logger.error(f"[file_logger] Failed to log event {e}")

def log_new_client(self, client: AzureOpenAI | OpenAI, wrapper: OpenAIWrapper, init_args: Dict[str, Any]) -> None:
"""
Log a new client instance.
"""
thread_id = threading.get_ident()

try:
log_data = json.dumps(
{
"client_id": id(client),
"wrapper_id": id(wrapper),
"session_id": self.session_id,
"class": type(client).__name__,
"json_state": json.dumps(init_args),
"timestamp": get_current_ts(),
"thread_id": thread_id,
}
)
self.logger.info(log_data)
except Exception as e:
self.logger.error(f"[file_logger] Failed to log event {e}")

def get_connection(self) -> None:
"""Method is intentionally left blank because there is no specific connection needed for the FileLogger."""
pass

def stop(self) -> None:
"""Close the file handler and remove it from the logger."""
for handler in self.logger.handlers:
if isinstance(handler, logging.FileHandler):
handler.close()
self.logger.removeHandler(handler)
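
For orientation, a minimal sketch of using the new FileLogger on its own, outside the runtime-logging entry points. The event name and keyword arguments are illustrative; the output path follows the `autogen_logs` directory created in `__init__` above.

from autogen.logger.file_logger import FileLogger

# Writes to ./autogen_logs/<filename>; "runtime.log" is the default filename.
file_logger = FileLogger(config={"filename": "runtime.log"})
session_id = file_logger.start()  # logs and returns the session UUID

# Keyword arguments are JSON-serialized; non-serializable values are replaced
# with a "<<non-serializable: ...>>" placeholder rather than raising.
file_logger.log_event(source="my_script", name="custom_event", detail="hello")

file_logger.stop()  # closes and removes the file handler
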
9 changes: 7 additions & 2 deletions autogen/logger/logger_factory.py
@@ -1,18 +1,23 @@
from typing import Any, Dict, Optional
from typing import Any, Dict, Literal, Optional

from autogen.logger.base_logger import BaseLogger
from autogen.logger.file_logger import FileLogger
from autogen.logger.sqlite_logger import SqliteLogger

__all__ = ("LoggerFactory",)


class LoggerFactory:
@staticmethod
def get_logger(logger_type: str = "sqlite", config: Optional[Dict[str, Any]] = None) -> BaseLogger:
def get_logger(
logger_type: Literal["sqlite", "file"] = "sqlite", config: Optional[Dict[str, Any]] = None
) -> BaseLogger:
if config is None:
config = {}

if logger_type == "sqlite":
return SqliteLogger(config)
elif logger_type == "file":
return FileLogger(config)
else:
raise ValueError(f"[logger_factory] Unknown logger type: {logger_type}")
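
A short sketch of how the extended factory is expected to be called; the config filename is illustrative.

from autogen.logger.logger_factory import LoggerFactory

# "sqlite" (the default) returns a SqliteLogger, "file" returns a FileLogger,
# and any other value raises ValueError.
logger = LoggerFactory.get_logger(logger_type="file", config={"filename": "runtime.log"})
session_id = logger.start()
logger.stop()
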
24 changes: 20 additions & 4 deletions autogen/runtime_logging.py
@@ -3,12 +3,12 @@
import logging
import sqlite3
import uuid
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Union

from openai import AzureOpenAI, OpenAI
from openai.types.chat import ChatCompletion

from autogen.logger.base_logger import LLMConfig
from autogen.logger.base_logger import BaseLogger, LLMConfig
from autogen.logger.logger_factory import LoggerFactory

if TYPE_CHECKING:
@@ -20,11 +20,27 @@
is_logging = False


def start(logger_type: str = "sqlite", config: Optional[Dict[str, Any]] = None) -> str:
def start(
logger: Optional[BaseLogger] = None,
logger_type: Literal["sqlite", "file"] = "sqlite",
config: Optional[Dict[str, Any]] = None,
) -> str:
"""
Start logging for the runtime.
Args:
logger (BaseLogger): A logger instance
logger_type (str): The type of logger to use (default: sqlite)
config (dict): Configuration for the logger
Returns:
session_id (str(uuid.uuid4)): a unique id for the logging session
"""
global autogen_logger
global is_logging

autogen_logger = LoggerFactory.get_logger(logger_type=logger_type, config=config)
if logger:
autogen_logger = logger
else:
autogen_logger = LoggerFactory.get_logger(logger_type=logger_type, config=config)

try:
session_id = autogen_logger.start()
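
A sketch of the two ways the extended start() can now be invoked, assuming it runs in a script with autogen installed; the filenames are illustrative, and per the branch above an explicit logger instance takes precedence over logger_type.

import autogen
from autogen.logger.file_logger import FileLogger

# Option A: let LoggerFactory build the logger from a type string.
session_id = autogen.runtime_logging.start(logger_type="file", config={"filename": "runtime.log"})
autogen.runtime_logging.stop()

# Option B: pass a pre-built BaseLogger instance; it takes precedence over logger_type.
custom_logger = FileLogger(config={"filename": "custom_runtime.log"})
session_id = autogen.runtime_logging.start(logger=custom_logger)
autogen.runtime_logging.stop()
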
82 changes: 81 additions & 1 deletion notebook/agentchat_logging.ipynb
@@ -8,6 +8,10 @@
"\n",
"AutoGen offers utilities to log data for debugging and performance analysis. This notebook demonstrates how to use them. \n",
"\n",
"we log data in different modes:\n",
"- SQlite Database\n",
"- File \n",
"\n",
"In general, users can initiate logging by calling `autogen.runtime_logging.start()` and stop logging by calling `autogen.runtime_logging.stop()`"
]
},
@@ -287,6 +291,82 @@
" + str(round(session_cost, 4))\n",
")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Log data in File mode\n",
"\n",
"By default, the log type is set to `sqlite` as shown above, but we introduced a new parameter for the `autogen.runtime_logging.start()`\n",
"\n",
"the `logger_type = \"file\"` will start to log data in the File mode."
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Logging session ID: ed493ebf-d78e-49f0-b832-69557276d557\n",
"\u001b[33muser_proxy\u001b[0m (to assistant):\n",
"\n",
"What is the height of the Eiffel Tower? Only respond with the answer and terminate\n",
"\n",
"--------------------------------------------------------------------------------\n",
"\u001b[33massistant\u001b[0m (to user_proxy):\n",
"\n",
"The height of the Eiffel Tower is 330 meters.\n",
"TERMINATE\n",
"\n",
"--------------------------------------------------------------------------------\n"
]
}
],
"source": [
"\n",
"import pandas as pd\n",
"\n",
"import autogen\n",
"from autogen import AssistantAgent, UserProxyAgent\n",
"\n",
"# Setup API key. Add your own API key to config file or environment variable\n",
"llm_config = {\n",
" \"config_list\": autogen.config_list_from_json(\n",
" env_or_file=\"OAI_CONFIG_LIST\",\n",
" ),\n",
" \"temperature\": 0.9,\n",
"}\n",
"\n",
"# Start logging with logger_type and the filename to log to\n",
"logging_session_id = autogen.runtime_logging.start(logger_type=\"file\", config={\"filename\": \"runtime.log\"})\n",
"print(\"Logging session ID: \" + str(logging_session_id))\n",
"\n",
"# Create an agent workflow and run it\n",
"assistant = AssistantAgent(name=\"assistant\", llm_config=llm_config)\n",
"user_proxy = UserProxyAgent(\n",
" name=\"user_proxy\",\n",
" code_execution_config=False,\n",
" human_input_mode=\"NEVER\",\n",
" is_termination_msg=lambda msg: \"TERMINATE\" in msg[\"content\"],\n",
")\n",
"\n",
"user_proxy.initiate_chat(\n",
" assistant, message=\"What is the height of the Eiffel Tower? Only respond with the answer and terminate\"\n",
")\n",
"autogen.runtime_logging.stop()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"This should create a `runtime.log` file in your current directory. "
]
}
],
"metadata": {
@@ -312,7 +392,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.7"
"version": "3.9.13"
}
},
"nbformat": 4,
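
Since the FileLogger writes one JSON object per line, the resulting log can be loaded for analysis. A sketch, assuming the default `autogen_logs` output directory and skipping any non-JSON lines such as the session-start banner.

import json

import pandas as pd

records = []
with open("autogen_logs/runtime.log") as f:
    for line in f:
        try:
            records.append(json.loads(line))
        except json.JSONDecodeError:
            continue  # skip non-JSON lines such as the session-start banner

df = pd.DataFrame(records)  # one row per logged event
print(df.head())
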