Skip to content

Commit

Permalink
feat: add warning (#1591)
Browse files Browse the repository at this point in the history
Co-authored-by: Wendong-Fan <[email protected]>
Co-authored-by: Xiaotian Jin <[email protected]>
Co-authored-by: Wendong <[email protected]>
  • Loading branch information
4 people authored Feb 18, 2025
1 parent 7821e6e commit 69e4281
Show file tree
Hide file tree
Showing 2 changed files with 21 additions and 1 deletion.
13 changes: 12 additions & 1 deletion camel/memories/agent_memories.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========

import warnings
from typing import List, Optional

from camel.memories.base import AgentMemory, BaseContextCreator
Expand Down Expand Up @@ -49,7 +50,17 @@ def __init__(
self._chat_history_block = ChatHistoryBlock(storage=storage)

def retrieve(self) -> List[ContextRecord]:
    """Retrieve the most recent records from the chat history.

    Returns:
        List[ContextRecord]: Up to ``self._window_size`` of the most
            recent records from the underlying chat history block (all
            records when ``self._window_size`` is ``None``).

    Warns:
        UserWarning: When exactly ``self._window_size`` records are
            returned, since earlier messages have likely been truncated
            out of the context window.
    """
    # NOTE: the stale pre-diff `return` line has been removed here; it
    # made the warning logic below unreachable.
    records = self._chat_history_block.retrieve(self._window_size)
    # Hitting the window size exactly suggests older messages were cut
    # off -- surface that to the caller rather than silently dropping them.
    if self._window_size is not None and len(records) == self._window_size:
        warnings.warn(
            f"Chat history window size limit ({self._window_size}) "
            f"reached. Some earlier messages will not be included in "
            f"the context. Consider increasing window_size if you need "
            f"a longer context.",
            UserWarning,
            stacklevel=2,
        )
    return records

def write_records(self, records: List[MemoryRecord]) -> None:
    """Write the given memory records to the underlying chat history block.

    Args:
        records (List[MemoryRecord]): The memory records to store; they
            are delegated unchanged to ``self._chat_history_block``.
    """
    self._chat_history_block.write_records(records)
Expand Down
9 changes: 9 additions & 0 deletions camel/memories/context_creators/score_based.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,14 @@

from pydantic import BaseModel

from camel.logger import get_logger
from camel.memories.base import BaseContextCreator
from camel.memories.records import ContextRecord
from camel.messages import OpenAIMessage
from camel.utils import BaseTokenCounter

logger = get_logger(__name__)


class _ContextUnit(BaseModel):
idx: int
Expand Down Expand Up @@ -103,6 +106,12 @@ def create_context(
if total_tokens <= self.token_limit:
return self._create_output(context_units)

# Log warning about token limit being exceeded
logger.warning(
f"Token limit reached ({total_tokens} > {self.token_limit}). "
f"Some messages will be pruned from memory to meet the limit."
)

# Sort by score
context_units = sorted(
context_units, key=lambda unit: unit.record.score
Expand Down

0 comments on commit 69e4281

Please sign in to comment.