From 425c3002c41586ac0a191e71fd2185ec3499ab33 Mon Sep 17 00:00:00 2001
From: zachary62
Date: Sat, 25 Jan 2025 05:50:04 +0000
Subject: [PATCH] cache retry doc

---
 docs/llm.md | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)

diff --git a/docs/llm.md b/docs/llm.md
index 9c48186..3a1967b 100644
--- a/docs/llm.md
+++ b/docs/llm.md
@@ -54,9 +54,30 @@ def call_llm(prompt):
 ```
 
 > ⚠️ Caching conflicts with Node retries, as retries yield the same result.
+>
+> To address this, you could use cached results only on the first attempt, so retries call the LLM again.
 {: .warning }
 
+```python
+from functools import lru_cache
+
+@lru_cache(maxsize=1000)
+def cached_call(prompt):
+    pass
+
+def call_llm(prompt, use_cache):
+    if use_cache:
+        return cached_call(prompt)
+    # Call the underlying function directly
+    return cached_call.__wrapped__(prompt)
+
+class SummarizeNode(Node):
+    def exec(self, text):
+        return call_llm(f"Summarize: {text}", self.cur_retry == 0)
+```
+
+
 - Enable logging:
 
 ```python
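
For anyone who wants to try the pattern outside the docs, here is a minimal runnable sketch of what the added snippet describes. Two assumptions are made for illustration: the `cached_call` body (left as `pass` in the docs) is swapped for a hypothetical print-and-return stub, and the `Node`/`cur_retry` machinery is simulated with explicit `use_cache` flags rather than a real node.

```python
from functools import lru_cache

@lru_cache(maxsize=1000)
def cached_call(prompt):
    # Hypothetical stand-in for the real LLM call (left as `pass` in the docs).
    print(f"  [model invoked for {prompt!r}]")
    return f"summary of {prompt!r}"

def call_llm(prompt, use_cache):
    if use_cache:
        return cached_call(prompt)
    # lru_cache exposes the undecorated function as __wrapped__,
    # so a retry bypasses the cache and reaches the model again.
    return cached_call.__wrapped__(prompt)

# First attempt (cur_retry == 0): cache consulted and populated.
print(call_llm("Summarize: hello", use_cache=True))
# Same prompt again on a first attempt: served from cache, no model call.
print(call_llm("Summarize: hello", use_cache=True))
# Retry (cur_retry > 0): cache bypassed, model invoked again.
print(call_llm("Summarize: hello", use_cache=False))
```

The key detail is `cached_call.__wrapped__`, which `functools.lru_cache` exposes as the undecorated function: calling it on retries guarantees the model is actually re-invoked instead of replaying the memoized answer, which is the whole point of a retry.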