From 5d2728b034a45882f7bdc9d39af410b8bb38ad68 Mon Sep 17 00:00:00 2001 From: xucai Date: Fri, 21 Jun 2024 19:48:17 +0800 Subject: [PATCH 01/25] dspy-integration-with-langfuse --- README.md | 8 ++++++-- dsp/modules/azure_openai.py | 15 +++++++++++++-- dsp/modules/gpt3.py | 14 ++++++++++++-- setup.py | 3 ++- 4 files changed, 33 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index d8639fd15..140392909 100644 --- a/README.md +++ b/README.md @@ -75,12 +75,16 @@ Or open our intro notebook in Google Colab: [ Date: Sat, 22 Jun 2024 18:16:59 +0800 Subject: [PATCH 02/25] ollama support langfuse --- README.md | 2 ++ dsp/modules/ollama.py | 18 +++++++++++++++++- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 140392909..c5d353f21 100644 --- a/README.md +++ b/README.md @@ -81,6 +81,8 @@ For the optional (alphabetically sorted) [Chromadb](https://github.com/chroma-co pip install dspy-ai[chromadb] # or [groq] or [marqo] or [milvus] or [mongodb] or [myscale] or [pinecone] or [qdrant] or [snowflake] or [weaviate] or [langfuse] ``` +Langfuse is now supported (OpenAI, AzureOpenAI, and Ollama)! + Before you configure Langfuse, please manually deploy the Langfuse server or use Langfuse Cloud. You will get the corresponding configuration after you create a new project. Then configure the relevant environment variables in the project: `LANGFUSE_SECRET_KEY`, `LANGFUSE_PUBLIC_KEY`, and `LANGFUSE_HOST`. Just set the environment variables and Langfuse will read them automatically. See the [langfuse details](https://langfuse.com/docs/deployment/self-host). diff --git a/dsp/modules/ollama.py b/dsp/modules/ollama.py index 7781583be..00ed5f45b 100644 --- a/dsp/modules/ollama.py +++ b/dsp/modules/ollama.py @@ -1,11 +1,19 @@ import datetime import hashlib +import uuid from typing import Any, Literal import requests from dsp.modules.lm import LM +try: + from langfuse import Langfuse + # If you need higher performance, set the "threads" value + langfuse = Langfuse(max_retries=2) +except Exception: + langfuse = None + def post_request_metadata(model_name, prompt): """Creates a serialized request object for the Ollama API.""" @@ -134,7 +142,15 @@ def basic_request(self, prompt: str, **kwargs): "raw_kwargs": raw_kwargs, } self.history.append(history) - + if langfuse: + langfuse.trace( + name="Ollama request", + user_id=str(uuid.uuid4()), + metadata=settings_dict['options'], + input=prompt, + output=request_info['choices'] + ) + # Test whether Ollama can be integrated and report traces; still need to find the address of the hosted version return request_info def request(self, prompt: str, **kwargs): From 8d02e99906bdcff2e8a4fa66be0d6d0a2f2a7eff Mon Sep 17 00:00:00 2001 From: xucai Date: Sat, 22 Jun 2024 18:37:26 +0800 Subject: [PATCH 03/25] ollama support langfuse --- dsp/modules/ollama.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/dsp/modules/ollama.py b/dsp/modules/ollama.py index 00ed5f45b..80fdba06f 100644 --- a/dsp/modules/ollama.py +++ b/dsp/modules/ollama.py @@ -146,11 +146,10 @@ def basic_request(self, prompt: str, **kwargs): langfuse.trace( name="Ollama request", user_id=str(uuid.uuid4()), - metadata=settings_dict['options'], + metadata={**settings_dict['options'], **request_info["usage"]}, input=prompt, output=request_info['choices'] ) - # Test whether Ollama can be integrated and report traces; still need to find the address of the hosted version return request_info def request(self, prompt: str, **kwargs): From 1164399701236e72b4b8eb7b2bcd260ab4204d12 Mon Sep 17 00:00:00 2001 From: xucai Date: Mon, 24 Jun 2024 22:16:07 +0800 Subject: [PATCH 04/25] new BaseTracker && use LangfuseTracker ---
dsp/modules/azure_openai.py | 1 - dsp/modules/gpt3.py | 1 - dsp/modules/lm.py | 3 +- dsp/modules/ollama.py | 22 ++------ dsp/trackers/__init__.py | 0 dsp/trackers/base.py | 10 ++++ dsp/trackers/langfuse_tracker.py | 87 ++++++++++++++++++++++++++++++++ requirements.txt | 1 + 8 files changed, 104 insertions(+), 21 deletions(-) create mode 100644 dsp/trackers/__init__.py create mode 100644 dsp/trackers/base.py create mode 100644 dsp/trackers/langfuse_tracker.py diff --git a/dsp/modules/azure_openai.py b/dsp/modules/azure_openai.py index 3fed1d865..09f82839f 100644 --- a/dsp/modules/azure_openai.py +++ b/dsp/modules/azure_openai.py @@ -6,7 +6,6 @@ try: """ - langfuse has made it compatible for us. If there is any error in the langfuse configuration, it will turn to request the real address(openai or azure endpoint) """ import langfuse diff --git a/dsp/modules/gpt3.py b/dsp/modules/gpt3.py index 17d6a3474..becb3996f 100644 --- a/dsp/modules/gpt3.py +++ b/dsp/modules/gpt3.py @@ -6,7 +6,6 @@ try: """ - langfuse has made it compatible for us. If there is any error in the langfuse configuration, it will turn to request the real address(openai or azure endpoint) """ import langfuse diff --git a/dsp/modules/lm.py b/dsp/modules/lm.py index 7f7966d4b..0f34b0b90 100644 --- a/dsp/modules/lm.py +++ b/dsp/modules/lm.py @@ -1,4 +1,5 @@ from abc import ABC, abstractmethod +from dsp import BaseTracker class LM(ABC): @@ -126,7 +127,7 @@ def inspect_history(self, n: int = 1, skip: int = 0): return printing_value @abstractmethod - def __call__(self, prompt, only_completed=True, return_sorted=False, **kwargs): + def __call__(self, prompt, only_completed=True, return_sorted=False, tracker=BaseTracker, **kwargs): pass def copy(self, **kwargs): diff --git a/dsp/modules/ollama.py b/dsp/modules/ollama.py index 80fdba06f..ca135449c 100644 --- a/dsp/modules/ollama.py +++ b/dsp/modules/ollama.py @@ -1,18 +1,9 @@ import datetime import hashlib -import uuid from typing import Any, Literal - import requests - from dsp.modules.lm import LM - -try: - from langfuse import Langfuse - # If you need higher performance, set the "threads" value - langfuse = Langfuse(max_retries=2) -except Exception: - langfuse = None +from dsp.trackers.base import BaseTracker def post_request_metadata(model_name, prompt): """Creates a serialized request object for the Ollama API.""" @@ -142,14 +133,6 @@ def basic_request(self, prompt: str, **kwargs): "raw_kwargs": raw_kwargs, } self.history.append(history) - if langfuse: - langfuse.trace( - name="Ollama request", - user_id=str(uuid.uuid4()), - metadata={**settings_dict['options'], **request_info["usage"]}, - input=prompt, - output=request_info['choices'] - ) return request_info def request(self, prompt: str, **kwargs): @@ -167,6 +150,7 @@ def __call__( prompt: str, only_completed: bool = True, return_sorted: bool = False, + tracker: BaseTracker = BaseTracker, **kwargs, ) -> list[dict[str, Any]]: """Retrieves completions from Ollama. 
@@ -194,6 +178,8 @@ def __call__( completions = [self._get_choice_text(c) for c in choices] + tracker.call(input=prompt, output=choices, **kwargs) + return completions def copy(self, **kwargs): diff --git a/dsp/trackers/__init__.py b/dsp/trackers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dsp/trackers/base.py b/dsp/trackers/base.py new file mode 100644 index 000000000..35d762923 --- /dev/null +++ b/dsp/trackers/base.py @@ -0,0 +1,10 @@ +from abc import ABC, abstractmethod + + +class BaseTracker(ABC): + def __init__(self): + pass + + @abstractmethod + def call(self, **kwargs): + pass diff --git a/dsp/trackers/langfuse_tracker.py b/dsp/trackers/langfuse_tracker.py new file mode 100644 index 000000000..992704b9e --- /dev/null +++ b/dsp/trackers/langfuse_tracker.py @@ -0,0 +1,87 @@ +from typing import Optional, Union, List, Any +import httpx +import logging +import os +from langfuse.client import Langfuse, StatefulTraceClient, StatefulSpanClient, StateType +from dsp.trackers.base import BaseTracker + + +class LangfuseTracker(BaseTracker): + log = logging.getLogger("langfuse") + + def __init__(self, *, public_key: Optional[str] = None, secret_key: Optional[str] = None, + host: Optional[str] = None, debug: bool = False, stateful_client: Optional[ + Union[StatefulTraceClient, StatefulSpanClient] + ] = None, update_stateful_client: bool = False, version: Optional[str] = None, + session_id: Optional[str] = None, user_id: Optional[str] = None, trace_name: Optional[str] = None, + release: Optional[str] = None, metadata: Optional[Any] = None, tags: Optional[List[str]] = None, + threads: Optional[int] = None, flush_at: Optional[int] = None, flush_interval: Optional[int] = None, + max_retries: Optional[int] = None, timeout: Optional[int] = None, enabled: Optional[bool] = None, + httpx_client: Optional[httpx.Client] = None, sdk_integration: str = "default") -> None: + super().__init__() + self.version = version + self.session_id = session_id + self.user_id = user_id + self.trace_name = trace_name + self.release = release + self.metadata = metadata + self.tags = tags + + self.root_span = None + self.update_stateful_client = update_stateful_client + self.langfuse = None + + prio_public_key = public_key or os.environ.get("LANGFUSE_PUBLIC_KEY") + prio_secret_key = secret_key or os.environ.get("LANGFUSE_SECRET_KEY") + prio_host = host or os.environ.get( + "LANGFUSE_HOST", "https://cloud.langfuse.com" + ) + + if stateful_client and isinstance(stateful_client, StatefulTraceClient): + self.trace = stateful_client + self._task_manager = stateful_client.task_manager + return + + elif stateful_client and isinstance(stateful_client, StatefulSpanClient): + self.root_span = stateful_client + self.trace = StatefulTraceClient( + stateful_client.client, + stateful_client.trace_id, + StateType.TRACE, + stateful_client.trace_id, + stateful_client.task_manager, + ) + self._task_manager = stateful_client.task_manager + return + + args = { + "public_key": prio_public_key, + "secret_key": prio_secret_key, + "host": prio_host, + "debug": debug, + } + + if release is not None: + args["release"] = release + if threads is not None: + args["threads"] = threads + if flush_at is not None: + args["flush_at"] = flush_at + if flush_interval is not None: + args["flush_interval"] = flush_interval + if max_retries is not None: + args["max_retries"] = max_retries + if timeout is not None: + args["timeout"] = timeout + if enabled is not None: + args["enabled"] = enabled + if httpx_client is not None: + 
args["httpx_client"] = httpx_client + args["sdk_integration"] = sdk_integration + + self.langfuse = Langfuse(**args) + self.trace: Optional[StatefulTraceClient] = None + self._task_manager = self.langfuse.task_manager + + def call(self, input, output, **kwargs): + self.langfuse.trace(input=input, output=output, **kwargs) diff --git a/requirements.txt b/requirements.txt index e5569d1f3..8656f1ae1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,3 +10,4 @@ requests structlog tqdm ujson +httpx From 6063d0f6cf881e193bc91f33e6a2f406eca31d28 Mon Sep 17 00:00:00 2001 From: xucai Date: Mon, 24 Jun 2024 22:56:22 +0800 Subject: [PATCH 05/25] new BaseTracker && use LangfuseTracker && edit README.md --- README.md | 10 +++++++++- dsp/modules/lm.py | 2 +- dsp/modules/ollama.py | 5 +++-- 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index c5d353f21..1d2d73f8b 100644 --- a/README.md +++ b/README.md @@ -81,12 +81,20 @@ For the optional (alphabetically sorted) [Chromadb](https://github.com/chroma-co pip install dspy-ai[chromadb] # or [groq] or [marqo] or [milvus] or [mongodb] or [myscale] or [pinecone] or [qdrant] or [snowflake] or [weaviate] or [langfuse] ``` -langfuse is now supported(openAI、AzureOpenAI and Ollama)! +### How to integrate langfuse Before you configure langfuse, please manually deploy the langfuse server or use Langfuse Cloud. You will get the corresponding configuration after you create a new project. When please configure the relevant environment variables in the project, they are `LANGFUSE_SECRET_KEY`、`LANGFUSE_PUBLIC_KEY` and `LANGFUSE_HOST`. Just write the environment variables and langfuse will automatically read them.[langfuse details](https://langfuse.com/docs/deployment/self-host) . +```python +import dspy +from dsp.trackers.langfuse_tracker import LangfuseTracker + +langfuse = LangfuseTracker() +dspy.OllamaLocal(model="llama2", tracker=langfuse) +``` + ## 2) Documentation The DSPy documentation is divided into **tutorials** (step-by-step illustration of solving a task in DSPy), **guides** (how to use specific parts of the API), and **examples** (self-contained programs that illustrate usage). diff --git a/dsp/modules/lm.py b/dsp/modules/lm.py index 0f34b0b90..b78abd816 100644 --- a/dsp/modules/lm.py +++ b/dsp/modules/lm.py @@ -127,7 +127,7 @@ def inspect_history(self, n: int = 1, skip: int = 0): return printing_value @abstractmethod - def __call__(self, prompt, only_completed=True, return_sorted=False, tracker=BaseTracker, **kwargs): + def __call__(self, prompt, only_completed=True, return_sorted=False, **kwargs): pass def copy(self, **kwargs): diff --git a/dsp/modules/ollama.py b/dsp/modules/ollama.py index ca135449c..03d577b9c 100644 --- a/dsp/modules/ollama.py +++ b/dsp/modules/ollama.py @@ -41,6 +41,7 @@ def __init__( presence_penalty: float = 0, n: int = 1, num_ctx: int = 1024, + tracker: BaseTracker = BaseTracker, **kwargs, ): super().__init__(model) @@ -50,6 +51,7 @@ def __init__( self.base_url = base_url self.model_name = model self.timeout_s = timeout_s + self.tracker = tracker self.kwargs = { "temperature": temperature, @@ -150,7 +152,6 @@ def __call__( prompt: str, only_completed: bool = True, return_sorted: bool = False, - tracker: BaseTracker = BaseTracker, **kwargs, ) -> list[dict[str, Any]]: """Retrieves completions from Ollama. 
@@ -178,7 +179,7 @@ def __call__( completions = [self._get_choice_text(c) for c in choices] - tracker.call(input=prompt, output=choices, **kwargs) + self.tracker.call(input=prompt, output=choices, **kwargs) return completions From 479e7a1fc8b51219cf24392f3cc4f4e3284fb1e8 Mon Sep 17 00:00:00 2001 From: xucai Date: Tue, 25 Jun 2024 00:22:11 +0800 Subject: [PATCH 06/25] new BaseTracker && new LangfuseTracker && edit README.md --- dsp/modules/lm.py | 1 - 1 file changed, 1 deletion(-) diff --git a/dsp/modules/lm.py b/dsp/modules/lm.py index b78abd816..7f7966d4b 100644 --- a/dsp/modules/lm.py +++ b/dsp/modules/lm.py @@ -1,5 +1,4 @@ from abc import ABC, abstractmethod -from dsp import BaseTracker class LM(ABC): From d000151faf30396a5598706b5462150f425a062f Mon Sep 17 00:00:00 2001 From: xucai Date: Tue, 25 Jun 2024 01:13:27 +0800 Subject: [PATCH 07/25] new BaseTracker && new LangfuseTracker && edit README.md --- dsp/trackers/base.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/dsp/trackers/base.py b/dsp/trackers/base.py index 35d762923..1094a5038 100644 --- a/dsp/trackers/base.py +++ b/dsp/trackers/base.py @@ -1,10 +1,8 @@ -from abc import ABC, abstractmethod - -class BaseTracker(ABC): +class BaseTracker: def __init__(self): pass - @abstractmethod - def call(self, **kwargs): + @classmethod + def call(cls, **kwargs): pass From 9f332083b42eec86ebe4de140bd11c51bc003da9 Mon Sep 17 00:00:00 2001 From: xucai Date: Tue, 25 Jun 2024 01:19:14 +0800 Subject: [PATCH 08/25] new BaseTracker && new LangfuseTracker && edit README.md --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 1d2d73f8b..4035e3913 100644 --- a/README.md +++ b/README.md @@ -91,6 +91,7 @@ Just write the environment variables and langfuse will automatically read them.[ import dspy from dsp.trackers.langfuse_tracker import LangfuseTracker +# Assuming the environment variables have been set langfuse = LangfuseTracker() dspy.OllamaLocal(model="llama2", tracker=langfuse) ``` From 7dbeedbf8058a49447c76f10aafeab761f91468b Mon Sep 17 00:00:00 2001 From: xucai Date: Tue, 25 Jun 2024 14:55:29 +0800 Subject: [PATCH 09/25] langfuse:think of kwargs as metadata --- dsp/trackers/langfuse_tracker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dsp/trackers/langfuse_tracker.py b/dsp/trackers/langfuse_tracker.py index 992704b9e..54436b37f 100644 --- a/dsp/trackers/langfuse_tracker.py +++ b/dsp/trackers/langfuse_tracker.py @@ -84,4 +84,4 @@ def __init__(self, *, public_key: Optional[str] = None, secret_key: Optional[str self._task_manager = self.langfuse.task_manager def call(self, input, output, **kwargs): - self.langfuse.trace(input=input, output=output, **kwargs) + self.langfuse.trace(input=input, output=output, metadata=kwargs) From 2c781817e8d77c4424e6e280872df3768354aee4 Mon Sep 17 00:00:00 2001 From: xucai Date: Thu, 4 Jul 2024 23:54:56 +0800 Subject: [PATCH 10/25] support tracker_decorator --- dsp/modules/ollama.py | 4 ++-- dsp/trackers/langfuse_tracker.py | 4 ++-- dsp/trackers/tracker_decorator.py | 10 ++++++++++ 3 files changed, 14 insertions(+), 4 deletions(-) create mode 100644 dsp/trackers/tracker_decorator.py diff --git a/dsp/modules/ollama.py b/dsp/modules/ollama.py index 03d577b9c..6650d537f 100644 --- a/dsp/modules/ollama.py +++ b/dsp/modules/ollama.py @@ -4,6 +4,7 @@ import requests from dsp.modules.lm import LM from dsp.trackers.base import BaseTracker +from dsp.trackers.tracker_decorator import tracker_decorator def post_request_metadata(model_name, 
prompt): @@ -147,6 +148,7 @@ def request(self, prompt: str, **kwargs): def _get_choice_text(self, choice: dict[str, Any]) -> str: return choice["message"]["content"] + @tracker_decorator def __call__( self, prompt: str, @@ -179,8 +181,6 @@ def __call__( completions = [self._get_choice_text(c) for c in choices] - self.tracker.call(input=prompt, output=choices, **kwargs) - return completions def copy(self, **kwargs): diff --git a/dsp/trackers/langfuse_tracker.py b/dsp/trackers/langfuse_tracker.py index 54436b37f..cf8c9db72 100644 --- a/dsp/trackers/langfuse_tracker.py +++ b/dsp/trackers/langfuse_tracker.py @@ -83,5 +83,5 @@ def __init__(self, *, public_key: Optional[str] = None, secret_key: Optional[str self.trace: Optional[StatefulTraceClient] = None self._task_manager = self.langfuse.task_manager - def call(self, input, output, **kwargs): - self.langfuse.trace(input=input, output=output, metadata=kwargs) + def call(self, i, o, **kwargs): + self.langfuse.trace(input=i, output=o, metadata=kwargs) diff --git a/dsp/trackers/tracker_decorator.py b/dsp/trackers/tracker_decorator.py new file mode 100644 index 000000000..fdc998bb0 --- /dev/null +++ b/dsp/trackers/tracker_decorator.py @@ -0,0 +1,10 @@ +from dsp import BaseTracker + + +def tracker_decorator(func): + def tracker_wrapper(*args, **kwargs): + completions = func(*args, **kwargs) + if hasattr(args[0], "tracker") and issubclass(args[0], BaseTracker): + args[0].tracker.call(i=kwargs['prompt'], o=completions, **kwargs) + return completions + return tracker_wrapper From f0fadac73a72ffb3388c88ab57c11789b780b53f Mon Sep 17 00:00:00 2001 From: xucai Date: Fri, 5 Jul 2024 00:00:36 +0800 Subject: [PATCH 11/25] support tracker_decorator --- dsp/trackers/tracker_decorator.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/dsp/trackers/tracker_decorator.py b/dsp/trackers/tracker_decorator.py index fdc998bb0..2ea3352c2 100644 --- a/dsp/trackers/tracker_decorator.py +++ b/dsp/trackers/tracker_decorator.py @@ -1,10 +1,8 @@ -from dsp import BaseTracker - def tracker_decorator(func): def tracker_wrapper(*args, **kwargs): completions = func(*args, **kwargs) - if hasattr(args[0], "tracker") and issubclass(args[0], BaseTracker): + if hasattr(args[0], "tracker"): args[0].tracker.call(i=kwargs['prompt'], o=completions, **kwargs) return completions return tracker_wrapper From b23818bb60f84bc3bc1e9be72fba336d133f033b Mon Sep 17 00:00:00 2001 From: xucai Date: Fri, 5 Jul 2024 00:14:55 +0800 Subject: [PATCH 12/25] support tracker_decorator --- dsp/trackers/tracker_decorator.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/dsp/trackers/tracker_decorator.py b/dsp/trackers/tracker_decorator.py index 2ea3352c2..065c0d706 100644 --- a/dsp/trackers/tracker_decorator.py +++ b/dsp/trackers/tracker_decorator.py @@ -2,7 +2,8 @@ def tracker_decorator(func): def tracker_wrapper(*args, **kwargs): completions = func(*args, **kwargs) - if hasattr(args[0], "tracker"): - args[0].tracker.call(i=kwargs['prompt'], o=completions, **kwargs) + from dsp import BaseTracker + if hasattr(args[0], "tracker") and issubclass(args[0], BaseTracker): + args[0].tracker.call(i=args[1], o=completions, **kwargs) return completions return tracker_wrapper From 12af731d9f52af02047a67c61ee017063cc94e2a Mon Sep 17 00:00:00 2001 From: xucai Date: Tue, 9 Jul 2024 20:08:28 +0800 Subject: [PATCH 13/25] LM module adds tracker parameters & Mapping kwargs to metadata --- README.md | 13 +++++++++---- dsp/modules/lm.py | 6 ++++-- dsp/trackers/tracker_decorator.py | 9 
++++++--- 3 files changed, 19 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 8c5c66792..8edfed283 100644 --- a/README.md +++ b/README.md @@ -82,12 +82,17 @@ For the optional (alphabetically sorted) [Chromadb](https://github.com/chroma-co pip install dspy-ai[chromadb] # or [groq] or [marqo] or [milvus] or [mongodb] or [myscale] or [pinecone] or [qdrant] or [snowflake] or [weaviate] or [langfuse] ``` -### How to integrate Langfuse +### How to make the prompt visible +We have now integrated Langfuse as one of the trackers. -Before you configure Langfuse, please manually deploy the Langfuse server or use Langfuse Cloud. You will get the corresponding configuration after you create a new project. -Then configure the relevant environment variables in the project: `LANGFUSE_SECRET_KEY`, `LANGFUSE_PUBLIC_KEY`, and `LANGFUSE_HOST`. -Just set the environment variables and Langfuse will read them automatically. See the [langfuse details](https://langfuse.com/docs/deployment/self-host). +How do you configure Langfuse? See the [langfuse details](https://langfuse.com/docs/deployment/self-host). +After that, you will get three environment variables: `LANGFUSE_SECRET_KEY`, `LANGFUSE_PUBLIC_KEY`, and `LANGFUSE_HOST`. + +Just set the environment variables and Langfuse will read them automatically. + +If you are using **openai** or **azure_openai**, then your preparations are now complete. +Otherwise, you need to configure it manually. ```python import dspy from dsp.trackers.langfuse_tracker import LangfuseTracker diff --git a/dsp/modules/lm.py b/dsp/modules/lm.py index 13ddfbffa..5cf3ff8dd 100644 --- a/dsp/modules/lm.py +++ b/dsp/modules/lm.py @@ -4,9 +4,10 @@ class LM(ABC): """Abstract class for language models.""" - def __init__(self, model): + def __init__(self, model, tracker=None): self.kwargs = { "model": model, + "tracker": tracker, "temperature": 0.0, "max_tokens": 150, "top_p": 1, @@ -138,5 +139,6 @@ def copy(self, **kwargs): """Returns a copy of the language model with the same parameters.""" kwargs = {**self.kwargs, **kwargs} model = kwargs.pop("model") + tracker = kwargs.pop("tracker") - return self.__class__(model=model, **kwargs) \ No newline at end of file + return self.__class__(model=model, tracker=tracker, **kwargs) diff --git a/dsp/trackers/tracker_decorator.py b/dsp/trackers/tracker_decorator.py index 065c0d706..75e7db024 100644 --- a/dsp/trackers/tracker_decorator.py +++ b/dsp/trackers/tracker_decorator.py @@ -2,8 +2,11 @@ def tracker_decorator(func): def tracker_wrapper(*args, **kwargs): completions = func(*args, **kwargs) - from dsp import BaseTracker - if hasattr(args[0], "tracker") and issubclass(args[0].tracker.__class__, BaseTracker): - args[0].tracker.call(i=args[1], o=completions, **kwargs) + try: + from dsp import BaseTracker + if hasattr(args[0], "tracker") and issubclass(args[0].tracker.__class__, BaseTracker): + args[0].tracker.call(i=args[1], o=completions, **args[0].kwargs) + except Exception as e: + raise RuntimeError(f"tracker TypeError and tracker.call() fail, detail:{e}") from e return completions return tracker_wrapper From b4ad853be397dfaf34229465daa4319a629c95e5 Mon Sep 17 00:00:00 2001 From: xucai Date: Tue, 9 Jul 2024 20:33:27 +0800 Subject: [PATCH 14/25] tracker should be placed outside of kwargs(issubclass(args[0].tracker.__class__, BaseTracker)) --- dsp/modules/lm.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/dsp/modules/lm.py b/dsp/modules/lm.py index 5cf3ff8dd..439f0f73b 100644 --- a/dsp/modules/lm.py +++ b/dsp/modules/lm.py 
@@ -7,7 +7,6 @@ class LM(ABC): def __init__(self, model, tracker=None): self.kwargs = { "model": model, - "tracker": tracker, "temperature": 0.0, "max_tokens": 150, "top_p": 1, @@ -16,6 +15,7 @@ def __init__(self, model, tracker=None): "n": 1, } self.provider = "default" + self.tracker = tracker self.history = [] @@ -139,6 +139,5 @@ def copy(self, **kwargs): """Returns a copy of the language model with the same parameters.""" kwargs = {**self.kwargs, **kwargs} model = kwargs.pop("model") - tracker = kwargs.pop("tracker") - return self.__class__(model=model, tracker=tracker, **kwargs) + return self.__class__(model=model, **kwargs) From 2ae10c75f2ad97ae3cb5adfcb79db9861d62b3c3 Mon Sep 17 00:00:00 2001 From: xucai Date: Sun, 4 Aug 2024 01:33:32 +0800 Subject: [PATCH 15/25] Supports users to manually call the tracker. --- README.md | 9 ++++++--- dsp/trackers/base.py | 2 +- dsp/trackers/langfuse_tracker.py | 4 ++-- dsp/trackers/tracker_decorator.py | 2 +- 4 files changed, 10 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 7131114fe..8114988d5 100644 --- a/README.md +++ b/README.md @@ -93,14 +93,17 @@ After that, you will get three environment variables: `LANGFUSE_SECRET_ Just set the environment variables and Langfuse will read them automatically. If you are using **openai** or **azure_openai**, then your preparations are now complete. -Otherwise, you need to configure it manually. + +For other modules, you need to configure it manually. ```python import dspy from dsp.trackers.langfuse_tracker import LangfuseTracker -# e.g.: # Assuming the environment variables have been set langfuse = LangfuseTracker() -dspy.OllamaLocal(model="llama2", tracker=langfuse) +turbo = dspy.OllamaLocal(model="llama2", tracker=langfuse) +dspy.settings.configure(lm=turbo) +# turbo.tracker.call(prompt="your prompt", output='llm answer') # If you are using a module other than OllamaLocal, you will need to call it manually. 
``` ## 2) Documentation diff --git a/dsp/trackers/base.py b/dsp/trackers/base.py index 1094a5038..e46322cc0 100644 --- a/dsp/trackers/base.py +++ b/dsp/trackers/base.py @@ -4,5 +4,5 @@ def __init__(self): pass @classmethod - def call(cls, **kwargs): + def call(cls, *args, **kwargs): pass diff --git a/dsp/trackers/langfuse_tracker.py b/dsp/trackers/langfuse_tracker.py index cf8c9db72..909919d50 100644 --- a/dsp/trackers/langfuse_tracker.py +++ b/dsp/trackers/langfuse_tracker.py @@ -83,5 +83,5 @@ def __init__(self, *, public_key: Optional[str] = None, secret_key: Optional[str self.trace: Optional[StatefulTraceClient] = None self._task_manager = self.langfuse.task_manager - def call(self, i, o, **kwargs): - self.langfuse.trace(input=i, output=o, metadata=kwargs) + def call(self, i, o, name=None, **kwargs): + self.langfuse.trace(input=i, output=o, name=name, metadata=kwargs) diff --git a/dsp/trackers/tracker_decorator.py b/dsp/trackers/tracker_decorator.py index 75e7db024..fbe39a8e1 100644 --- a/dsp/trackers/tracker_decorator.py +++ b/dsp/trackers/tracker_decorator.py @@ -5,7 +5,7 @@ def tracker_wrapper(*args, **kwargs): try: from dsp import BaseTracker if hasattr(args[0], "tracker") and issubclass(args[0].tracker.__class__, BaseTracker): - args[0].tracker.call(i=args[1], o=completions, **args[0].kwargs) + args[0].tracker.call(i=args[1], o=completions, name=args[0].__class__.__name__, **args[0].kwargs) except Exception as e: raise RuntimeError(f"tracker TypeError and tracker.call() fail, detail:{e}") from e return completions From 024336868d1e1d29ba5f55a759bb4511c81e2267 Mon Sep 17 00:00:00 2001 From: xucai Date: Mon, 5 Aug 2024 01:34:06 +0800 Subject: [PATCH 16/25] Support manual call of tracker & supplementary document --- README.md | 8 +++++--- dsp/modules/lm.py | 14 ++++++++++++++ dsp/modules/ollama.py | 2 -- dsp/trackers/tracker_decorator.py | 2 +- 4 files changed, 20 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 8114988d5..2bfe9ff1e 100644 --- a/README.md +++ b/README.md @@ -94,16 +94,18 @@ Just set the environment variables and Langfuse will read them automatically. If you are using **openai** or **azure_openai**, then your preparations are now complete. -For other modules, you need to configure it manually. +For other modules, you need to configure and call it manually. ```python import dspy from dsp.trackers.langfuse_tracker import LangfuseTracker # e.g.: # Assuming the environment variables have been set langfuse = LangfuseTracker() -turbo = dspy.OllamaLocal(model="llama2", tracker=langfuse) +turbo = dspy.OllamaLocal() dspy.settings.configure(lm=turbo) -# turbo.tracker.call(prompt="your prompt", output='llm answer') # If you are using a module other than OllamaLocal, you will need to call it manually. + +
+ +completions = turbo("Hi,how's it going today?") +turbo.tracker_call(tracker=langfuse) ``` ## 2) Documentation diff --git a/dsp/modules/lm.py b/dsp/modules/lm.py index c9904699f..be629eb12 100644 --- a/dsp/modules/lm.py +++ b/dsp/modules/lm.py @@ -136,6 +136,20 @@ def inspect_history(self, n: int = 1, skip: int = 0, color_format: bool = True): def __call__(self, prompt, only_completed=True, return_sorted=False, **kwargs): pass + def tracker_call(self, tracker, prompt=None, output=None, name=None, **kwargs): + from dsp import BaseTracker + assert issubclass(tracker.__class__, BaseTracker), "tracker must be a subclass of BaseTracker" + assert self.history, "tracker.call() requires a previous request" + + last_req = self.history[-1] + if not prompt: + prompt = last_req.get('prompt', None) + if not output: + output = last_req.get('response', None) + kwargs = {**self.kwargs, **kwargs} + name = name if name else self.__class__.__name__ + tracker.call(i=prompt, o=output, name=name, **kwargs) + def copy(self, **kwargs): """Returns a copy of the language model with the same parameters.""" kwargs = {**self.kwargs, **kwargs} diff --git a/dsp/modules/ollama.py b/dsp/modules/ollama.py index 4d604a148..e1e1b0fc2 100644 --- a/dsp/modules/ollama.py +++ b/dsp/modules/ollama.py @@ -5,7 +5,6 @@ import requests from dsp.modules.lm import LM from dsp.trackers.base import BaseTracker -from dsp.trackers.tracker_decorator import tracker_decorator def post_request_metadata(model_name, prompt): @@ -155,7 +154,6 @@ def request(self, prompt: str, **kwargs): def _get_choice_text(self, choice: dict[str, Any]) -> str: return choice["message"]["content"] - @tracker_decorator def __call__( self, prompt: str, diff --git a/dsp/trackers/tracker_decorator.py b/dsp/trackers/tracker_decorator.py index fbe39a8e1..30bbc34af 100644 --- a/dsp/trackers/tracker_decorator.py +++ b/dsp/trackers/tracker_decorator.py @@ -1,5 +1,5 @@ -def tracker_decorator(func): +def tracker(func): def tracker_wrapper(*args, **kwargs): completions = func(*args, **kwargs) try: From 2a73b0d2964e736c24cbe389967339e2952dc2e4 Mon Sep 17 00:00:00 2001 From: xucai Date: Wed, 14 Aug 2024 03:47:28 +0800 Subject: [PATCH 17/25] Support langfuse & add md file --- README.md | 25 ---------- .../about_prompt_visible.md | 49 +++++++++++++++++++ dsp/modules/azure_openai.py | 1 - dsp/modules/gpt3.py | 1 - dsp/modules/lm.py | 2 +- dsp/modules/ollama.py | 3 -- dsp/trackers/tracker_decorator.py | 12 ----- 7 files changed, 50 insertions(+), 43 deletions(-) create mode 100644 docs/api/language_model_clients/about_prompt_visible.md delete mode 100644 dsp/trackers/tracker_decorator.py diff --git a/README.md b/README.md index 2bfe9ff1e..70b110713 100644 --- a/README.md +++ b/README.md @@ -83,31 +83,6 @@ For the optional (alphabetically sorted) [Chromadb](https://github.com/chroma-co pip install dspy-ai[chromadb] # or [groq] or [marqo] or [milvus] or [mongodb] or [myscale] or [pinecone] or [qdrant] or [snowflake] or [weaviate] or [langfuse] ``` -### How to make the prompt visible -We have now integrated langfuse as one of the tracker. - -How to configure langfuse?[langfuse details](https://langfuse.com/docs/deployment/self-host) . - -After that, you will get three environment variables, they are `LANGFUSE_SECRET_KEY`、`LANGFUSE_PUBLIC_KEY` and `LANGFUSE_HOST`. - -Just write the environment variables and langfuse will automatically read them. - -If you are using **openai** or **azure_openai**, then your preparations are now complete. 
- -For other modules, you need to configure and call it manually. -```python -import dspy -from dsp.trackers.langfuse_tracker import LangfuseTracker -# e.g.: -# Assuming the environment variables have been set -langfuse = LangfuseTracker() -turbo = dspy.OllamaLocal() -dspy.settings.configure(lm=turbo) - -completions = turbo("Hi, how's it going today?") -turbo.tracker_call(tracker=langfuse) -``` ## 2) Documentation diff --git a/docs/api/language_model_clients/about_prompt_visible.md b/docs/api/language_model_clients/about_prompt_visible.md new file mode 100644 index 000000000..8d23d3111 --- /dev/null +++ b/docs/api/language_model_clients/about_prompt_visible.md @@ -0,0 +1,49 @@ +# How to make the prompt visible + +## Langfuse +We have now integrated Langfuse as one of the trackers. + +How do you configure Langfuse? See the [langfuse details](https://langfuse.com/docs/deployment/self-host). + +### Install langfuse + +```shell +pip install langfuse +``` + +After that, you will get three environment variables: `LANGFUSE_SECRET_KEY`, `LANGFUSE_PUBLIC_KEY`, and `LANGFUSE_HOST`. + +Just set the environment variables and Langfuse will read them automatically. + +If you are using **openai** or **azure_openai**, then your preparations are now complete. + +For other modules, you need to configure and call it manually. + +### Example + +```python +import dspy +from dsp.trackers.langfuse_tracker import LangfuseTracker +# e.g.: +# Assuming the environment variables have been set +langfuse = LangfuseTracker() +turbo = dspy.OllamaLocal() +dspy.settings.configure(lm=turbo) + +completions = turbo("Hi, how's it going today?") +turbo.tracker_call(tracker=langfuse) +``` + +## Custom Tracker + +We provide `BaseTracker`; just inherit from it and override the `call()` method: +```python +# custom_tracker.py +from dsp.trackers.base import BaseTracker + +class CustomTracker(BaseTracker): + + def call(self, *args, **kwargs): + pass + +``` diff --git a/dsp/modules/azure_openai.py b/dsp/modules/azure_openai.py index 79fec2c75..f4ccdaea6 100644 --- a/dsp/modules/azure_openai.py +++ b/dsp/modules/azure_openai.py @@ -14,7 +14,6 @@ logging.info(f"You are using Langfuse,version{langfuse.__version__}") except: import openai - logging.info(f"You are using openai,version{openai.version.__version__}") from dsp.modules.cache_utils import CacheMemory, NotebookCacheMemory, cache_turn_on diff --git a/dsp/modules/gpt3.py b/dsp/modules/gpt3.py index 91cda8684..18b5be726 100644 --- a/dsp/modules/gpt3.py +++ b/dsp/modules/gpt3.py @@ -13,7 +13,6 @@ logging.info(f"You are using Langfuse,version{langfuse.__version__}") except: import openai - logging.info(f"You are using openai,version{openai.version.__version__}") from dsp.modules.cache_utils import CacheMemory, NotebookCacheMemory, cache_turn_on from dsp.modules.lm import LM diff --git a/dsp/modules/lm.py b/dsp/modules/lm.py index 67d2d183f..e06962a63 100644 --- a/dsp/modules/lm.py +++ b/dsp/modules/lm.py @@ -137,7 +137,7 @@ def __call__(self, prompt, only_completed=True, return_sorted=False, **kwargs): pass def tracker_call(self, tracker, prompt=None, output=None, name=None, **kwargs): - from dsp import BaseTracker + from dsp.trackers.base import BaseTracker assert issubclass(tracker.__class__, BaseTracker), "tracker must be a subclass of BaseTracker" assert self.history, 
"tracker.call() requires a previous request" diff --git a/dsp/modules/ollama.py b/dsp/modules/ollama.py index f8083a757..acf248410 100644 --- a/dsp/modules/ollama.py +++ b/dsp/modules/ollama.py @@ -4,7 +4,6 @@ import requests from dsp.modules.lm import LM -from dsp.trackers.base import BaseTracker def post_request_metadata(model_name, prompt): @@ -46,7 +45,6 @@ def __init__( num_ctx: int = 1024, format: Optional[Literal["json"]] = None, system: Optional[str] = None, - tracker: BaseTracker = BaseTracker, **kwargs, ): super().__init__(model) @@ -58,7 +56,6 @@ def __init__( self.timeout_s = timeout_s self.format = format self.system = system - self.tracker = tracker self.kwargs = { "temperature": temperature, diff --git a/dsp/trackers/tracker_decorator.py b/dsp/trackers/tracker_decorator.py deleted file mode 100644 index 30bbc34af..000000000 --- a/dsp/trackers/tracker_decorator.py +++ /dev/null @@ -1,12 +0,0 @@ - -def tracker(func): - def tracker_wrapper(*args, **kwargs): - completions = func(*args, **kwargs) - try: - from dsp import BaseTracker - if hasattr(args[0], "tracker") and issubclass(args[0].tracker.__class__, BaseTracker): - args[0].tracker.call(i=args[1], o=completions, name=args[0].__class__.__name__, **args[0].kwargs) - except Exception as e: - raise RuntimeError(f"tracker TypeError and tracker.call() fail, detail:{e}") from e - return completions - return tracker_wrapper From b9aa5b7bfc675f122580556e454e1d80ca449ba1 Mon Sep 17 00:00:00 2001 From: xucai Date: Wed, 14 Aug 2024 14:26:51 +0800 Subject: [PATCH 18/25] Support langfuse & add md file --- dsp/modules/ollama.py | 57 ++++++++++++++++++++++++++----------------- 1 file changed, 34 insertions(+), 23 deletions(-) diff --git a/dsp/modules/ollama.py b/dsp/modules/ollama.py index acf248410..c5780fb43 100644 --- a/dsp/modules/ollama.py +++ b/dsp/modules/ollama.py @@ -3,6 +3,7 @@ from typing import Any, Literal, Optional import requests + from dsp.modules.lm import LM @@ -30,22 +31,22 @@ class OllamaLocal(LM): """ def __init__( - self, - model: str = "llama2", - model_type: Literal["chat", "text"] = "text", - base_url: str = "http://localhost:11434", - timeout_s: float = 120, - temperature: float = 0.0, - max_tokens: int = 150, - top_p: int = 1, - top_k: int = 20, - frequency_penalty: float = 0, - presence_penalty: float = 0, - n: int = 1, - num_ctx: int = 1024, - format: Optional[Literal["json"]] = None, - system: Optional[str] = None, - **kwargs, + self, + model: str = "llama2", + model_type: Literal["chat", "text"] = "text", + base_url: str = "http://localhost:11434", + timeout_s: float = 120, + temperature: float = 0.0, + max_tokens: int = 150, + top_p: int = 1, + top_k: int = 20, + frequency_penalty: float = 0, + presence_penalty: float = 0, + n: int = 1, + num_ctx: int = 1024, + format: Optional[Literal["json"]] = None, + system: Optional[str] = None, + **kwargs, ): super().__init__(model) @@ -91,9 +92,18 @@ def basic_request(self, prompt: str, **kwargs): "options": {k: v for k, v in kwargs.items() if k not in ["n", "max_tokens"]}, "stream": False, } + + # Set the format if it was defined + if self.format: + settings_dict["format"] = self.format + if self.model_type == "chat": settings_dict["messages"] = [{"role": "user", "content": prompt}] else: + # Overwrite system prompt defined in modelfile + if self.system: + settings_dict["system"] = self.system + settings_dict["prompt"] = prompt urlstr = f"{self.base_url}/api/chat" if self.model_type == "chat" else f"{self.base_url}/api/generate" @@ -139,6 +149,7 @@ def 
basic_request(self, prompt: str, **kwargs): "raw_kwargs": raw_kwargs, } self.history.append(history) + return request_info def request(self, prompt: str, **kwargs): @@ -152,11 +163,11 @@ def _get_choice_text(self, choice: dict[str, Any]) -> str: return choice["message"]["content"] def __call__( - self, - prompt: str, - only_completed: bool = True, - return_sorted: bool = False, - **kwargs, + self, + prompt: str, + only_completed: bool = True, + return_sorted: bool = False, + **kwargs, ) -> list[dict[str, Any]]: """Retrieves completions from Ollama. @@ -184,7 +195,7 @@ def __call__( completions = [self._get_choice_text(c) for c in choices] return completions - + def copy(self, **kwargs): """Returns a copy of the language model with the same parameters.""" kwargs = {**self.kwargs, **kwargs} @@ -195,4 +206,4 @@ def copy(self, **kwargs): base_url=self.base_url, timeout_s=self.timeout_s, **kwargs, - ) + ) \ No newline at end of file From c0853ec1357b46bc6950244302314959c3152229 Mon Sep 17 00:00:00 2001 From: xucai Date: Wed, 14 Aug 2024 14:41:45 +0800 Subject: [PATCH 19/25] Support langfuse & add md file --- dsp/modules/ollama.py | 42 +++++++++++++++++++++--------------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/dsp/modules/ollama.py b/dsp/modules/ollama.py index c5780fb43..f2352312e 100644 --- a/dsp/modules/ollama.py +++ b/dsp/modules/ollama.py @@ -31,22 +31,22 @@ class OllamaLocal(LM): """ def __init__( - self, - model: str = "llama2", - model_type: Literal["chat", "text"] = "text", - base_url: str = "http://localhost:11434", - timeout_s: float = 120, - temperature: float = 0.0, - max_tokens: int = 150, - top_p: int = 1, - top_k: int = 20, - frequency_penalty: float = 0, - presence_penalty: float = 0, - n: int = 1, - num_ctx: int = 1024, - format: Optional[Literal["json"]] = None, - system: Optional[str] = None, - **kwargs, + self, + model: str = "llama2", + model_type: Literal["chat", "text"] = "text", + base_url: str = "http://localhost:11434", + timeout_s: float = 120, + temperature: float = 0.0, + max_tokens: int = 150, + top_p: int = 1, + top_k: int = 20, + frequency_penalty: float = 0, + presence_penalty: float = 0, + n: int = 1, + num_ctx: int = 1024, + format: Optional[Literal["json"]] = None, + system: Optional[str] = None, + **kwargs, ): super().__init__(model) @@ -163,11 +163,11 @@ def _get_choice_text(self, choice: dict[str, Any]) -> str: return choice["message"]["content"] def __call__( - self, - prompt: str, - only_completed: bool = True, - return_sorted: bool = False, - **kwargs, + self, + prompt: str, + only_completed: bool = True, + return_sorted: bool = False, + **kwargs, ) -> list[dict[str, Any]]: """Retrieves completions from Ollama. 
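A minimal sketch of the tracker contract this series has converged on by this point: any `BaseTracker` subclass implementing `call(i, o, name=None, **kwargs)` can be handed to `LM.tracker_call`, which replays the last request from `self.history` into it. The `StdoutTracker` class, the `llama2` model, and the prompt below are illustrative assumptions, not part of the patches.

```python
import dspy
from dsp.trackers.base import BaseTracker


class StdoutTracker(BaseTracker):
    """Hypothetical tracker that prints traces instead of sending them to Langfuse."""

    def call(self, i, o, name=None, **kwargs):
        # i: prompt of the replayed request; o: the recorded raw response;
        # name: the LM class name; kwargs: the LM's generation parameters.
        print(f"[{name}] prompt={i!r} response={o!r} params={kwargs}")


turbo = dspy.OllamaLocal(model="llama2")
dspy.settings.configure(lm=turbo)
turbo("Hi, how's it going today?")
turbo.tracker_call(tracker=StdoutTracker())  # replays the last history entry into the tracker
```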
From ea3d37a752f1cce57d63f75867c08d9d24c34266 Mon Sep 17 00:00:00 2001 From: xucai Date: Wed, 14 Aug 2024 14:43:54 +0800 Subject: [PATCH 20/25] Support langfuse & add md file --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 70b110713..f31ca47da 100644 --- a/README.md +++ b/README.md @@ -77,10 +77,10 @@ Or open our intro notebook in Google Colab: [ Date: Thu, 15 Aug 2024 14:51:54 -0700 Subject: [PATCH 23/25] Update azure_openai.py --- dsp/modules/azure_openai.py | 1 + 1 file changed, 1 insertion(+) diff --git a/dsp/modules/azure_openai.py b/dsp/modules/azure_openai.py index f4ccdaea6..79fec2c75 100644 --- a/dsp/modules/azure_openai.py +++ b/dsp/modules/azure_openai.py @@ -14,6 +14,7 @@ logging.info(f"You are using Langfuse,version{langfuse.__version__}") except: import openai + logging.info(f"You are using openai,version{openai.version.__version__}") from dsp.modules.cache_utils import CacheMemory, NotebookCacheMemory, cache_turn_on From 500ff5eb999d0173a67d225b3b3e3a534265324d Mon Sep 17 00:00:00 2001 From: arnavsinghvi11 <54859892+arnavsinghvi11@users.noreply.github.com> Date: Thu, 15 Aug 2024 14:56:46 -0700 Subject: [PATCH 24/25] Update azure_openai.py --- dsp/modules/azure_openai.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/dsp/modules/azure_openai.py b/dsp/modules/azure_openai.py index 79fec2c75..828489722 100644 --- a/dsp/modules/azure_openai.py +++ b/dsp/modules/azure_openai.py @@ -14,8 +14,6 @@ logging.info(f"You are using Langfuse,version{langfuse.__version__}") except: import openai - logging.info(f"You are using openai,version{openai.version.__version__}") - from dsp.modules.cache_utils import CacheMemory, NotebookCacheMemory, cache_turn_on from dsp.modules.lm import LM From 78afb06171b0a28130174476294e80c350f594ad Mon Sep 17 00:00:00 2001 From: arnavsinghvi11 <54859892+arnavsinghvi11@users.noreply.github.com> Date: Thu, 15 Aug 2024 14:59:44 -0700 Subject: [PATCH 25/25] Update gpt3.py --- dsp/modules/gpt3.py | 1 - 1 file changed, 1 deletion(-) diff --git a/dsp/modules/gpt3.py b/dsp/modules/gpt3.py index 832c4b723..548f0c0df 100644 --- a/dsp/modules/gpt3.py +++ b/dsp/modules/gpt3.py @@ -13,7 +13,6 @@ logging.info(f"You are using Langfuse,version{langfuse.__version__}") except: import openai - logging.info(f"You are using openai,version{openai.version.__version__}") from dsp.modules.cache_utils import CacheMemory, NotebookCacheMemory, cache_turn_on from dsp.modules.lm import LM
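For reference, a short usage sketch of `LangfuseTracker` as it stands after the final patch. The explicit-key form mirrors the constructor arguments introduced in patch 04; the host, trace name, and prompts below are placeholder assumptions, and a reachable Langfuse deployment is assumed.

```python
import os

import dspy
from dsp.trackers.langfuse_tracker import LangfuseTracker

# Option 1: rely on LANGFUSE_SECRET_KEY / LANGFUSE_PUBLIC_KEY / LANGFUSE_HOST from the
# environment; the constructor falls back to them and defaults the host to Langfuse Cloud.
tracker = LangfuseTracker()

# Option 2: pass the configuration explicitly (the arguments are keyword-only).
tracker = LangfuseTracker(
    public_key=os.environ["LANGFUSE_PUBLIC_KEY"],
    secret_key=os.environ["LANGFUSE_SECRET_KEY"],
    host="https://cloud.langfuse.com",
    trace_name="dspy-run",
)

turbo = dspy.OllamaLocal(model="llama2")
dspy.settings.configure(lm=turbo)
turbo("What is 2 + 2?")
turbo.tracker_call(tracker=tracker)  # records the last prompt/response as a Langfuse trace
```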