From 432757f95edf1e1bc9af79d1abc62d2a3a5247ec Mon Sep 17 00:00:00 2001 From: Kacper-W-Kozdon Date: Fri, 12 Jul 2024 17:11:42 +0200 Subject: [PATCH 01/13] fix(dsp): remove 'prompt' from settings_dictionary --- dsp/modules/unify.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/dsp/modules/unify.py b/dsp/modules/unify.py index 6963cd7c4..a8a3f83af 100644 --- a/dsp/modules/unify.py +++ b/dsp/modules/unify.py @@ -52,13 +52,10 @@ def basic_request(self, prompt: str, **kwargs) -> Any: "endpoint": self.endpoint, "stream": self.stream, } - if self.model_type == "chat": - messages = [{"role": "user", "content": prompt}] - settings_dict["messages"] = messages - if self.system_prompt: - settings_dict["messages"].insert(0, {"role": "system", "content": self.system_prompt}) - else: - settings_dict["prompt"] = prompt + messages = [{"role": "user", "content": prompt}] + settings_dict["messages"] = messages + if self.system_prompt: + settings_dict["messages"].insert(0, {"role": "system", "content": self.system_prompt}) logging.debug(f"Settings Dict: {settings_dict}") From 24dbecd5316d09bfeb27ddee41a66260c98042ec Mon Sep 17 00:00:00 2001 From: Kacper-W-Kozdon Date: Fri, 12 Jul 2024 17:12:14 +0200 Subject: [PATCH 02/13] fix(dsp): remove 'prompt' from settings_dictionary --- dsp/modules/unify.py | 20 ++++++-------------- 1 file changed, 6 insertions(+), 14 deletions(-) diff --git a/dsp/modules/unify.py b/dsp/modules/unify.py index a8a3f83af..b6cd1f57a 100644 --- a/dsp/modules/unify.py +++ b/dsp/modules/unify.py @@ -59,20 +59,12 @@ def basic_request(self, prompt: str, **kwargs) -> Any: logging.debug(f"Settings Dict: {settings_dict}") - if "messages" in settings_dict: - response = self.client.generate( - messages=settings_dict["messages"], - stream=settings_dict["stream"], - temperature=kwargs["temperature"], - max_tokens=kwargs["max_tokens"], - ) - else: - response = self.client.generate( - user_prompt=settings_dict["prompt"], - 
stream=settings_dict["stream"], - temperature=kwargs["temperature"], - max_tokens=kwargs["max_tokens"], - ) + response = self.client.generate( + messages=settings_dict["messages"], + stream=settings_dict["stream"], + temperature=kwargs["temperature"], + max_tokens=kwargs["max_tokens"], + ) response = {"choices": [{"message": {"content": response}}]} # response with choices From 4bffe5d7e4d10897885fe7001307dc66eb811c59 Mon Sep 17 00:00:00 2001 From: Kacper-W-Kozdon Date: Fri, 12 Jul 2024 17:14:14 +0200 Subject: [PATCH 03/13] fix(dsp): remove 'prompt' from settings_dictionary --- dsp/modules/unify.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/dsp/modules/unify.py b/dsp/modules/unify.py index b6cd1f57a..db33b352e 100644 --- a/dsp/modules/unify.py +++ b/dsp/modules/unify.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Literal, Optional +from typing import Any, Optional from unify.clients import Unify as UnifyClient @@ -14,7 +14,6 @@ def __init__( endpoint="router@q:1|c:4.65e-03|t:2.08e-05|i:2.07e-03", # model: Optional[str] = None, # provider: Optional[str] = None, - model_type: Literal["chat", "text"] = "chat", stream: Optional[bool] = False, base_url="https://api.unify.ai/v0", system_prompt: Optional[str] = None, @@ -32,7 +31,6 @@ def __init__( super().__init__(model=self.endpoint) self.system_prompt = system_prompt - self.model_type = model_type self.kwargs = { "temperature": 0.0, "max_tokens": 150, @@ -77,8 +75,6 @@ def basic_request(self, prompt: str, **kwargs) -> Any: def request(self, prompt: str, **kwargs) -> Any: """Handles retreival of model completions whilst handling rate limiting and caching.""" - if "model_type" in kwargs: - del kwargs["model_type"] return self.basic_request(prompt, **kwargs) def __call__( From f09c9a9a3881d6aa6447bce53863d1367502ea48 Mon Sep 17 00:00:00 2001 From: Kacper-W-Kozdon Date: Fri, 12 Jul 2024 17:16:15 +0200 Subject: [PATCH 04/13] fix(dsp): remove 'prompt' from settings_dictionary --- 
dsp/modules/unify.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/dsp/modules/unify.py b/dsp/modules/unify.py index db33b352e..8cf104d53 100644 --- a/dsp/modules/unify.py +++ b/dsp/modules/unify.py @@ -73,10 +73,6 @@ def basic_request(self, prompt: str, **kwargs) -> Any: return response - def request(self, prompt: str, **kwargs) -> Any: - """Handles retreival of model completions whilst handling rate limiting and caching.""" - return self.basic_request(prompt, **kwargs) - def __call__( self, prompt: str, From f64331c3335a0da43e42841a372ce9cdc5874df4 Mon Sep 17 00:00:00 2001 From: Kacper-W-Kozdon Date: Fri, 12 Jul 2024 18:47:22 +0200 Subject: [PATCH 05/13] fix(dsp): __call__ return inspect_history() --- dsp/modules/lm.py | 2 +- dsp/modules/unify.py | 51 ++++++++++++++++++-------------------------- 2 files changed, 22 insertions(+), 31 deletions(-) diff --git a/dsp/modules/lm.py b/dsp/modules/lm.py index 3e3c641e8..7c5b9f2a2 100644 --- a/dsp/modules/lm.py +++ b/dsp/modules/lm.py @@ -102,7 +102,7 @@ def inspect_history(self, n: int = 1, skip: int = 0): "tensorrt_llm", ): text = choices - elif provider == "openai" or provider == "ollama" or provider == "unify": + elif provider == "openai" or provider == "ollama": text = " " + self._get_choice_text(choices[0]).strip() elif provider == "groq": text = " " + choices diff --git a/dsp/modules/unify.py b/dsp/modules/unify.py index 8cf104d53..1762d1b94 100644 --- a/dsp/modules/unify.py +++ b/dsp/modules/unify.py @@ -6,30 +6,28 @@ from dsp.modules.lm import LM -class Unify(LM): +class Unify(LM, UnifyClient): """A class to interact with the Unify AI API.""" def __init__( self, - endpoint="router@q:1|c:4.65e-03|t:2.08e-05|i:2.07e-03", - # model: Optional[str] = None, - # provider: Optional[str] = None, + endpoint: str = "router@q:1|c:4.65e-03|t:2.08e-05|i:2.07e-03", + model: Optional[str] = None, + provider: Optional[str] = None, + api_key=None, stream: Optional[bool] = False, - base_url="https://api.unify.ai/v0", 
system_prompt: Optional[str] = None, + base_url: str = "https://api.unify.ai/v0", n: int = 1, - api_key=None, **kwargs, ): - self.api_key = api_key - # self.model = model - # self.provider = provider - self.endpoint = endpoint + self.base_url = base_url self.stream = stream - self.client = UnifyClient(api_key=self.api_key, endpoint=self.endpoint) - - super().__init__(model=self.endpoint) - + LM.__init__(self, model) + UnifyClient.__init__(self, endpoint=endpoint, model=model, provider=provider, api_key=api_key) + self.model = self._model + self.provider = self._provider + self.endpoint = self._endpoint self.system_prompt = system_prompt self.kwargs = { "temperature": 0.0, @@ -57,7 +55,7 @@ def basic_request(self, prompt: str, **kwargs) -> Any: logging.debug(f"Settings Dict: {settings_dict}") - response = self.client.generate( + response = self.generate( messages=settings_dict["messages"], stream=settings_dict["stream"], temperature=kwargs["temperature"], @@ -70,12 +68,15 @@ def basic_request(self, prompt: str, **kwargs) -> Any: logging.error("Unexpected response format, not response") elif "choices" not in response: logging.error(f"no choices in response: {response}") - + if isinstance(response, dict) and "choices" in response: + self.history.append({"prompt": prompt, "response": response}) + else: + raise ValueError("Unexpected response format") return response def __call__( self, - prompt: str, + prompt: Optional[str], only_completed: bool = True, return_sorted: bool = False, **kwargs, @@ -83,16 +84,6 @@ def __call__( """Request completions from the Unify API.""" assert only_completed, "for now" assert return_sorted is False, "for now" - - n = kwargs.pop("n", 1) - completions = [] - - for _ in range(n): - response = self.request(prompt, **kwargs) - - if isinstance(response, dict) and "choices" in response: - completions.append(response["choices"][0]["message"]["content"]) - else: - raise ValueError("Unexpected response format") - - return completions + n: int = 
kwargs.get("n") or 1 + skip: int = kwargs.get("skip") or 0 + return self.inspect_history(n=n, skip=skip) From 1af6156dfd9d7d82f7b1809c829356c43f422cf0 Mon Sep 17 00:00:00 2001 From: Kacper-W-Kozdon Date: Fri, 12 Jul 2024 18:56:02 +0200 Subject: [PATCH 06/13] fix(dsp): endpoint, model and provider are set as @property in Unify --- dsp/modules/unify.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/dsp/modules/unify.py b/dsp/modules/unify.py index 1762d1b94..b32cbaa43 100644 --- a/dsp/modules/unify.py +++ b/dsp/modules/unify.py @@ -25,9 +25,6 @@ def __init__( self.stream = stream LM.__init__(self, model) UnifyClient.__init__(self, endpoint=endpoint, model=model, provider=provider, api_key=api_key) - self.model = self._model - self.provider = self._provider - self.endpoint = self._endpoint self.system_prompt = system_prompt self.kwargs = { "temperature": 0.0, From 09648e7563ce934f9ff38cc45eca989973608e8a Mon Sep 17 00:00:00 2001 From: Kacper-W-Kozdon Date: Fri, 12 Jul 2024 19:35:20 +0200 Subject: [PATCH 07/13] fix(dsp): try super() --- dsp/modules/unify.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/dsp/modules/unify.py b/dsp/modules/unify.py index b32cbaa43..14176c007 100644 --- a/dsp/modules/unify.py +++ b/dsp/modules/unify.py @@ -23,8 +23,9 @@ def __init__( ): self.base_url = base_url self.stream = stream - LM.__init__(self, model) - UnifyClient.__init__(self, endpoint=endpoint, model=model, provider=provider, api_key=api_key) + # LM.__init__(self, model) + # UnifyClient.__init__(self, endpoint=endpoint, model=model, provider=provider, api_key=api_key) + super(Unify, self).__init__(endpoint=endpoint, model=model, provider=provider, api_key=api_key) self.system_prompt = system_prompt self.kwargs = { "temperature": 0.0, From 7a3e34c2093860b1ad9bdfeacf9787204030d0a5 Mon Sep 17 00:00:00 2001 From: Kacper-W-Kozdon Date: Fri, 12 Jul 2024 19:38:56 +0200 Subject: [PATCH 08/13] fix(dsp): try super() --- dsp/modules/unify.py | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/dsp/modules/unify.py b/dsp/modules/unify.py index 14176c007..db96b4c9c 100644 --- a/dsp/modules/unify.py +++ b/dsp/modules/unify.py @@ -25,7 +25,7 @@ def __init__( self.stream = stream # LM.__init__(self, model) # UnifyClient.__init__(self, endpoint=endpoint, model=model, provider=provider, api_key=api_key) - super(Unify, self).__init__(endpoint=endpoint, model=model, provider=provider, api_key=api_key) + super().__init__(endpoint=endpoint, model=model, provider=provider, api_key=api_key) self.system_prompt = system_prompt self.kwargs = { "temperature": 0.0, From dccc52f65c9bc0380c8739162309f8926cccf1b6 Mon Sep 17 00:00:00 2001 From: Kacper-W-Kozdon Date: Sun, 14 Jul 2024 00:43:22 +0200 Subject: [PATCH 09/13] fix(dsp): adds provider setter --- dsp/modules/unify.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/dsp/modules/unify.py b/dsp/modules/unify.py index db96b4c9c..9f1e19b2e 100644 --- a/dsp/modules/unify.py +++ b/dsp/modules/unify.py @@ -23,9 +23,9 @@ def __init__( ): self.base_url = base_url self.stream = stream - # LM.__init__(self, model) - # UnifyClient.__init__(self, endpoint=endpoint, model=model, provider=provider, api_key=api_key) - super().__init__(endpoint=endpoint, model=model, provider=provider, api_key=api_key) + LM.__init__(self, model) + UnifyClient.__init__(self, endpoint=endpoint, model=model, provider=provider, api_key=api_key) + # super().__init__(model) self.system_prompt = system_prompt self.kwargs = { "temperature": 0.0, @@ -39,6 +39,11 @@ def __init__( self.kwargs["endpoint"] = endpoint self.history: list[dict[str, Any]] = [] + @UnifyClient.provider.setter + def provider(self, value: str) -> None: + if value != "default": + self.set_provider(value) + def basic_request(self, prompt: str, **kwargs) -> Any: """Basic request to the Unify's API.""" kwargs = {**self.kwargs, **kwargs} From 25634284b1e45dcc1410626f03833ff992edd526 Mon Sep 17 00:00:00 2001 From: 
Kacper-W-Kozdon Date: Sun, 14 Jul 2024 17:40:32 +0200 Subject: [PATCH 10/13] fix(dsp): adds request to __call__ --- dsp/modules/unify.py | 1 + mwe_unify.py | 10 ++++++++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/dsp/modules/unify.py b/dsp/modules/unify.py index 9f1e19b2e..63d3868fe 100644 --- a/dsp/modules/unify.py +++ b/dsp/modules/unify.py @@ -89,4 +89,5 @@ def __call__( assert return_sorted is False, "for now" n: int = kwargs.get("n") or 1 skip: int = kwargs.get("skip") or 0 + self.request(prompt, **kwargs) return self.inspect_history(n=n, skip=skip) diff --git a/mwe_unify.py b/mwe_unify.py index 6e7b9e705..3a0c3e82f 100644 --- a/mwe_unify.py +++ b/mwe_unify.py @@ -1,4 +1,7 @@ import logging +import os + +from dotenv import load_dotenv import dsp import dspy @@ -6,11 +9,14 @@ from dspy.evaluate import Evaluate from dspy.teleprompt import BootstrapFewShot +load_dotenv() + +unify_api_key = os.getenv("UNIFY_KEY") + lm = dsp.Unify( endpoint="gpt-3.5-turbo@openai", max_tokens=150, - api_key="UNIFY_API_KEY", - model_type="text", + api_key=unify_api_key, ) dspy.settings.configure(lm=lm) From 4b129e5034dc07ca94452ab058a3a8a42480a3ce Mon Sep 17 00:00:00 2001 From: Kacper-W-Kozdon Date: Sun, 14 Jul 2024 18:01:52 +0200 Subject: [PATCH 11/13] fix(dsp): adds self._dspy_provider and self.unify_provider --- dsp/modules/lm.py | 1 + dsp/modules/unify.py | 15 ++++++++++++++- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/dsp/modules/lm.py b/dsp/modules/lm.py index 7c5b9f2a2..851fad073 100644 --- a/dsp/modules/lm.py +++ b/dsp/modules/lm.py @@ -100,6 +100,7 @@ def inspect_history(self, n: int = 1, skip: int = 0): "premai", "you.com", "tensorrt_llm", + "unify", ): text = choices elif provider == "openai" or provider == "ollama": diff --git a/dsp/modules/unify.py b/dsp/modules/unify.py index 63d3868fe..0439369e6 100644 --- a/dsp/modules/unify.py +++ b/dsp/modules/unify.py @@ -38,9 +38,22 @@ def __init__( } self.kwargs["endpoint"] = endpoint 
self.history: list[dict[str, Any]] = [] + self._dspy_provider = "unify" - @UnifyClient.provider.setter + @property + def provider(self) -> Optional[str]: + return self._dspy_provider + + @provider.setattr def provider(self, value: str) -> None: + self._dspy_provider = value + + @property + def unify_provider(self) -> Optional[str]: + return UnifyClient.provider() + + @unify_provider.setter + def unify_provider(self, value: str) -> None: if value != "default": self.set_provider(value) From 3f078f0a2e6eed98f127449e5d467d5ce1e2316f Mon Sep 17 00:00:00 2001 From: Kacper-W-Kozdon Date: Sun, 14 Jul 2024 18:03:05 +0200 Subject: [PATCH 12/13] fix(dsp): adds self._dspy_provider and self.unify_provider --- dsp/modules/unify.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dsp/modules/unify.py b/dsp/modules/unify.py index 0439369e6..f82ad667d 100644 --- a/dsp/modules/unify.py +++ b/dsp/modules/unify.py @@ -44,7 +44,7 @@ def __init__( def provider(self) -> Optional[str]: return self._dspy_provider - @provider.setattr + @provider.setter def provider(self, value: str) -> None: self._dspy_provider = value From a55a9ecfafda8ad88c2e33472c1f342aef293510 Mon Sep 17 00:00:00 2001 From: Kacper-W-Kozdon Date: Mon, 15 Jul 2024 13:19:49 +0200 Subject: [PATCH 13/13] fix(dsp): renames _dspy_provider to _dspy_integration_provider and unify_provider to model_provider --- dsp/modules/unify.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/dsp/modules/unify.py b/dsp/modules/unify.py index f82ad667d..98085be39 100644 --- a/dsp/modules/unify.py +++ b/dsp/modules/unify.py @@ -38,22 +38,22 @@ def __init__( } self.kwargs["endpoint"] = endpoint self.history: list[dict[str, Any]] = [] - self._dspy_provider = "unify" + self._dspy_integration_provider = "unify" @property def provider(self) -> Optional[str]: - return self._dspy_provider + return self._dspy_integration_provider @provider.setter def provider(self, value: str) -> None: - 
self._dspy_provider = value
+        self._dspy_integration_provider = value
 
     @property
-    def unify_provider(self) -> Optional[str]:
-        return UnifyClient.provider()
+    def model_provider(self) -> Optional[str]:
+        return UnifyClient.provider.fget(self)
 
-    @unify_provider.setter
-    def unify_provider(self, value: str) -> None:
+    @model_provider.setter
+    def model_provider(self, value: str) -> None:
         if value != "default":
             self.set_provider(value)