From 5a7645fa7d3864547088bdda04c409f8b7979920 Mon Sep 17 00:00:00 2001
From: lucifertrj
Date: Wed, 5 Jun 2024 14:07:00 +0530
Subject: [PATCH] add GroqModel LLM for faster inference

---
 src/beyondllm/llms/__init__.py |  3 +-
 src/beyondllm/llms/chatgroq.py | 59 ++++++++++++++++++++++++++++++++++
 2 files changed, 61 insertions(+), 1 deletion(-)
 create mode 100644 src/beyondllm/llms/chatgroq.py

diff --git a/src/beyondllm/llms/__init__.py b/src/beyondllm/llms/__init__.py
index 58ed340..5080768 100644
--- a/src/beyondllm/llms/__init__.py
+++ b/src/beyondllm/llms/__init__.py
@@ -5,4 +5,5 @@
 from .azurechat import AzureOpenAIModel
 from .ollama import OllamaModel
 from .multimodal import GeminiMultiModal
-from .gpt4o import GPT4oOpenAIModel
\ No newline at end of file
+from .gpt4o import GPT4oOpenAIModel
+from .chatgroq import GroqModel
\ No newline at end of file
diff --git a/src/beyondllm/llms/chatgroq.py b/src/beyondllm/llms/chatgroq.py
new file mode 100644
index 0000000..2646c16
--- /dev/null
+++ b/src/beyondllm/llms/chatgroq.py
@@ -0,0 +1,59 @@
+from beyondllm.llms.base import BaseLLMModel, ModelConfig
+from typing import Any, Dict, List, Optional
+import os
+from dataclasses import dataclass, field
+
+@dataclass
+class GroqModel:
+    """
+    Class representing a Chat Language Model (LLM) using the Groq API.
+    Example:
+    from beyondllm.llms import GroqModel
+    llm = GroqModel(model="mixtral-8x7b-32768", groq_api_key="<your-groq-api-key>")
+    """
+    groq_api_key: str = ""
+    model: str = field(default="mixtral-8x7b-32768")
+    system_prompt: str = field(default="You are an AI assistant")
+
+    def __post_init__(self):
+        # Fall back to the GROQ_API_KEY environment variable when no key is passed.
+        if not self.groq_api_key:
+            self.groq_api_key = os.getenv('GROQ_API_KEY')
+            if not self.groq_api_key:
+                raise ValueError("GROQ_API_KEY is not provided and not found in environment variables.")
+        self.load_llm()
+
+    def load_llm(self):
+        try:
+            from groq import Groq
+        except ImportError:
+            raise ImportError("Groq library is not installed. Please install it with ``pip install groq``.")
+
+        try:
+            self.client = Groq(api_key=self.groq_api_key)
+        except Exception as e:
+            raise Exception(f"Failed to load the model from Groq: {e}")
+
+        return self.client
+
+    def predict(self, prompt: Any):
+        # Send a system + user message pair to Groq's chat completions API.
+        response = self.client.chat.completions.create(
+            messages=[
+                {
+                    "role": "system",
+                    "content": self.system_prompt,
+                },
+                {
+                    "role": "user",
+                    "content": str(prompt),
+                },
+            ],
+            model=self.model,
+        )
+        return response.choices[0].message.content
+
+    def load_from_kwargs(self, kwargs):
+        model_config = ModelConfig(**kwargs)
+        self.config = model_config
+        self.load_llm()
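
A minimal usage sketch of the GroqModel this patch adds, assuming beyondllm and groq are installed and a valid GROQ_API_KEY is available; the model name below is the patch's default:

    from beyondllm.llms import GroqModel

    # Assumes GROQ_API_KEY is exported in the environment; alternatively,
    # pass groq_api_key="..." explicitly to the constructor.
    llm = GroqModel(model="mixtral-8x7b-32768",
                    system_prompt="You are an AI assistant")

    # predict() wraps Groq's chat.completions.create() and returns the
    # assistant's reply text.
    print(llm.predict("In one sentence, what is low-latency LLM inference?"))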