play.py
# LLM chain playground: LangChain prompt | model | output-parser experiments
from langchain_openai import ChatOpenAI, OpenAI
from langchain_core.prompts import ChatPromptTemplate, PromptTemplate
from langchain_core.output_parsers import StrOutputParser, CommaSeparatedListOutputParser
from langchain_core.messages import HumanMessage, SystemMessage
# llm = ChatOpenAI()
output_parser = CommaSeparatedListOutputParser()
# prompt = ChatPromptTemplate.from_messages([
# ("system", "You are a world class technical documentation writer."),
# ("user", "{input}")
# ])
# chain = prompt | llm | output_parser  # LCEL pipes flow prompt -> model -> parser
# chain.invoke({"input": "how can langsmith help with testing?"})
# ========================================
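# The completion-style OpenAI client below is left over from earlier experiments and is
# not used by the active chain; only chat_model feeds the pipeline at the bottom.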
llm = OpenAI()
chat_model = ChatOpenAI()
# template = "You are a helpful assistant that translates {input_language} to {output_language}."
# human_template = "{text}"
# chat_prompt = ChatPromptTemplate.from_messages([
#     # role/string tuples keep {input_language}, {output_language} and {text} as template
#     # variables; concrete SystemMessage/HumanMessage objects would be treated as literal
#     # text and format_messages would leave the placeholders unfilled
#     ("system", "You are a helpful assistant that translates {input_language} to {output_language}."),
#     ("human", "{text}"),
# ])
# chat_prompt.format_messages(input_language="English", output_language="French", text="I love programming.")
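# A minimal sketch (not one of the original experiments; translation_prompt and
# translation_chain are illustrative names): wiring the translation prompt above into a
# full LCEL chain with the already-imported StrOutputParser. Kept commented out, like the
# other drafts, so running the file still only triggers the active colors chain below.
# translation_prompt = ChatPromptTemplate.from_messages([
#     ("system", "You are a helpful assistant that translates {input_language} to {output_language}."),
#     ("human", "{text}"),
# ])
# translation_chain = translation_prompt | chat_model | StrOutputParser()
# print(translation_chain.invoke(
#     {"input_language": "English", "output_language": "French", "text": "I love programming."}
# ))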
template = "Generate a list of 5 {text}.\n\n{format_instructions}"
chat_prompt = ChatPromptTemplate.from_template(template)
chat_prompt = chat_prompt.partial(format_instructions=output_parser.get_format_instructions())
chain = chat_prompt | chat_model | output_parser
result = chain.invoke({"text": "colors"})
print(result)
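# For reference: the format instructions injected via .partial() above ask the model to
# reply as a single comma-separated line, and CommaSeparatedListOutputParser splits that
# reply into a Python list (exact items depend on the model, e.g. something like
# ['red', 'blue', 'green', 'yellow', 'purple']).
print(output_parser.get_format_instructions())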