Skip to content

Commit

Permalink
add option to override template messages with thread messages when specified
Browse files Browse the repository at this point in the history
  • Loading branch information
vojtatranta committed Aug 13, 2024
1 parent a041163 commit bbed8f3
Show file tree
Hide file tree
Showing 2 changed files with 156 additions and 19 deletions.
125 changes: 125 additions & 0 deletions src/getOpenAIBody.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -246,4 +246,129 @@ describe("getOpenAIBody", () => {
}
`) // template is overridden by the one in parsedBody
})


describe('thread messages', () => {
it('should compile thread messages with passed variables', () => {
const completionConfig = {
state: {
type: "chat" as const,
args: {
model: "gpt-3.5-turbo",
max_tokens: 100,
temperature: 0.8,
top_p: 1,
presence_penalty: 0,
frequency_penalty: 0,
jsonmode: false,
seed: null,
stop: [],
},
template: [
{
role: "user" as const,
content: "TEAMPLTE: This use previous user message.",
},
]
},
chatInput: {},
}

const openAIbody = getOpenAIBody(completionConfig, {
variables: {
footballClub: "Slavia Praha",
},
messages: [],
}, {
threadMessages: [
{
role: "system" as const,
content: "THREAD: Your favourite football club is {{ footballClub }}",
},
]
})

expect(openAIbody).toMatchInlineSnapshot(`
{
"frequency_penalty": 0,
"max_tokens": 100,
"messages": [
{
"content": "TEAMPLTE: This use previous user message.",
"role": "user",
},
{
"content": "THREAD: Your favourite football club is {{ footballClub }}",
"role": "system",
},
],
"model": "gpt-3.5-turbo",
"presence_penalty": 0,
"temperature": 0.8,
"top_p": 1,
}
`)
})

it('should compile thread messages with passed chat input', () => {
const completionConfig = {
state: {
type: "chat" as const,
args: {
model: "gpt-3.5-turbo",
max_tokens: 100,
temperature: 0.8,
top_p: 1,
presence_penalty: 0,
frequency_penalty: 0,
jsonmode: false,
seed: null,
stop: [],
},
template: [
{
role: "system" as const,
content: "TEMPLATE MESSAGE: This use previous user message.",
},
]
},
chatInput: {
footballClub: "Sparta Praha",
},
}

const openAIbody = getOpenAIBody(completionConfig, {
variables: {},
messages: [],
}, {
threadMessages: [
{
role: "user" as const,
content: "THREAD message: Your favourite football club is NOT {{ footballClub }}",
},
]
})

expect(openAIbody).toMatchInlineSnapshot(`
{
"frequency_penalty": 0,
"max_tokens": 100,
"messages": [
{
"content": "TEMPLATE MESSAGE: This use previous user message.",
"role": "system",
},
{
"content": "THREAD message: Your favourite football club is NOT {{ footballClub }}",
"role": "user",
},
],
"model": "gpt-3.5-turbo",
"presence_penalty": 0,
"temperature": 0.8,
"top_p": 1,
}
`)
})
})
})
50 changes: 31 additions & 19 deletions src/getOpenAIBody.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,39 +2,51 @@
import type OpenAI from "openai"

import { compileLTTemplate } from "./template"
import { ChatCompletionsCreateParams } from "./schemas"
import { ChatCompletionsCreateParams, PlaygroundMessage } from "./schemas"
import { IncomingBodyType, PlaygroundState } from "./schemas"
import { ChatCompletionMessageParam } from "openai/resources"

function compileMessages(
messages: PlaygroundMessage[],
variables: Record<string, any>,
): ChatCompletionMessageParam[] {
return messages.map((item) => {
const needsCompilation =
typeof item.content === "string" ? item.content?.includes("{{") : true

return {
...item,
content:
item.content &&
(needsCompilation
? compileLTTemplate(item.content, variables)
: item.content),
} as ChatCompletionMessageParam
})
}

/**
* Get the body for the OpenAI API request. Used in the langtail prompt API. // TODO remove this from our prompt-API when this is merged so that we don't have this code duplicated
*/
export function getOpenAIBody(
completionConfig: PlaygroundState,
parsedBody: IncomingBodyType,
threadParams?: {
threadMessages: PlaygroundMessage[]
}
): ChatCompletionsCreateParams {
const completionArgs = completionConfig.state.args

const template = parsedBody.template ?? completionConfig.state.template
const inputMessages = [
...template.map((item) => {
const needsCompilation =
typeof item.content === "string" ? item.content?.includes("{{") : true

const variables = Object.assign(
completionConfig.chatInput,
parsedBody.variables ?? {},
)
return {
...item,
content:
item.content &&
(needsCompilation
? compileLTTemplate(item.content, variables)
: item.content),
}
}),
...(parsedBody.messages ?? []),
...compileMessages(template, Object.assign(
completionConfig.chatInput,
parsedBody.variables ?? {},
)),
...[...(threadParams?.threadMessages ?? []) as ChatCompletionMessageParam[]],
...(parsedBody.messages ?? []) as ChatCompletionMessageParam[]
]

const openAIbody: OpenAI.Chat.ChatCompletionCreateParams = {
model: parsedBody.model ?? completionArgs.model,
temperature: parsedBody.temperature ?? completionArgs.temperature,
Expand Down

0 comments on commit bbed8f3

Please sign in to comment.