File size: 1,411 Bytes
8d88d9b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
import type { EndpointParameters } from "./server/endpoints/endpoints";
import type { BackendModel } from "./server/models";
import type { Tool, ToolResult } from "./types/Tool";

// Options for buildPrompt: the endpoint parameters needed for prompt rendering,
// plus the model (source of the chat template, truncate limit, and stop tokens)
// and optional tool-calling context.
type buildPromptOptions = Pick<EndpointParameters, "messages" | "preprompt" | "continueMessage"> & {
	model: BackendModel;
	// Tools made available to the model for this generation, if any.
	tools?: Tool[];
	// Results of prior tool invocations to embed in the rendered prompt.
	toolResults?: ToolResult[];
};

/**
 * Renders a chat conversation into a single prompt string using the model's
 * chat template, applies a rough word-based truncation, and — when continuing
 * a previous assistant message — strips trailing stop tokens so the model
 * resumes mid-message instead of seeing an end-of-turn marker.
 *
 * @param messages - conversation history; an initial system message has its
 *   content overridden by `preprompt` when one is provided
 * @param model - backend model supplying the chat template and parameters
 * @returns the rendered (and possibly truncated) prompt string
 */
export async function buildPrompt({
	messages,
	model,
	preprompt,
	continueMessage,
	tools,
	toolResults,
}: buildPromptOptions): Promise<string> {
	// Shallow-copy so the caller's `messages` array and its elements are
	// never mutated (the original code wrote through to the caller's object).
	const filteredMessages = [...messages];

	// Override the leading system message's content with the configured
	// preprompt. The narrowed local also guards against an empty array,
	// which previously threw on `filteredMessages[0].from`.
	const firstMessage = filteredMessages[0];
	if (firstMessage?.from === "system" && preprompt) {
		filteredMessages[0] = { ...firstMessage, content: preprompt };
	}

	let prompt = model
		.chatPromptRender({
			messages: filteredMessages,
			preprompt,
			tools,
			toolResults,
			continueMessage,
		})
		// Not super precise, but it's truncated in the model's backend anyway.
		// Note: slice(-0) === slice(0), so no truncation when truncate is 0 or unset.
		.split(" ")
		.slice(-(model.parameters?.truncate ?? 0))
		.join(" ");

	if (continueMessage && model.parameters?.stop) {
		// Repeatedly strip any trailing stop tokens (and surrounding
		// whitespace) until none remain; a single pass could miss stacked
		// stop tokens like "...</s>\n</s>".
		let trimmedPrompt = prompt.trimEnd();
		let hasRemovedStop = true;
		while (hasRemovedStop) {
			hasRemovedStop = false;
			for (const stopToken of model.parameters.stop) {
				if (trimmedPrompt.endsWith(stopToken)) {
					trimmedPrompt = trimmedPrompt.slice(0, -stopToken.length);
					hasRemovedStop = true;
					break;
				}
			}
			trimmedPrompt = trimmedPrompt.trimEnd();
		}
		prompt = trimmedPrompt;
	}

	return prompt;
}