v0.2.3
This commit is contained in:
16
app/src/plugins/index.ts
Normal file
16
app/src/plugins/index.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import Plugin from "../core/plugins";
|
||||
|
||||
import { SystemPromptPlugin } from "./system-prompt";
|
||||
import { TitlePlugin } from "./titles";
|
||||
import { ContextTrimmerPlugin } from "./trimmer";
|
||||
|
||||
import ElevenLabsPlugin from "../tts-plugins/elevenlabs";
|
||||
import WebSpeechPlugin from "../tts-plugins/web-speech";
|
||||
|
||||
// Registry of every chat plugin known to the app. NOTE(review): order likely
// matters — preprocessModelInput hooks presumably run in array order, so the
// system prompt is inserted before the context trimmer measures the window;
// confirm against core/plugins before reordering.
export const registeredPlugins: Array<typeof Plugin<any>> = [
    SystemPromptPlugin,
    ContextTrimmerPlugin,
    TitlePlugin,
    WebSpeechPlugin,
    ElevenLabsPlugin,
];
|
||||
60
app/src/plugins/system-prompt.tsx
Normal file
60
app/src/plugins/system-prompt.tsx
Normal file
@@ -0,0 +1,60 @@
|
||||
import { FormattedMessage } from "react-intl";
|
||||
import Plugin from "../core/plugins";
|
||||
import { PluginDescription } from "../core/plugins/plugin-description";
|
||||
import { OpenAIMessage, Parameters } from "../core/chat/types";
|
||||
|
||||
export const defaultSystemPrompt = `
|
||||
You are ChatGPT, a large language model trained by OpenAI.
|
||||
Knowledge cutoff: 2021-09
|
||||
Current date and time: {{ datetime }}
|
||||
`.trim();
|
||||
|
||||
export interface SystemPromptPluginOptions {
|
||||
systemPrompt: string;
|
||||
}
|
||||
|
||||
export class SystemPromptPlugin extends Plugin<SystemPromptPluginOptions> {
|
||||
describe(): PluginDescription {
|
||||
return {
|
||||
id: "system-prompt",
|
||||
name: "System Prompt",
|
||||
options: [
|
||||
{
|
||||
id: "systemPrompt",
|
||||
defaultValue: defaultSystemPrompt,
|
||||
displayOnSettingsScreen: "chat",
|
||||
resettable: true,
|
||||
scope: "chat",
|
||||
renderProps: {
|
||||
type: "textarea",
|
||||
description: <p>
|
||||
<FormattedMessage defaultMessage={"The System Prompt is an invisible message inserted at the start of the chat and can be used to give ChatGPT information about itself and general guidelines for how it should respond. The <code>'{{ datetime }}'</code> tag is automatically replaced by the current date and time (use this to give the AI access to the time)."}
|
||||
values={{ code: v => <code>{v}</code> }} />
|
||||
</p>,
|
||||
},
|
||||
displayInQuickSettings: {
|
||||
name: "System Prompt",
|
||||
displayByDefault: true,
|
||||
label: "Customize system prompt",
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
async preprocessModelInput(messages: OpenAIMessage[], parameters: Parameters): Promise<{ messages: OpenAIMessage[]; parameters: Parameters; }> {
|
||||
const output = [
|
||||
{
|
||||
role: 'system',
|
||||
content: (this.options?.systemPrompt || defaultSystemPrompt)
|
||||
.replace('{{ datetime }}', new Date().toLocaleString()),
|
||||
},
|
||||
...messages,
|
||||
];
|
||||
|
||||
return {
|
||||
messages: output,
|
||||
parameters,
|
||||
};
|
||||
}
|
||||
}
|
||||
75
app/src/plugins/titles.ts
Normal file
75
app/src/plugins/titles.ts
Normal file
@@ -0,0 +1,75 @@
|
||||
import Plugin from "../core/plugins";
|
||||
import { PluginDescription } from "../core/plugins/plugin-description";
|
||||
import { OpenAIMessage, Parameters } from "../core/chat/types";
|
||||
import { countTokens, runChatTrimmer } from "../core/tokenizer/wrapper";
|
||||
import { defaultModel } from "../core/chat/openai";
|
||||
|
||||
export const systemPrompt = `
|
||||
Please read the following exchange and write a short, concise title describing the topic (in the user's language).
|
||||
If there is no clear topic for the exchange, respond with: N/A
|
||||
`.trim();
|
||||
|
||||
export const systemPromptForLongExchanges = `
|
||||
Please read the following exchange and write a short, concise title describing the topic (in the user's language).
|
||||
`.trim();
|
||||
|
||||
export interface TitlePluginOptions {
|
||||
}
|
||||
|
||||
const userPrompt = (messages: OpenAIMessage[]) => {
|
||||
return messages.map(m => `${m.role.toLocaleUpperCase()}:\n${m.content}`)
|
||||
.join("\n===\n")
|
||||
+ "\n===\nTitle:";
|
||||
}
|
||||
|
||||
export class TitlePlugin extends Plugin<TitlePluginOptions> {
|
||||
describe(): PluginDescription {
|
||||
return {
|
||||
id: "titles",
|
||||
name: "Title Generator",
|
||||
options: [],
|
||||
};
|
||||
}
|
||||
|
||||
async postprocessModelOutput(message: OpenAIMessage, contextMessages: OpenAIMessage[], parameters: Parameters, done: boolean): Promise<OpenAIMessage> {
|
||||
if (done && !this.context?.getCurrentChat().title) {
|
||||
(async () => {
|
||||
let messages = [
|
||||
...contextMessages.filter(m => m.role === 'user' || m.role === 'assistant'),
|
||||
message,
|
||||
];
|
||||
|
||||
const tokens = await countTokens(messages);
|
||||
|
||||
messages = await runChatTrimmer(messages, {
|
||||
maxTokens: 1024,
|
||||
preserveFirstUserMessage: true,
|
||||
preserveSystemPrompt: false,
|
||||
});
|
||||
|
||||
messages = [
|
||||
{
|
||||
role: 'system',
|
||||
content: tokens.length > 512 ? systemPromptForLongExchanges : systemPrompt,
|
||||
},
|
||||
{
|
||||
role: 'user',
|
||||
content: userPrompt(messages),
|
||||
},
|
||||
]
|
||||
|
||||
const output = await this.context?.createChatCompletion(messages, {
|
||||
model: defaultModel,
|
||||
temperature: 0,
|
||||
});
|
||||
|
||||
if (!output || output === 'N/A') {
|
||||
return;
|
||||
}
|
||||
|
||||
this.context?.setChatTitle(output);
|
||||
})();
|
||||
}
|
||||
return message;
|
||||
}
|
||||
}
|
||||
106
app/src/plugins/trimmer.ts
Normal file
106
app/src/plugins/trimmer.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import Plugin from "../core/plugins";
|
||||
import { PluginDescription } from "../core/plugins/plugin-description";
|
||||
import { OpenAIMessage, Parameters } from "../core/chat/types";
|
||||
import { maxTokensByModel } from "../core/chat/openai";
|
||||
import { countTokens, runChatTrimmer } from "../core/tokenizer/wrapper";
|
||||
|
||||
/** User-configurable settings for ContextTrimmerPlugin. */
export interface ContextTrimmerPluginOptions {
    // Token budget for the context sent to the model.
    maxTokens: number;
    // Keep only the N most recent messages; null means no message-count cap.
    // (The settings UI for this option is currently commented out in describe().)
    maxMessages: number | null;
    // Try to keep the system prompt in context when trimming.
    preserveSystemPrompt: boolean;
    // Try to keep the user's first message in context when trimming.
    preserveFirstUserMessage: boolean;
}
|
||||
|
||||
export class ContextTrimmerPlugin extends Plugin<ContextTrimmerPluginOptions> {
|
||||
describe(): PluginDescription {
|
||||
return {
|
||||
id: "context-trimmer",
|
||||
name: "Message Context",
|
||||
options: [
|
||||
{
|
||||
id: 'maxTokens',
|
||||
displayOnSettingsScreen: "chat",
|
||||
defaultValue: 2048,
|
||||
scope: "chat",
|
||||
renderProps: (value, options) => ({
|
||||
label: `Include a maximum of ${value} tokens`,
|
||||
type: "slider",
|
||||
min: 512,
|
||||
max: maxTokensByModel[options.getOption('parameters', 'model')] || 2048,
|
||||
step: 512,
|
||||
}),
|
||||
validate: (value, options) => {
|
||||
const max = maxTokensByModel[options.getOption('parameters', 'model')] || 2048;
|
||||
return value < max;
|
||||
},
|
||||
displayInQuickSettings: {
|
||||
name: "Max Tokens",
|
||||
displayByDefault: false,
|
||||
label: value => `Max tokens: ${value}`,
|
||||
},
|
||||
},
|
||||
// {
|
||||
// id: 'maxMessages',
|
||||
// displayOnSettingsScreen: "chat",
|
||||
// defaultValue: null,
|
||||
// scope: "chat",
|
||||
// renderProps: (value) => ({
|
||||
// label: `Include only the last ${value || 'N'} messages (leave blank for all)`,
|
||||
// type: "number",
|
||||
// min: 1,
|
||||
// max: 10,
|
||||
// step: 1,
|
||||
// }),
|
||||
// displayInQuickSettings: {
|
||||
// name: "Max Messages",
|
||||
// displayByDefault: false,
|
||||
// label: value => `Include ${value ?? 'all'} messages`,
|
||||
// },
|
||||
// },
|
||||
{
|
||||
id: 'preserveSystemPrompt',
|
||||
displayOnSettingsScreen: "chat",
|
||||
defaultValue: true,
|
||||
scope: "chat",
|
||||
renderProps: {
|
||||
label: "Try to always include the System Prompt",
|
||||
type: "checkbox",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'preserveFirstUserMessage',
|
||||
displayOnSettingsScreen: "chat",
|
||||
defaultValue: true,
|
||||
scope: "chat",
|
||||
renderProps: {
|
||||
label: "Try to always include your first message",
|
||||
type: "checkbox",
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
async preprocessModelInput(messages: OpenAIMessage[], parameters: Parameters): Promise<{ messages: OpenAIMessage[]; parameters: Parameters; }> {
|
||||
const before = await countTokens(messages);
|
||||
|
||||
const options = this.options;
|
||||
|
||||
const trimmed = await runChatTrimmer(messages, {
|
||||
maxTokens: options?.maxTokens ?? 2048,
|
||||
nMostRecentMessages: options?.maxMessages ?? undefined,
|
||||
preserveFirstUserMessage: options?.preserveFirstUserMessage || true,
|
||||
preserveSystemPrompt: options?.preserveSystemPrompt || true,
|
||||
});
|
||||
|
||||
const after = await countTokens(trimmed);
|
||||
|
||||
const diff = after - before;
|
||||
console.log(`[context trimmer] trimmed ${diff} tokens from context`);
|
||||
|
||||
return {
|
||||
messages: trimmed,
|
||||
parameters,
|
||||
};
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user