From 1d74cafe2dee062ffcdde892cf846656527f4848 Mon Sep 17 00:00:00 2001
From: Philipp Schmid <schmidp@schmidp.com>
Date: Wed, 15 Mar 2023 21:13:14 +0100
Subject: [PATCH 1/2] allow to set model name

---
 app/src/components/settings/options.tsx | 38 +++++++++++++++++++++----
 app/src/openai.ts                        |  6 ++--
 app/src/parameters.ts                    |  2 ++
 app/src/store/parameters.ts              | 11 +++++--
 app/src/titles.ts                        |  4 +--
 app/src/types.ts                         |  1 +
 6 files changed, 50 insertions(+), 12 deletions(-)

diff --git a/app/src/components/settings/options.tsx b/app/src/components/settings/options.tsx
index 3fbd3de..783d30f 100644
--- a/app/src/components/settings/options.tsx
+++ b/app/src/components/settings/options.tsx
@@ -2,9 +2,9 @@ import SettingsTab from "./tab";
 import SettingsOption from "./option";
 import { Button, Slider, Textarea } from "@mantine/core";
 import { useCallback, useMemo } from "react";
-import { defaultSystemPrompt } from "../../openai";
+import { defaultSystemPrompt, defaultModel } from "../../openai";
 import { useAppDispatch, useAppSelector } from "../../store";
-import { resetSystemPrompt, selectSystemPrompt, selectTemperature, setSystemPrompt, setTemperature } from "../../store/parameters";
+import { resetModel, setModel, selectModel, resetSystemPrompt, selectSystemPrompt, selectTemperature, setSystemPrompt, setTemperature } from "../../store/parameters";
 import { selectSettingsOption } from "../../store/settings-ui";
 import { FormattedMessage, useIntl } from "react-intl";
 
@@ -13,16 +13,22 @@ export default function GenerationOptionsTab(props: any) {
 
     const option = useAppSelector(selectSettingsOption);
     const initialSystemPrompt = useAppSelector(selectSystemPrompt);
+    const model = useAppSelector(selectModel);
     const temperature = useAppSelector(selectTemperature);
     const dispatch = useAppDispatch();
 
     const onSystemPromptChange = useCallback((event: React.ChangeEvent<HTMLTextAreaElement>) => dispatch(setSystemPrompt(event.target.value)), [dispatch]);
+    const onModelChange = useCallback((event: React.ChangeEvent<HTMLTextAreaElement>) => dispatch(setModel(event.target.value)), [dispatch]);
     const onResetSystemPrompt = useCallback(() => dispatch(resetSystemPrompt()), [dispatch]);
+    const onResetModel = useCallback(() => dispatch(resetModel()), [dispatch]);
     const onTemperatureChange = useCallback((value: number) => dispatch(setTemperature(value)), [dispatch]);
 
-    const resettable = initialSystemPrompt
+    const resettableSystemPrompt = initialSystemPrompt
         && (initialSystemPrompt?.trim() !== defaultSystemPrompt.trim());
 
+    const resettableModel = model
+        && (model?.trim() !== defaultModel.trim());
+
     const systemPromptOption = useMemo(() => (
         <SettingsOption heading={intl.formatMessage({ defaultMessage: "System Prompt" })}
             focused={option === 'system-prompt'}>
@@ -36,11 +42,30 @@ export default function GenerationOptionsTab(props: any) {
                 <FormattedMessage defaultMessage="The System Prompt is shown to ChatGPT by the "System" before your first message. The <code>'{{ datetime }}'</code> tag is automatically replaced by the current date and time."
                     values={{ code: chunk => <code style={{ whiteSpace: 'nowrap' }}>{chunk}</code> }} />
             </p>
-            {resettable && <Button size="xs" compact variant="light" onClick={onResetSystemPrompt}>
+            {resettableSystemPrompt && <Button size="xs" compact variant="light" onClick={onResetSystemPrompt}>
                 <FormattedMessage defaultMessage="Reset to default" />
             </Button>}
         </SettingsOption>
-    ), [option, initialSystemPrompt, resettable, onSystemPromptChange, onResetSystemPrompt]);
+    ), [option, initialSystemPrompt, resettableSystemPrompt, onSystemPromptChange, onResetSystemPrompt]);
+
+    const modelOption = useMemo(() => (
+        <SettingsOption heading={intl.formatMessage({ defaultMessage: "Model" })}
+            focused={option === 'model'}>
+            <Textarea
+                value={model || defaultModel}
+                onChange={onModelChange}
+                minRows={1}
+                maxRows={1}
+                autosize />
+            <p style={{ marginBottom: '0.7rem' }}>
+                <FormattedMessage defaultMessage="The model name. Known names are: gpt-3.5, gpt-3.5-turbo and gpt-4.0"
+                    values={{ code: chunk => <code style={{ whiteSpace: 'nowrap' }}>{chunk}</code> }} />
+            </p>
+            {resettableModel && <Button size="xs" compact variant="light" onClick={onResetModel}>
+                <FormattedMessage defaultMessage="Reset to default" />
+            </Button>}
+        </SettingsOption>
+    ), [option, model, resettableModel, onModelChange, onResetModel]);
 
     const temperatureOption = useMemo(() => (
         <SettingsOption heading={intl.formatMessage({ defaultMessage: "Temperature: {temperature, number, ::.0}", }, { temperature })}
@@ -55,9 +80,10 @@ export default function GenerationOptionsTab(props: any) {
     const elem = useMemo(() => (
         <SettingsTab name="options">
             {systemPromptOption}
+            {modelOption}
             {temperatureOption}
         </SettingsTab>
-    ), [systemPromptOption, temperatureOption]);
+    ), [systemPromptOption, modelOption, temperatureOption]);
 
     return elem;
 }
\ No newline at end of file
diff --git a/app/src/openai.ts b/app/src/openai.ts
index 6906b44..0993f4d 100644
--- a/app/src/openai.ts
+++ b/app/src/openai.ts
@@ -9,6 +9,8 @@ Knowledge cutoff: 2021-09
 Current date and time: {{ datetime }}
 `.trim();
 
+export const defaultModel = 'gpt-3.5-turbo';
+
 export interface OpenAIResponseChunk {
     id?: string;
     done: boolean;
@@ -53,7 +55,7 @@ export async function createChatCompletion(messages: OpenAIMessage[], parameters
     const openai = new OpenAIApi(configuration);
 
     const response = await openai.createChatCompletion({
-        model: 'gpt-3.5-turbo',
+        model: parameters.model,
         temperature: parameters.temperature,
         messages: messages as any,
     });
@@ -93,7 +95,7 @@ export async function createStreamingChatCompletion(messages: OpenAIMessage[], p
             'Content-Type': 'application/json',
         },
         payload: JSON.stringify({
-            "model": "gpt-3.5-turbo",
+            "model": parameters.model,
             "messages": messagesToSend,
             "temperature": parameters.temperature,
             "stream": true,
diff --git a/app/src/parameters.ts b/app/src/parameters.ts
index 0d52273..20d36d2 100644
--- a/app/src/parameters.ts
+++ b/app/src/parameters.ts
@@ -1,7 +1,9 @@
+import { defaultModel } from "./openai";
 import { Parameters } from "./types";
 
 export const defaultParameters: Parameters = {
     temperature: 0.5,
+    model: defaultModel
 };
 
 export function loadParameters(id: string | null | undefined = null): Parameters {
diff --git a/app/src/store/parameters.ts b/app/src/store/parameters.ts
index 97eec9a..deb82d8 100644
--- a/app/src/store/parameters.ts
+++ b/app/src/store/parameters.ts
@@ -1,6 +1,6 @@
 import { createSlice, PayloadAction } from '@reduxjs/toolkit'
 import type { RootState } from '.';
-import { defaultSystemPrompt } from '../openai';
+import { defaultSystemPrompt, defaultModel } from '../openai';
 import { defaultParameters } from '../parameters';
 import { Parameters } from '../types';
 
@@ -16,15 +16,22 @@ export const parametersSlice = createSlice({
         resetSystemPrompt: (state) => {
             state.initialSystemPrompt = defaultSystemPrompt;
         },
+        setModel: (state, action: PayloadAction<string>) => {
+            state.model = action.payload;
+        },
+        resetModel: (state) => {
+            state.model = defaultModel;
+        },
         setTemperature: (state, action: PayloadAction<number>) => {
             state.temperature = action.payload;
         },
     },
 })
 
-export const { setSystemPrompt, setTemperature, resetSystemPrompt } = parametersSlice.actions;
+export const { setSystemPrompt, setModel, setTemperature, resetSystemPrompt, resetModel } = parametersSlice.actions;
 
 export const selectSystemPrompt = (state: RootState) => state.parameters.initialSystemPrompt;
+export const selectModel = (state: RootState) => state.parameters.model;
 export const selectTemperature = (state: RootState) => state.parameters.temperature;
 
 export default parametersSlice.reducer;
\ No newline at end of file
diff --git a/app/src/titles.ts b/app/src/titles.ts
index e29b3b8..0f7143e 100644
--- a/app/src/titles.ts
+++ b/app/src/titles.ts
@@ -1,4 +1,4 @@
-import { createChatCompletion } from "./openai";
+import { createChatCompletion, defaultModel } from "./openai";
 import { OpenAIMessage, Chat } from "./types";
 
 const systemPrompt = `
@@ -38,7 +38,7 @@ export async function createTitle(chat: Chat, apiKey: string | undefined | null,
         },
     ];
 
-    let title = await createChatCompletion(messages as any, { temperature: 0.5, apiKey });
+    let title = await createChatCompletion(messages as any, { temperature: 0.5, model: defaultModel, apiKey });
 
     if (!title?.length) {
         if (firstUserMessage.content.trim().length > 2 && firstUserMessage.content.trim().length < 250) {
diff --git a/app/src/types.ts b/app/src/types.ts
index 8fc8deb..8f4945a 100644
--- a/app/src/types.ts
+++ b/app/src/types.ts
@@ -4,6 +4,7 @@ export interface Parameters {
     temperature: number;
     apiKey?: string;
     initialSystemPrompt?: string;
+    model: string;
 }
 
 export interface Message {

From 73e67035b9b5dc27170a1d4e4383fe35430e0b6f Mon Sep 17 00:00:00 2001
From: Philipp Schmid <schmidp@schmidp.com>
Date: Wed, 15 Mar 2023 21:22:48 +0100
Subject: [PATCH 2/2] link to model names

---
 app/src/components/settings/options.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/app/src/components/settings/options.tsx b/app/src/components/settings/options.tsx
index 783d30f..2bb4c8b 100644
--- a/app/src/components/settings/options.tsx
+++ b/app/src/components/settings/options.tsx
@@ -58,7 +58,7 @@ export default function GenerationOptionsTab(props: any) {
                 maxRows={1}
                 autosize />
             <p style={{ marginBottom: '0.7rem' }}>
-                <FormattedMessage defaultMessage="The model name. Known names are: gpt-3.5, gpt-3.5-turbo and gpt-4.0"
+                <FormattedMessage defaultMessage="The model name. You can find model names here: https://platform.openai.com/docs/models/overview"
                     values={{ code: chunk => <code style={{ whiteSpace: 'nowrap' }}>{chunk}</code> }} />
             </p>
             {resettableModel && <Button size="xs" compact variant="light" onClick={onResetModel}>