Merge pull request #33 from openresearch/feature/specify_model
Allow specifying the model as free text
commit
49770192ee
|
@ -2,9 +2,9 @@ import SettingsTab from "./tab";
|
|||
import SettingsOption from "./option";
|
||||
import { Button, Slider, Textarea } from "@mantine/core";
|
||||
import { useCallback, useMemo } from "react";
|
||||
import { defaultSystemPrompt } from "../../openai";
|
||||
import { defaultSystemPrompt, defaultModel } from "../../openai";
|
||||
import { useAppDispatch, useAppSelector } from "../../store";
|
||||
import { resetSystemPrompt, selectSystemPrompt, selectTemperature, setSystemPrompt, setTemperature } from "../../store/parameters";
|
||||
import { resetModel, setModel, selectModel, resetSystemPrompt, selectSystemPrompt, selectTemperature, setSystemPrompt, setTemperature } from "../../store/parameters";
|
||||
import { selectSettingsOption } from "../../store/settings-ui";
|
||||
import { FormattedMessage, useIntl } from "react-intl";
|
||||
|
||||
|
@ -13,16 +13,22 @@ export default function GenerationOptionsTab(props: any) {
|
|||
|
||||
const option = useAppSelector(selectSettingsOption);
|
||||
const initialSystemPrompt = useAppSelector(selectSystemPrompt);
|
||||
const model = useAppSelector(selectModel);
|
||||
const temperature = useAppSelector(selectTemperature);
|
||||
|
||||
const dispatch = useAppDispatch();
|
||||
const onSystemPromptChange = useCallback((event: React.ChangeEvent<HTMLTextAreaElement>) => dispatch(setSystemPrompt(event.target.value)), [dispatch]);
|
||||
const onModelChange = useCallback((event: React.ChangeEvent<HTMLTextAreaElement>) => dispatch(setModel(event.target.value)), [dispatch]);
|
||||
const onResetSystemPrompt = useCallback(() => dispatch(resetSystemPrompt()), [dispatch]);
|
||||
const onResetModel = useCallback(() => dispatch(resetModel()), [dispatch]);
|
||||
const onTemperatureChange = useCallback((value: number) => dispatch(setTemperature(value)), [dispatch]);
|
||||
|
||||
const resettable = initialSystemPrompt
|
||||
const resettableSystemPromopt = initialSystemPrompt
|
||||
&& (initialSystemPrompt?.trim() !== defaultSystemPrompt.trim());
|
||||
|
||||
const resettableModel = model
|
||||
&& (model?.trim() !== defaultModel.trim());
|
||||
|
||||
const systemPromptOption = useMemo(() => (
|
||||
<SettingsOption heading={intl.formatMessage({ defaultMessage: "System Prompt" })}
|
||||
focused={option === 'system-prompt'}>
|
||||
|
@ -36,11 +42,30 @@ export default function GenerationOptionsTab(props: any) {
|
|||
<FormattedMessage defaultMessage="The System Prompt is shown to ChatGPT by the "System" before your first message. The <code>'{{ datetime }}'</code> tag is automatically replaced by the current date and time."
|
||||
values={{ code: chunk => <code style={{ whiteSpace: 'nowrap' }}>{chunk}</code> }} />
|
||||
</p>
|
||||
{resettable && <Button size="xs" compact variant="light" onClick={onResetSystemPrompt}>
|
||||
{resettableSystemPromopt && <Button size="xs" compact variant="light" onClick={onResetSystemPrompt}>
|
||||
<FormattedMessage defaultMessage="Reset to default" />
|
||||
</Button>}
|
||||
</SettingsOption>
|
||||
), [option, initialSystemPrompt, resettable, onSystemPromptChange, onResetSystemPrompt]);
|
||||
), [option, initialSystemPrompt, resettableSystemPromopt, onSystemPromptChange, onResetSystemPrompt]);
|
||||
|
||||
const modelOption = useMemo(() => (
|
||||
<SettingsOption heading={intl.formatMessage({ defaultMessage: "Model" })}
|
||||
focused={option === 'model'}>
|
||||
<Textarea
|
||||
value={model || defaultModel}
|
||||
onChange={onModelChange}
|
||||
minRows={1}
|
||||
maxRows={1}
|
||||
autosize />
|
||||
<p style={{ marginBottom: '0.7rem' }}>
|
||||
<FormattedMessage defaultMessage="The model name. You can find model names here: https://platform.openai.com/docs/models/overview"
|
||||
values={{ code: chunk => <code style={{ whiteSpace: 'nowrap' }}>{chunk}</code> }} />
|
||||
</p>
|
||||
{resettableModel && <Button size="xs" compact variant="light" onClick={onResetModel}>
|
||||
<FormattedMessage defaultMessage="Reset to default" />
|
||||
</Button>}
|
||||
</SettingsOption>
|
||||
), [option, model, resettableModel, onModelChange, onResetModel]);
|
||||
|
||||
const temperatureOption = useMemo(() => (
|
||||
<SettingsOption heading={intl.formatMessage({ defaultMessage: "Temperature: {temperature, number, ::.0}", }, { temperature })}
|
||||
|
@ -55,9 +80,10 @@ export default function GenerationOptionsTab(props: any) {
|
|||
const elem = useMemo(() => (
|
||||
<SettingsTab name="options">
|
||||
{systemPromptOption}
|
||||
{modelOption}
|
||||
{temperatureOption}
|
||||
</SettingsTab>
|
||||
), [systemPromptOption, temperatureOption]);
|
||||
), [systemPromptOption, modelOption, temperatureOption]);
|
||||
|
||||
return elem;
|
||||
}
|
|
@ -9,6 +9,8 @@ Knowledge cutoff: 2021-09
|
|||
Current date and time: {{ datetime }}
|
||||
`.trim();
|
||||
|
||||
export const defaultModel = 'gpt-3.5-turbo';
|
||||
|
||||
export interface OpenAIResponseChunk {
|
||||
id?: string;
|
||||
done: boolean;
|
||||
|
@ -53,7 +55,7 @@ export async function createChatCompletion(messages: OpenAIMessage[], parameters
|
|||
const openai = new OpenAIApi(configuration);
|
||||
|
||||
const response = await openai.createChatCompletion({
|
||||
model: 'gpt-3.5-turbo',
|
||||
model: parameters.model,
|
||||
temperature: parameters.temperature,
|
||||
messages: messages as any,
|
||||
});
|
||||
|
@ -93,7 +95,7 @@ export async function createStreamingChatCompletion(messages: OpenAIMessage[], p
|
|||
'Content-Type': 'application/json',
|
||||
},
|
||||
payload: JSON.stringify({
|
||||
"model": "gpt-3.5-turbo",
|
||||
"model": parameters.model,
|
||||
"messages": messagesToSend,
|
||||
"temperature": parameters.temperature,
|
||||
"stream": true,
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
import { defaultModel } from "./openai";
|
||||
import { Parameters } from "./types";
|
||||
|
||||
export const defaultParameters: Parameters = {
|
||||
temperature: 0.5,
|
||||
model: defaultModel
|
||||
};
|
||||
|
||||
export function loadParameters(id: string | null | undefined = null): Parameters {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import { createSlice, PayloadAction } from '@reduxjs/toolkit'
|
||||
import type { RootState } from '.';
|
||||
import { defaultSystemPrompt } from '../openai';
|
||||
import { defaultSystemPrompt, defaultModel } from '../openai';
|
||||
import { defaultParameters } from '../parameters';
|
||||
import { Parameters } from '../types';
|
||||
|
||||
|
@ -16,15 +16,22 @@ export const parametersSlice = createSlice({
|
|||
resetSystemPrompt: (state) => {
|
||||
state.initialSystemPrompt = defaultSystemPrompt;
|
||||
},
|
||||
setModel: (state, action: PayloadAction<string>) => {
|
||||
state.model = action.payload;
|
||||
},
|
||||
resetModel: (state) => {
|
||||
state.model = defaultModel;
|
||||
},
|
||||
setTemperature: (state, action: PayloadAction<number>) => {
|
||||
state.temperature = action.payload;
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
export const { setSystemPrompt, setTemperature, resetSystemPrompt } = parametersSlice.actions;
|
||||
export const { setSystemPrompt, setModel, setTemperature, resetSystemPrompt, resetModel } = parametersSlice.actions;
|
||||
|
||||
export const selectSystemPrompt = (state: RootState) => state.parameters.initialSystemPrompt;
|
||||
export const selectModel = (state: RootState) => state.parameters.model;
|
||||
export const selectTemperature = (state: RootState) => state.parameters.temperature;
|
||||
|
||||
export default parametersSlice.reducer;
|
|
@ -1,4 +1,4 @@
|
|||
import { createChatCompletion } from "./openai";
|
||||
import { createChatCompletion, defaultModel } from "./openai";
|
||||
import { OpenAIMessage, Chat } from "./types";
|
||||
|
||||
const systemPrompt = `
|
||||
|
@ -38,7 +38,7 @@ export async function createTitle(chat: Chat, apiKey: string | undefined | null,
|
|||
},
|
||||
];
|
||||
|
||||
let title = await createChatCompletion(messages as any, { temperature: 0.5, apiKey });
|
||||
let title = await createChatCompletion(messages as any, { temperature: 0.5, model: defaultModel, apiKey });
|
||||
|
||||
if (!title?.length) {
|
||||
if (firstUserMessage.content.trim().length > 2 && firstUserMessage.content.trim().length < 250) {
|
||||
|
|
|
@ -4,6 +4,7 @@ export interface Parameters {
|
|||
temperature: number;
|
||||
apiKey?: string;
|
||||
initialSystemPrompt?: string;
|
||||
model: string;
|
||||
}
|
||||
|
||||
export interface Message {
|
||||
|
|
Loading…
Reference in New Issue