// llama-gpt/utils/app/clean.ts (60 lines, 1.6 KiB, TypeScript)

import { Conversation, OpenAIModelID, OpenAIModels } from "@/types";
import { DEFAULT_SYSTEM_PROMPT } from "./const";
/**
 * Backfills fields added to the Conversation shape after release, so a
 * conversation loaded from older persisted state is safe to use:
 * - model added 3/20/23
 * - system prompt added 3/21/23
 * - folders added 3/23/23
 *
 * Returns a new object when a migration applies; otherwise returns the
 * input unchanged. The input conversation is never mutated.
 */
export const cleanSelectedConversation = (conversation: Conversation) => {
  let updatedConversation = conversation;

  // Backfill a default model for conversations saved before models existed.
  // (The guard already guarantees `model` is falsy, so no `||` fallback is needed.)
  if (!updatedConversation.model) {
    updatedConversation = {
      ...updatedConversation,
      model: OpenAIModels[OpenAIModelID.GPT_3_5]
    };
  }

  // Backfill the default system prompt.
  if (!updatedConversation.prompt) {
    updatedConversation = {
      ...updatedConversation,
      prompt: DEFAULT_SYSTEM_PROMPT
    };
  }

  // Backfill the root folder (0 = no folder) for pre-folder conversations.
  if (!updatedConversation.folderId) {
    updatedConversation = {
      ...updatedConversation,
      folderId: 0
    };
  }

  return updatedConversation;
};
/**
 * Backfills post-release Conversation fields across an entire persisted
 * history (see cleanSelectedConversation for the per-field migrations):
 * - model added 3/20/23
 * - system prompt added 3/21/23
 * - folders added 3/23/23
 *
 * Fix: the previous implementation spread the array (`[...history]`) but
 * then mutated the caller's conversation objects in place via forEach —
 * a misleading half-copy that leaked mutations back to the caller. This
 * version returns a new array of new objects and leaves the input intact.
 */
export const cleanConversationHistory = (history: Conversation[]) => {
  return history.map((conversation) => ({
    ...conversation,
    // `||` keeps existing truthy values and backfills defaults otherwise,
    // matching the original falsy checks exactly.
    model: conversation.model || OpenAIModels[OpenAIModelID.GPT_3_5],
    prompt: conversation.prompt || DEFAULT_SYSTEM_PROMPT,
    folderId: conversation.folderId || 0
  }));
};