Diffstat (limited to 'src/client/apis')
| -rw-r--r-- | src/client/apis/gpt/GPT.ts | 17 |
1 file changed, 9 insertions, 8 deletions
diff --git a/src/client/apis/gpt/GPT.ts b/src/client/apis/gpt/GPT.ts
index 30194f9f8..3370f19fc 100644
--- a/src/client/apis/gpt/GPT.ts
+++ b/src/client/apis/gpt/GPT.ts
@@ -5,6 +5,7 @@ enum GPTCallType {
     SUMMARY = 'summary',
     COMPLETION = 'completion',
     EDIT = 'edit',
+    MERMAID = 'mermaid',
     DATA = 'data',
 }
 
@@ -20,14 +21,15 @@ type GPTCallOpts = {
  */
 const callTypeMap: { [type: string]: GPTCallOpts } = {
-    // newest model: gpt-4
-    summary: { model: 'gpt-3.5-turbo', maxTokens: 256, temp: 0.5, prompt: 'Summarize the text given in simpler terms.' },
-    edit: { model: 'gpt-3.5-turbo', maxTokens: 256, temp: 0.5, prompt: 'Reword the text.' },
-    completion: { model: 'gpt-3.5-turbo', maxTokens: 256, temp: 0.5, prompt: "You are a helpful assistant. Answer the user's prompt." },
+    summary: { model: 'gpt-3.5-turbo-instruct', maxTokens: 256, temp: 0.5, prompt: 'Summarize this text in simpler terms: ' },
+    edit: { model: 'gpt-3.5-turbo-instruct', maxTokens: 256, temp: 0.5, prompt: 'Reword this: ' },
+    completion: { model: 'gpt-3.5-turbo-instruct', maxTokens: 256, temp: 0.5, prompt: '' },
+    mermaid: { model: 'gpt-4-turbo', maxTokens: 2048, temp: 0, prompt: "(Heres an example of changing color of a pie chart to help you pie title Example \"Red\": 20 \"Blue\": 50 \"Green\": 30 %%{init: {'theme': 'base', 'themeVariables': {'pie1': '#0000FF', 'pie2': '#00FF00', 'pie3': '#FF0000'}}}%% keep in mind that pie1 is the highest since its sorted in descending order. Heres an example of a mindmap: mindmap root((mindmap)) Origins Long history ::icon(fa fa-book) Popularisation British popular psychology author Tony Buzan Research On effectivness<br/>and features On Automatic creation Uses Creative techniques Strategic planning Argument mapping Tools Pen and paper Mermaid. " },
     data: { model: 'gpt-3.5-turbo', maxTokens: 256, temp: 0.5, prompt: "You are a helpful resarch assistant. Analyze the user's data to find meaningful patterns and/or correlation. Please keep your response short and to the point." },
 };
-/**
+
+/**
  * Calls the OpenAI API.
  *
  * @param inputText Text to process
@@ -38,7 +40,7 @@ const gptAPICall = async (inputText: string, callType: GPTCallType, prompt?: any
     const opts: GPTCallOpts = callTypeMap[callType];
     try {
         const configuration: ClientOptions = {
-            apiKey: process.env.OPENAI_KEY,
+            apiKey: "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",
             dangerouslyAllowBrowser: true,
         };
         const openai = new OpenAI(configuration);
@@ -51,9 +53,8 @@ const gptAPICall = async (inputText: string, callType: GPTCallType, prompt?: any
         const response = await openai.chat.completions.create({
             model: opts.model,
-            max_tokens: opts.maxTokens,
+            messages: messages,
             temperature: opts.temp,
-            messages,
         });
         const content = response.choices[0].message.content;
         return content;
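
For context, a minimal usage sketch of the new MERMAID call type, assuming gptAPICall and GPTCallType are exported from GPT.ts (the export statements are not shown in this diff, so the import path and names below are assumptions):

// Hypothetical usage sketch -- assumes GPT.ts exports gptAPICall and GPTCallType.
import { gptAPICall, GPTCallType } from './src/client/apis/gpt/GPT';

// Ask the model for Mermaid markup from a plain-text description.
// The 'mermaid' entry in callTypeMap routes this call through gpt-4-turbo
// with temperature 0 and the Mermaid example prompt added in this commit.
async function describeAsMermaid(description: string): Promise<string | null> {
    return gptAPICall(description, GPTCallType.MERMAID);
}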
