path: root/src/client/apis/gpt/GPT.ts
author    bobzel <zzzman@gmail.com>    2024-05-17 14:55:36 -0400
committer bobzel <zzzman@gmail.com>    2024-05-17 14:55:36 -0400
commit    0b451af28e5aef6b749da61e8a9fcd0a840789ac (patch)
tree      bdee4e28ee4715b69299a8da1b615c70b6adc445 /src/client/apis/gpt/GPT.ts
parent    8c1b420a143e4b72ec551277887c211ca6ca003b (diff)
parent    38a382a03675d6a50ec7de75f05025efd093f570 (diff)
merged with new master
Diffstat (limited to 'src/client/apis/gpt/GPT.ts')
-rw-r--r--  src/client/apis/gpt/GPT.ts | 46
1 file changed, 30 insertions(+), 16 deletions(-)
diff --git a/src/client/apis/gpt/GPT.ts b/src/client/apis/gpt/GPT.ts
index 3370f19fc..078ac3e55 100644
--- a/src/client/apis/gpt/GPT.ts
+++ b/src/client/apis/gpt/GPT.ts
@@ -5,7 +5,7 @@ enum GPTCallType {
SUMMARY = 'summary',
COMPLETION = 'completion',
EDIT = 'edit',
- MERMAID='mermaid',
+ MERMAID = 'mermaid',
DATA = 'data',
}
@@ -21,32 +21,46 @@ type GPTCallOpts = {
*/
const callTypeMap: { [type: string]: GPTCallOpts } = {
- summary: { model: 'gpt-3.5-turbo-instruct', maxTokens: 256, temp: 0.5, prompt: 'Summarize this text in simpler terms: ' },
- edit: { model: 'gpt-3.5-turbo-instruct', maxTokens: 256, temp: 0.5, prompt: 'Reword this: ' },
- completion: { model: 'gpt-3.5-turbo-instruct', maxTokens: 256, temp: 0.5, prompt: '' },
- mermaid:{model:'gpt-4-turbo',maxTokens:2048,temp:0,prompt:"(Heres an example of changing color of a pie chart to help you pie title Example \"Red\": 20 \"Blue\": 50 \"Green\": 30 %%{init: {'theme': 'base', 'themeVariables': {'pie1': '#0000FF', 'pie2': '#00FF00', 'pie3': '#FF0000'}}}%% keep in mind that pie1 is the highest since its sorted in descending order. Heres an example of a mindmap: mindmap root((mindmap)) Origins Long history ::icon(fa fa-book) Popularisation British popular psychology author Tony Buzan Research On effectivness<br/>and features On Automatic creation Uses Creative techniques Strategic planning Argument mapping Tools Pen and paper Mermaid. "},
- data: { model: 'gpt-3.5-turbo', maxTokens: 256, temp: 0.5, prompt: "You are a helpful resarch assistant. Analyze the user's data to find meaningful patterns and/or correlation. Please keep your response short and to the point." },
+ // newest model: gpt-4
+ summary: { model: 'gpt-3.5-turbo', maxTokens: 256, temp: 0.5, prompt: 'Summarize the text given in simpler terms.' },
+ edit: { model: 'gpt-3.5-turbo', maxTokens: 256, temp: 0.5, prompt: 'Reword the text.' },
+ completion: { model: 'gpt-3.5-turbo', maxTokens: 256, temp: 0.5, prompt: "You are a helpful assistant. Answer the user's prompt." },
+ mermaid: {
+ model: 'gpt-4-turbo',
+ maxTokens: 2048,
+ temp: 0,
+ prompt: "(Heres an example of changing color of a pie chart to help you pie title Example \"Red\": 20 \"Blue\": 50 \"Green\": 30 %%{init: {'theme': 'base', 'themeVariables': {'pie1': '#0000FF', 'pie2': '#00FF00', 'pie3': '#FF0000'}}}%% keep in mind that pie1 is the highest since its sorted in descending order. Heres an example of a mindmap: mindmap root((mindmap)) Origins Long history ::icon(fa fa-book) Popularisation British popular psychology author Tony Buzan Research On effectivness<br/>and features On Automatic creation Uses Creative techniques Strategic planning Argument mapping Tools Pen and paper Mermaid. ",
+ },
+ data: {
+ model: 'gpt-3.5-turbo',
+ maxTokens: 256,
+ temp: 0.5,
+ prompt: "You are a helpful resarch assistant. Analyze the user's data to find meaningful patterns and/or correlation. Please only return a JSON with a correlation column 1 propert, a correlation column 2 property, and an analysis property. ",
+ },
};
-
-/**`
+let lastCall = '';
+let lastResp = '';
+/**
* Calls the OpenAI API.
*
* @param inputText Text to process
* @returns AI Output
*/
-const gptAPICall = async (inputText: string, callType: GPTCallType, prompt?: any) => {
- if (callType === GPTCallType.SUMMARY) inputText += '.';
+const gptAPICall = async (inputTextIn: string, callType: GPTCallType, prompt?: any) => {
+ const inputText = callType === GPTCallType.SUMMARY ? inputTextIn + '.' : inputTextIn;
const opts: GPTCallOpts = callTypeMap[callType];
+ if (lastCall === inputText) return lastResp;
try {
const configuration: ClientOptions = {
- apiKey: "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",
+ apiKey: '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',
dangerouslyAllowBrowser: true,
};
+ lastCall = inputText;
const openai = new OpenAI(configuration);
- let usePrompt = prompt ? opts.prompt + prompt : opts.prompt;
- let messages: ChatCompletionMessageParam[] = [
+ const usePrompt = prompt ? opts.prompt + prompt : opts.prompt;
+ const messages: ChatCompletionMessageParam[] = [
{ role: 'system', content: usePrompt },
{ role: 'user', content: inputText },
];
@@ -56,8 +70,8 @@ const gptAPICall = async (inputText: string, callType: GPTCallType, prompt?: any
messages: messages,
temperature: opts.temp,
});
- const content = response.choices[0].message.content;
- return content;
+ lastResp = response.choices[0].message.content ?? '';
+ return lastResp;
} catch (err) {
console.log(err);
return 'Error connecting with API.';
@@ -80,8 +94,8 @@ const gptImageCall = async (prompt: string, n?: number) => {
return response.data.map(data => data.url);
} catch (err) {
console.error(err);
- return;
}
+ return undefined;
};
export { gptAPICall, gptImageCall, GPTCallType };
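
For reference, a minimal usage sketch of the two exported helpers as they stand after this merge (assumed caller code, not part of the commit; the import path and sample prompts are hypothetical). It illustrates the single-entry cache introduced here: lastCall/lastResp let a repeated call with identical input return the previous response without another request to OpenAI.

import { gptAPICall, gptImageCall, GPTCallType } from './GPT'; // path assumed for a sibling module

const demo = async () => {
    const text = 'Large language models predict the next token from the preceding context.';
    // First call hits the chat completions endpoint using the 'summary' options from callTypeMap.
    const first = await gptAPICall(text, GPTCallType.SUMMARY);
    // An identical follow-up call matches lastCall and returns the cached lastResp without a network round trip.
    const second = await gptAPICall(text, GPTCallType.SUMMARY);
    console.log(first === second); // true when the cache is hit
    // On failure gptImageCall now reaches an explicit return undefined, so callers can branch on the result.
    const urls = await gptImageCall('a watercolor fox reading a book', 1);
    if (urls) console.log(urls);
};
demo();

Note that the cache keys only on the input text, not on the call type, so the same text sent with a different GPTCallType would also return the cached response.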