about summary refs log tree commit diff
path: root/src/client/views/nodes/ChatBox/tools
diff options
context:
space:
mode:
Diffstat (limited to 'src/client/views/nodes/ChatBox/tools')
-rw-r--r--  src/client/views/nodes/ChatBox/tools/CalculateTool.ts           1
-rw-r--r--  src/client/views/nodes/ChatBox/tools/CreateCollectionTool.ts    1
-rw-r--r--  src/client/views/nodes/ChatBox/tools/DataAnalysisTool.ts       36
-rw-r--r--  src/client/views/nodes/ChatBox/tools/SearchTool.ts             63
-rw-r--r--  src/client/views/nodes/ChatBox/tools/WebsiteInfoScraperTool.ts 52
5 files changed, 95 insertions, 58 deletions
diff --git a/src/client/views/nodes/ChatBox/tools/CalculateTool.ts b/src/client/views/nodes/ChatBox/tools/CalculateTool.ts
index 818332c44..74b7ca27b 100644
--- a/src/client/views/nodes/ChatBox/tools/CalculateTool.ts
+++ b/src/client/views/nodes/ChatBox/tools/CalculateTool.ts
@@ -10,6 +10,7 @@ export class CalculateTool extends BaseTool<{ expression: string }> {
type: 'string',
description: 'The mathematical expression to evaluate',
required: 'true',
+ max_inputs: '1',
},
},
'Provide a mathematical expression to calculate that would work with JavaScript eval().',
diff --git a/src/client/views/nodes/ChatBox/tools/CreateCollectionTool.ts b/src/client/views/nodes/ChatBox/tools/CreateCollectionTool.ts
index 26ac0d7cc..573428179 100644
--- a/src/client/views/nodes/ChatBox/tools/CreateCollectionTool.ts
+++ b/src/client/views/nodes/ChatBox/tools/CreateCollectionTool.ts
@@ -16,6 +16,7 @@ export class GetDocsContentTool extends BaseTool<{ title: string; document_ids:
type: 'string',
description: 'the title of the collection that you will be making',
required: 'true',
+ max_inputs: '1',
},
},
'Provide a mathematical expression to calculate that would work with JavaScript eval().',
diff --git a/src/client/views/nodes/ChatBox/tools/DataAnalysisTool.ts b/src/client/views/nodes/ChatBox/tools/DataAnalysisTool.ts
index b45733639..a12ee46e5 100644
--- a/src/client/views/nodes/ChatBox/tools/DataAnalysisTool.ts
+++ b/src/client/views/nodes/ChatBox/tools/DataAnalysisTool.ts
@@ -1,21 +1,22 @@
import { BaseTool } from './BaseTool';
-export class DataAnalysisTool extends BaseTool<{ csv_file_name: string }> {
+export class DataAnalysisTool extends BaseTool<{ csv_file_name: string | string[] }> {
private csv_files_function: () => { filename: string; id: string; text: string }[];
constructor(csv_files: () => { filename: string; id: string; text: string }[]) {
super(
'dataAnalysis',
- 'Analyzes, and provides insights, from a CSV file',
+ 'Analyzes, and provides insights, from one or more CSV files',
{
csv_file_name: {
type: 'string',
- description: 'Name of the CSV file to analyze',
+ description: 'Name(s) of the CSV file(s) to analyze',
required: 'true',
+ max_inputs: '3',
},
},
- 'Provide the name of the CSV file to analyze based on the user query and whichever available CSV file may be relevant.',
- 'Provides the full CSV file text for your analysis based on the user query and the available CSV file. '
+ 'Provide the name(s) of up to 3 CSV files to analyze based on the user query and whichever available CSV files may be relevant.',
+ 'Provides the full CSV file text for your analysis based on the user query and the available CSV file(s). '
);
this.csv_files_function = csv_files;
}
@@ -32,8 +33,27 @@ export class DataAnalysisTool extends BaseTool<{ csv_file_name: string }> {
return file?.id;
}
- async execute(args: { csv_file_name: string }): Promise<any> {
- console.log(this.csv_files_function());
- return [{ type: 'text', text: `<chunk chunk_id=${this.getFileID(args.csv_file_name)} chunk_type=csv}>` + this.getFileContent(args.csv_file_name) + '</chunk>' }];
+ async execute(args: { csv_file_name: string | string[] }): Promise<any> {
+ const filenames = Array.isArray(args.csv_file_name) ? args.csv_file_name : [args.csv_file_name];
+ const results = [];
+
+ for (const filename of filenames) {
+ const fileContent = this.getFileContent(filename);
+ const fileID = this.getFileID(filename);
+
+ if (fileContent && fileID) {
+ results.push({
+ type: 'text',
+ text: `<chunk chunk_id=${fileID} chunk_type=csv>${fileContent}</chunk>`,
+ });
+ } else {
+ results.push({
+ type: 'text',
+ text: `File not found: ${filename}`,
+ });
+ }
+ }
+
+ return results;
}
}
diff --git a/src/client/views/nodes/ChatBox/tools/SearchTool.ts b/src/client/views/nodes/ChatBox/tools/SearchTool.ts
index 6523fd11c..b926cbadc 100644
--- a/src/client/views/nodes/ChatBox/tools/SearchTool.ts
+++ b/src/client/views/nodes/ChatBox/tools/SearchTool.ts
@@ -1,47 +1,54 @@
+import { max } from 'lodash';
import { Networking } from '../../../../Network';
import { BaseTool } from './BaseTool';
import { v4 as uuidv4 } from 'uuid';
-export class SearchTool extends BaseTool<{ query: string }> {
+export class SearchTool extends BaseTool<{ query: string | string[] }> {
private _addLinkedUrlDoc: (url: string, id: string) => void;
-
- constructor(addLinkedUrlDoc: (url: string, id: string) => void) {
+ private _max_results: number;
+ constructor(addLinkedUrlDoc: (url: string, id: string) => void, max_results: number = 5) {
super(
'searchTool',
- 'Search the web to find a wide range of websites related to a query',
+ 'Search the web to find a wide range of websites related to a query or multiple queries',
{
query: {
type: 'string',
- description: 'The search query to use for finding websites',
- required: true,
+ description: 'The search query or queries to use for finding websites',
+ required: 'true',
+ max_inputs: '3',
},
},
- 'Provide a search query to find a broad range of websites. This tool is intended to help you identify relevant websites, but not to be used for providing the final answer. Use this information to determine which specific website to investigate further.',
- 'Returns a list of websites and their overviews based on the search query, helping to identify which website might contain the most relevant information.'
+ 'Provide up to 3 search queries to find a broad range of websites. This tool is intended to help you identify relevant websites, but not to be used for providing the final answer. Use this information to determine which specific website to investigate further.',
+ 'Returns a list of websites and their overviews based on the search queries, helping to identify which websites might contain relevant information.'
);
this._addLinkedUrlDoc = addLinkedUrlDoc;
+ this._max_results = max_results;
}
- async execute(args: { query: string }): Promise<any> {
- try {
- const { results } = await Networking.PostToServer('/getWebSearchResults', { query: args.query });
- console.log(results);
- const data: { type: string; text: string }[] = results.map((result: { url: string; snippet: string }) => {
- console.log;
- const id = uuidv4();
- //this._addLinkedUrlDoc(result.url, id); //not needed right now because it shouldn't use this information to ground responses and should scrape afterwards
- return {
- type: 'text',
- text: `<chunk chunk_id="${id}" chunk_type="text">
- <url>${result.url}</url>
- <overview>${result.snippet}</overview>
- </chunk>`,
- };
- });
- return data;
- } catch (error) {
- console.log(error);
- return [{ type: 'text', text: 'An error occurred while performing the web search.' }];
+ async execute(args: { query: string | string[] }): Promise<any> {
+ const queries = Array.isArray(args.query) ? args.query : [args.query];
+ const allResults = [];
+
+ for (const query of queries) {
+ try {
+ const { results } = await Networking.PostToServer('/getWebSearchResults', { query, max_results: this._max_results });
+ const data: { type: string; text: string }[] = results.map((result: { url: string; snippet: string }) => {
+ const id = uuidv4();
+ return {
+ type: 'text',
+ text: `<chunk chunk_id="${id}" chunk_type="text">
+ <url>${result.url}</url>
+ <overview>${result.snippet}</overview>
+ </chunk>`,
+ };
+ });
+ allResults.push(...data);
+ } catch (error) {
+ console.log(error);
+ allResults.push({ type: 'text', text: `An error occurred while performing the web search for query: ${query}` });
+ }
}
+
+ return allResults;
}
}
diff --git a/src/client/views/nodes/ChatBox/tools/WebsiteInfoScraperTool.ts b/src/client/views/nodes/ChatBox/tools/WebsiteInfoScraperTool.ts
index c59afefbd..739329bea 100644
--- a/src/client/views/nodes/ChatBox/tools/WebsiteInfoScraperTool.ts
+++ b/src/client/views/nodes/ChatBox/tools/WebsiteInfoScraperTool.ts
@@ -2,43 +2,51 @@ import { Networking } from '../../../../Network';
import { BaseTool } from './BaseTool';
import { v4 as uuidv4 } from 'uuid';
-export class WebsiteInfoScraperTool extends BaseTool<{ url: string }> {
+export class WebsiteInfoScraperTool extends BaseTool<{ url: string | string[] }> {
private _addLinkedUrlDoc: (url: string, id: string) => void;
constructor(addLinkedUrlDoc: (url: string, id: string) => void) {
super(
'websiteInfoScraper',
- 'Scrape detailed information from a specific website identified as the most relevant',
+ 'Scrape detailed information from specific websites identified as relevant to the user query',
{
url: {
type: 'string',
- description: 'The URL of the website to scrape',
- required: true,
+ description: 'The URL(s) of the website(s) to scrape',
+ required: 'true',
+ max_inputs: '3',
},
},
- 'Provide the URL of the website that you have identified as the most relevant from the previous search. This tool will scrape and process detailed information from that specific website. It will also create a document from the scraped content for future reference.',
- 'Returns the full HTML content from the provided URL and creates a document from the content for further analysis.'
+ 'Provide up to 3 URLs of websites that you have identified as the most relevant from the previous search. This tool will provide screenshots of those specific websites. It will also create a document from the scraped content for future reference. When providing a response to the user, ideally reference as many of the websites as possible in order to provide a well grounded result.',
+ 'Returns the full content of the webpages as images for analysis.'
);
this._addLinkedUrlDoc = addLinkedUrlDoc;
}
- async execute(args: { url: string }): Promise<any> {
- try {
- const { website_image_base64 } = await Networking.PostToServer('/scrapeWebsite', { url: args.url });
- const id = uuidv4();
- this._addLinkedUrlDoc(args.url, id);
- return [
- { type: 'text', text: `<chunk chunk_id=${id} chunk_type=url> ` },
- {
- type: 'image_url',
- image_url: {
- url: `data:image/jpeg;base64,${website_image_base64}`,
+ async execute(args: { url: string | string[] }): Promise<any> {
+ const urls = Array.isArray(args.url) ? args.url : [args.url];
+ const results = [];
+
+ for (const url of urls) {
+ try {
+ const { website_image_base64 } = await Networking.PostToServer('/scrapeWebsite', { url });
+ const id = uuidv4();
+ this._addLinkedUrlDoc(url, id);
+ results.push(
+ { type: 'text', text: `<chunk chunk_id=${id} chunk_type=url> ` },
+ {
+ type: 'image_url',
+ image_url: {
+ url: `data:image/jpeg;base64,${website_image_base64}`,
+ },
},
- },
- { type: 'text', text: `</chunk>\n` },
- ];
- } catch (error) {
- return [{ type: 'text', text: 'An error occurred while scraping the website.' }];
+ { type: 'text', text: `</chunk>\n` }
+ );
+ } catch (error) {
+ results.push({ type: 'text', text: `An error occurred while scraping the website: ${url}` });
+ }
}
+
+ return results;
}
}