diff --git a/src/agents/agent.ts b/src/agents/agent.ts index fe6f8fe..b4bd074 100644 --- a/src/agents/agent.ts +++ b/src/agents/agent.ts @@ -18,10 +18,13 @@ declare global { interface AgentParameters { debug?: boolean input: any + uid?: string output?: any agent?: Agent error?: string apikeys?: APIKeys + state?: any + logWriter?: (p: { worker: AIWorker, state: any }) => void } } diff --git a/src/agents/agentfactory.ts b/src/agents/agentfactory.ts index 4b78533..98c5c90 100644 --- a/src/agents/agentfactory.ts +++ b/src/agents/agentfactory.ts @@ -2,13 +2,23 @@ import { supabase } from "./db" import { workerRegistry } from "./registry" import { buildWorker } from "./worker" import { ulid } from 'ulid' +import { ApiWorker } from "./workers/api" +import { z } from "zod" interface AgentConfig { edges?: object id?: number title?: string + description?: string + type?: AgentTypes workers?: object + team_id?: string + debuguuid?: string +} + +declare global { + type AgentTypes = "conversational" | "data" } @@ -25,6 +35,12 @@ export function createAgent(config: AgentConfig) { set title(v: string) { config.title = v }, edges, workers, + displayData: false, + + type: "data" as AgentTypes, + debuguuid: config.debuguuid || "", + description: "", + currentWorker: null as AIWorker, update() { @@ -43,6 +59,19 @@ export function createAgent(config: AgentConfig) { } return null }, + getEndAPIWorkers(p: AgentParameters) { + const apiWorkers: ApiWorker[] = [] + for (const key in workers) { + const w = workers[key] + if (w.config.type !== "api") continue + const outputHandles = Object.values(w.handles).filter((h) => h.direction === "output") + if (outputHandles.length === 0) continue + const cw = w.getConnectedWokers(p) + if (cw.length === 0) continue + apiWorkers.push(w as any) + } + return apiWorkers + }, getInputWorker() { for (const key in workers) { @@ -82,16 +111,41 @@ export function createAgent(config: AgentConfig) { p.input ||= {} p.output ||= {} p.apikeys ||= {} + p.state ||= {} + p.logWriter ||= () => { } p.agent = agent + + if (p.debug && agent.debuguuid && !p.uid) { + p.uid = agent.debuguuid + } + + const hasUid = p.uid && z.string().uuid().safeParse(p.uid).success + + if (hasUid) { + const dbState = await supabase.from("states").select("*").eq("id", p.uid).single() + if (dbState.data) p.state = dbState.data.state || {} + } + console.log(`Executing agent '${agent.title}'`) + const worker = agent.getResponseWorker() - if (!worker) return + const apiWorkers = agent.getEndAPIWorkers(p) + + if (!worker && !apiWorkers.length) return + try { await worker.execute(p) + for (const w of apiWorkers) { + await w.execute(p) + } + + if (hasUid) await supabase.from("states").upsert({ id: p.uid, state: p.state || {} }) + } catch (error) { console.error(error) p.error = error.toString() } + agent.currentWorker = null agent.update() }, @@ -107,7 +161,9 @@ export function createAgent(config: AgentConfig) { }, addWorker(w: WorkerConfig): AIWorker { - w.id ||= `NODE_${ulid()}` + const nameType = w.type?.toUpperCase() || "NODE" + + w.id ||= `${nameType}_${ulid()}` w.handles ||= {} const worker = buildWorker(w) workers[w.id] = worker @@ -127,6 +183,8 @@ export function configureAgent(data: AgentConfig) { const workers: WorkerConfig[] = (data.workers || []) as any const agent = createAgent(data) + agent.type = data.type || "data" + agent.description = data.description || "" for (const w of workers) { const { handles, ...rest } = w @@ -170,11 +228,15 @@ export function configureAgent(data: AgentConfig) { } -export async 
function saveAgent(agent: Agent) { +export async function saveAgent(agent: Agent, team_id?: string) { const agentData: AgentConfig = { title: agent.title, - edges: agent.edges + description: agent.description, + type: agent.type, + edges: agent.edges, + team_id, + debuguuid: agent.debuguuid || "", } const workerlist = [] @@ -212,9 +274,7 @@ export async function saveAgent(agent: Agent) { agent.id = data[0].id } - - console.log(workerlist) - + return agent } diff --git a/src/agents/registry.ts b/src/agents/registry.ts index f0d3164..aca2b44 100644 --- a/src/agents/registry.ts +++ b/src/agents/registry.ts @@ -9,9 +9,11 @@ import { schema } from "./workers/schema" import { search } from "./workers/search" import { text } from "./workers/text" import { api } from "./workers/api" -// import { stt } from "./workers/tts" -// import { background } from "./workers/background" -// import { condition } from "./workers/condition" +import { documentSelector } from "./workers/documentselector" +import { state } from "./workers/state" +import { stt } from "./workers/stt" +import { tts } from "./workers/tts" +import { translate } from "./workers/translate" type WorkerCategories = "io" | "generator" | "debug" | "tool" @@ -45,9 +47,14 @@ export const workerRegistry = { search, combine, + documentSelector, mock, display, api, + state, + stt, + tts, + translate, } satisfies { [index: string]: WorkerRegistryItem } diff --git a/src/agents/supabase.ts b/src/agents/supabase.ts index 13ffd6c..759d2b7 100644 --- a/src/agents/supabase.ts +++ b/src/agents/supabase.ts @@ -37,26 +37,81 @@ export type Database = { agents: { Row: { created_at: string + debuguuid: string | null + description: string | null edges: Json | null id: number + team_id: string | null title: string | null + type: Database["public"]["Enums"]["agent_types"] | null workers: Json | null } Insert: { created_at?: string + debuguuid?: string | null + description?: string | null edges?: Json | null id?: number + team_id?: string | null title?: string | null + type?: Database["public"]["Enums"]["agent_types"] | null workers?: Json | null } Update: { created_at?: string + debuguuid?: string | null + description?: string | null edges?: Json | null id?: number + team_id?: string | null title?: string | null + type?: Database["public"]["Enums"]["agent_types"] | null workers?: Json | null } - Relationships: [] + Relationships: [ + { + foreignKeyName: "agents_team_id_fkey" + columns: ["team_id"] + isOneToOne: false + referencedRelation: "teams" + referencedColumns: ["id"] + }, + ] + } + api_keys: { + Row: { + created_at: string + description: string | null + id: string + key: string | null + team_id: string | null + type: string + } + Insert: { + created_at?: string + description?: string | null + id?: string + key?: string | null + team_id?: string | null + type: string + } + Update: { + created_at?: string + description?: string | null + id?: string + key?: string | null + team_id?: string | null + type?: string + } + Relationships: [ + { + foreignKeyName: "api_keys_team_id_fkey" + columns: ["team_id"] + isOneToOne: false + referencedRelation: "teams" + referencedColumns: ["id"] + }, + ] } bot_conversations: { Row: { @@ -65,6 +120,7 @@ export type Database = { created_at: string | null id: string session_id: string | null + team_id: string | null user_message: string | null } Insert: { @@ -73,6 +129,7 @@ export type Database = { created_at?: string | null id?: string session_id?: string | null + team_id?: string | null user_message?: string | null } Update: { 
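// Usage sketch (not part of the diff): saving an agent with the new team-aware
// saveAgent(agent, team_id) signature shown above. loadAgent's argument type and
// the team id value are assumptions for illustration.
import { loadAgent, saveAgent } from "./agentfactory"

async function saveForTeam(agentId: number, teamId: string) {
  const agent = await loadAgent(agentId)
  agent.type = "conversational"                 // new AgentTypes field
  agent.description = "Routes questions to the right worker"
  const saved = await saveAgent(agent, teamId)  // team_id is now stored on the agents row
  return saved.id
}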
@@ -81,6 +138,7 @@ export type Database = { created_at?: string | null id?: string session_id?: string | null + team_id?: string | null user_message?: string | null } Relationships: [ @@ -91,6 +149,13 @@ export type Database = { referencedRelation: "bots" referencedColumns: ["id"] }, + { + foreignKeyName: "bot_conversations_team_id_fkey" + columns: ["team_id"] + isOneToOne: false + referencedRelation: "teams" + referencedColumns: ["id"] + }, ] } bot_logs: { @@ -103,6 +168,7 @@ export type Database = { detected_location: string | null id: string search_term: string | null + team_id: string | null user_message: string | null } Insert: { @@ -114,6 +180,7 @@ export type Database = { detected_location?: string | null id?: string search_term?: string | null + team_id?: string | null user_message?: string | null } Update: { @@ -125,6 +192,7 @@ export type Database = { detected_location?: string | null id?: string search_term?: string | null + team_id?: string | null user_message?: string | null } Relationships: [ @@ -142,6 +210,13 @@ export type Database = { referencedRelation: "service_categories" referencedColumns: ["id"] }, + { + foreignKeyName: "bot_logs_team_id_fkey" + columns: ["team_id"] + isOneToOne: false + referencedRelation: "teams" + referencedColumns: ["id"] + }, ] } bot_scores: { @@ -151,10 +226,12 @@ export type Database = { category: string | null created_at: string id: string + log_id: string | null message: string | null question: string | null reporter: string | null score: string | null + team_id: string | null } Insert: { answer?: string | null @@ -162,10 +239,12 @@ export type Database = { category?: string | null created_at?: string id?: string + log_id?: string | null message?: string | null question?: string | null reporter?: string | null score?: string | null + team_id?: string | null } Update: { answer?: string | null @@ -173,10 +252,12 @@ export type Database = { category?: string | null created_at?: string id?: string + log_id?: string | null message?: string | null question?: string | null reporter?: string | null score?: string | null + team_id?: string | null } Relationships: [ { @@ -193,6 +274,20 @@ export type Database = { referencedRelation: "service_categories" referencedColumns: ["id"] }, + { + foreignKeyName: "bot_scores_log_id_fkey" + columns: ["log_id"] + isOneToOne: false + referencedRelation: "bot_logs" + referencedColumns: ["id"] + }, + { + foreignKeyName: "bot_scores_team_id_fkey" + columns: ["team_id"] + isOneToOne: false + referencedRelation: "teams" + referencedColumns: ["id"] + }, ] } bot_system_prompts: { @@ -202,6 +297,7 @@ export type Database = { id: string position: number | null system_prompt_id: string + team_id: string | null } Insert: { bot_id: string @@ -209,6 +305,7 @@ export type Database = { id?: string position?: number | null system_prompt_id: string + team_id?: string | null } Update: { bot_id?: string @@ -216,6 +313,7 @@ export type Database = { id?: string position?: number | null system_prompt_id?: string + team_id?: string | null } Relationships: [ { @@ -232,6 +330,13 @@ export type Database = { referencedRelation: "system_prompts" referencedColumns: ["id"] }, + { + foreignKeyName: "bot_system_prompts_team_id_fkey" + columns: ["team_id"] + isOneToOne: false + referencedRelation: "teams" + referencedColumns: ["id"] + }, ] } bots: { @@ -249,6 +354,7 @@ export type Database = { name: string system_prompt: string | null system_prompt_id: string | null + team_id: string | null temperature: number | null translate_to_user_language: 
boolean | null updated_at: string | null @@ -267,6 +373,7 @@ export type Database = { name: string system_prompt?: string | null system_prompt_id?: string | null + team_id?: string | null temperature?: number | null translate_to_user_language?: boolean | null updated_at?: string | null @@ -285,6 +392,7 @@ export type Database = { name?: string system_prompt?: string | null system_prompt_id?: string | null + team_id?: string | null temperature?: number | null translate_to_user_language?: boolean | null updated_at?: string | null @@ -304,6 +412,13 @@ export type Database = { referencedRelation: "system_prompts" referencedColumns: ["id"] }, + { + foreignKeyName: "bots_team_id_fkey" + columns: ["team_id"] + isOneToOne: false + referencedRelation: "teams" + referencedColumns: ["id"] + }, { foreignKeyName: "fk_bots_model" columns: ["model"] @@ -348,18 +463,29 @@ export type Database = { created_at: string | null id: string name: string + team_id: string | null } Insert: { created_at?: string | null id?: string name: string + team_id?: string | null } Update: { created_at?: string | null id?: string name?: string + team_id?: string | null } - Relationships: [] + Relationships: [ + { + foreignKeyName: "collections_team_id_fkey" + columns: ["team_id"] + isOneToOne: false + referencedRelation: "teams" + referencedColumns: ["id"] + }, + ] } live_data_elements: { Row: { @@ -371,6 +497,7 @@ export type Database = { metadata: Json | null source_config_id: string status: string + team_id: string | null vector: string | null version: string } @@ -383,6 +510,7 @@ export type Database = { metadata?: Json | null source_config_id: string status?: string + team_id?: string | null vector?: string | null version?: string } @@ -395,6 +523,7 @@ export type Database = { metadata?: Json | null source_config_id?: string status?: string + team_id?: string | null vector?: string | null version?: string } @@ -406,8 +535,45 @@ export type Database = { referencedRelation: "source_configs" referencedColumns: ["source"] }, + { + foreignKeyName: "live_data_elements_team_id_fkey" + columns: ["team_id"] + isOneToOne: false + referencedRelation: "teams" + referencedColumns: ["id"] + }, ] } + logs: { + Row: { + agent: string | null + created_at: string + handles: Json[] | null + id: string + state: Json | null + team_id: string | null + worker: string | null + } + Insert: { + agent?: string | null + created_at?: string + handles?: Json[] | null + id?: string + state?: Json | null + team_id?: string | null + worker?: string | null + } + Update: { + agent?: string | null + created_at?: string + handles?: Json[] | null + id?: string + state?: Json | null + team_id?: string | null + worker?: string | null + } + Relationships: [] + } models: { Row: { created_at: string | null @@ -415,6 +581,7 @@ export type Database = { model_id: string name: string provider: string | null + team_id: string | null } Insert: { created_at?: string | null @@ -422,6 +589,7 @@ export type Database = { model_id: string name: string provider?: string | null + team_id?: string | null } Update: { created_at?: string | null @@ -429,8 +597,17 @@ export type Database = { model_id?: string name?: string provider?: string | null + team_id?: string | null } - Relationships: [] + Relationships: [ + { + foreignKeyName: "models_team_id_fkey" + columns: ["team_id"] + isOneToOne: false + referencedRelation: "teams" + referencedColumns: ["id"] + }, + ] } projects: { Row: { @@ -473,20 +650,37 @@ export type Database = { description: string | null id: string name: string | 
null + permissions: Json[] | null + team_id: string | null + teams_id: string[] | null } Insert: { created_at?: string description?: string | null id?: string name?: string | null + permissions?: Json[] | null + team_id?: string | null + teams_id?: string[] | null } Update: { created_at?: string description?: string | null id?: string name?: string | null + permissions?: Json[] | null + team_id?: string | null + teams_id?: string[] | null } - Relationships: [] + Relationships: [ + { + foreignKeyName: "roles_team_id_fkey" + columns: ["team_id"] + isOneToOne: false + referencedRelation: "teams" + referencedColumns: ["id"] + }, + ] } service_categories: { Row: { @@ -527,6 +721,7 @@ export type Database = { sitemap: string | null source: string subdomain: string | null + team_id: string | null type: string | null url: string | null } @@ -544,6 +739,7 @@ export type Database = { sitemap?: string | null source: string subdomain?: string | null + team_id?: string | null type?: string | null url?: string | null } @@ -561,6 +757,7 @@ export type Database = { sitemap?: string | null source?: string subdomain?: string | null + team_id?: string | null type?: string | null url?: string | null } @@ -572,6 +769,13 @@ export type Database = { referencedRelation: "sources" referencedColumns: ["id"] }, + { + foreignKeyName: "source_configs_team_id_fkey" + columns: ["team_id"] + isOneToOne: false + referencedRelation: "teams" + referencedColumns: ["id"] + }, ] } sources: { @@ -582,6 +786,7 @@ export type Database = { last_updated: string | null name: string tags: string[] | null + team_id: string | null type: string | null vector: string | null } @@ -592,6 +797,7 @@ export type Database = { last_updated?: string | null name: string tags?: string[] | null + team_id?: string | null type?: string | null vector?: string | null } @@ -602,9 +808,33 @@ export type Database = { last_updated?: string | null name?: string tags?: string[] | null + team_id?: string | null type?: string | null vector?: string | null } + Relationships: [ + { + foreignKeyName: "sources_team_id_fkey" + columns: ["team_id"] + isOneToOne: false + referencedRelation: "teams" + referencedColumns: ["id"] + }, + ] + } + states: { + Row: { + id: string + state: Json | null + } + Insert: { + id?: string + state?: Json | null + } + Update: { + id?: string + state?: Json | null + } Relationships: [] } system_prompts: { @@ -615,6 +845,7 @@ export type Database = { language: string | null name: string status: string | null + team_id: string | null updated_at: string | null version: string | null } @@ -625,6 +856,7 @@ export type Database = { language?: string | null name: string status?: string | null + team_id?: string | null updated_at?: string | null version?: string | null } @@ -635,10 +867,19 @@ export type Database = { language?: string | null name?: string status?: string | null + team_id?: string | null updated_at?: string | null version?: string | null } - Relationships: [] + Relationships: [ + { + foreignKeyName: "system_prompts_team_id_fkey" + columns: ["team_id"] + isOneToOne: false + referencedRelation: "teams" + referencedColumns: ["id"] + }, + ] } tags: { Row: { @@ -679,6 +920,42 @@ export type Database = { } Relationships: [] } + user_teams: { + Row: { + created_at: string + id: string + team_id: string | null + user_id: string | null + } + Insert: { + created_at?: string + id?: string + team_id?: string | null + user_id?: string | null + } + Update: { + created_at?: string + id?: string + team_id?: string | null + user_id?: string | 
null + } + Relationships: [ + { + foreignKeyName: "user_teams_team_id_fkey" + columns: ["team_id"] + isOneToOne: false + referencedRelation: "teams" + referencedColumns: ["id"] + }, + { + foreignKeyName: "user_teams_user_id_fkey" + columns: ["user_id"] + isOneToOne: false + referencedRelation: "users" + referencedColumns: ["id"] + }, + ] + } users: { Row: { created_at: string @@ -689,7 +966,6 @@ export type Database = { language: Json | null last_name: string | null location: string | null - password: string | null role: string | null status: string | null team: string | null @@ -704,7 +980,6 @@ export type Database = { language?: Json | null last_name?: string | null location?: string | null - password?: string | null role?: string | null status?: string | null team?: string | null @@ -719,7 +994,6 @@ export type Database = { language?: Json | null last_name?: string | null location?: string | null - password?: string | null role?: string | null status?: string | null team?: string | null @@ -742,47 +1016,6 @@ export type Database = { }, ] } - workers: { - Row: { - agent: number | null - created_at: string - handles: Json | null - id: string - parameters: Json | null - type: string | null - x: number | null - y: number | null - } - Insert: { - agent?: number | null - created_at?: string - handles?: Json | null - id: string - parameters?: Json | null - type?: string | null - x?: number | null - y?: number | null - } - Update: { - agent?: number | null - created_at?: string - handles?: Json | null - id?: string - parameters?: Json | null - type?: string | null - x?: number | null - y?: number | null - } - Relationships: [ - { - foreignKeyName: "workers_agent_fkey" - columns: ["agent"] - isOneToOne: false - referencedRelation: "agents" - referencedColumns: ["id"] - }, - ] - } } Views: { [_ in never]: never @@ -802,9 +1035,7 @@ export type Database = { }[] } match_documents: { - Args: { - query_text: string - } + Args: { query_text: string } Returns: { id: string content: string @@ -816,9 +1047,13 @@ export type Database = { similarity_search: { Args: { query_vector: string + target_collection_id: string + match_threshold: number + match_count: number } Returns: { id: string + name: string content: string similarity: number source_type: string @@ -826,7 +1061,7 @@ export type Database = { } } Enums: { - [_ in never]: never + agent_types: "conversational" | "data" } CompositeTypes: { [_ in never]: never @@ -834,27 +1069,29 @@ export type Database = { } } -type PublicSchema = Database[Extract] +type DefaultSchema = Database[Extract] export type Tables< - PublicTableNameOrOptions extends - | keyof (PublicSchema["Tables"] & PublicSchema["Views"]) + DefaultSchemaTableNameOrOptions extends + | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) | { schema: keyof Database }, - TableName extends PublicTableNameOrOptions extends { schema: keyof Database } - ? keyof (Database[PublicTableNameOrOptions["schema"]]["Tables"] & - Database[PublicTableNameOrOptions["schema"]]["Views"]) + TableName extends DefaultSchemaTableNameOrOptions extends { + schema: keyof Database + } + ? keyof (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) : never = never, -> = PublicTableNameOrOptions extends { schema: keyof Database } - ? (Database[PublicTableNameOrOptions["schema"]]["Tables"] & - Database[PublicTableNameOrOptions["schema"]]["Views"])[TableName] extends { +> = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } + ? 
(Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { Row: infer R } ? R : never - : PublicTableNameOrOptions extends keyof (PublicSchema["Tables"] & - PublicSchema["Views"]) - ? (PublicSchema["Tables"] & - PublicSchema["Views"])[PublicTableNameOrOptions] extends { + : DefaultSchemaTableNameOrOptions extends keyof (DefaultSchema["Tables"] & + DefaultSchema["Views"]) + ? (DefaultSchema["Tables"] & + DefaultSchema["Views"])[DefaultSchemaTableNameOrOptions] extends { Row: infer R } ? R @@ -862,20 +1099,22 @@ export type Tables< : never export type TablesInsert< - PublicTableNameOrOptions extends - | keyof PublicSchema["Tables"] + DefaultSchemaTableNameOrOptions extends + | keyof DefaultSchema["Tables"] | { schema: keyof Database }, - TableName extends PublicTableNameOrOptions extends { schema: keyof Database } - ? keyof Database[PublicTableNameOrOptions["schema"]]["Tables"] + TableName extends DefaultSchemaTableNameOrOptions extends { + schema: keyof Database + } + ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, -> = PublicTableNameOrOptions extends { schema: keyof Database } - ? Database[PublicTableNameOrOptions["schema"]]["Tables"][TableName] extends { +> = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } + ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Insert: infer I } ? I : never - : PublicTableNameOrOptions extends keyof PublicSchema["Tables"] - ? PublicSchema["Tables"][PublicTableNameOrOptions] extends { + : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] + ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { Insert: infer I } ? I @@ -883,20 +1122,22 @@ export type TablesInsert< : never export type TablesUpdate< - PublicTableNameOrOptions extends - | keyof PublicSchema["Tables"] + DefaultSchemaTableNameOrOptions extends + | keyof DefaultSchema["Tables"] | { schema: keyof Database }, - TableName extends PublicTableNameOrOptions extends { schema: keyof Database } - ? keyof Database[PublicTableNameOrOptions["schema"]]["Tables"] + TableName extends DefaultSchemaTableNameOrOptions extends { + schema: keyof Database + } + ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, -> = PublicTableNameOrOptions extends { schema: keyof Database } - ? Database[PublicTableNameOrOptions["schema"]]["Tables"][TableName] extends { +> = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } + ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Update: infer U } ? U : never - : PublicTableNameOrOptions extends keyof PublicSchema["Tables"] - ? PublicSchema["Tables"][PublicTableNameOrOptions] extends { + : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] + ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { Update: infer U } ? U @@ -904,21 +1145,23 @@ export type TablesUpdate< : never export type Enums< - PublicEnumNameOrOptions extends - | keyof PublicSchema["Enums"] + DefaultSchemaEnumNameOrOptions extends + | keyof DefaultSchema["Enums"] | { schema: keyof Database }, - EnumName extends PublicEnumNameOrOptions extends { schema: keyof Database } - ? keyof Database[PublicEnumNameOrOptions["schema"]]["Enums"] + EnumName extends DefaultSchemaEnumNameOrOptions extends { + schema: keyof Database + } + ? 
keyof Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] : never = never, -> = PublicEnumNameOrOptions extends { schema: keyof Database } - ? Database[PublicEnumNameOrOptions["schema"]]["Enums"][EnumName] - : PublicEnumNameOrOptions extends keyof PublicSchema["Enums"] - ? PublicSchema["Enums"][PublicEnumNameOrOptions] +> = DefaultSchemaEnumNameOrOptions extends { schema: keyof Database } + ? Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] + : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] + ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] : never export type CompositeTypes< PublicCompositeTypeNameOrOptions extends - | keyof PublicSchema["CompositeTypes"] + | keyof DefaultSchema["CompositeTypes"] | { schema: keyof Database }, CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { schema: keyof Database @@ -927,6 +1170,17 @@ export type CompositeTypes< : never = never, > = PublicCompositeTypeNameOrOptions extends { schema: keyof Database } ? Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] - : PublicCompositeTypeNameOrOptions extends keyof PublicSchema["CompositeTypes"] - ? PublicSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] + : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] + ? DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] : never + +export const Constants = { + graphql_public: { + Enums: {}, + }, + public: { + Enums: { + agent_types: ["conversational", "data"], + }, + }, +} as const diff --git a/src/agents/worker.ts b/src/agents/worker.ts index 253b656..1054d53 100644 --- a/src/agents/worker.ts +++ b/src/agents/worker.ts @@ -1,25 +1,30 @@ import { ulid } from "ulid" -// import { app } from "../app" import { loadAgent } from "./agentfactory" -import { error } from "console" export const inputOutputTypes = { string: "Text", + "string[]": "Text List", number: "Number", + "number[]": "Number List", + enum: "Enumeration", boolean: "Boolean", unknown: "Unknown", doc: "Documents", references: "References", chat: "Chat", - // audio: "Audio", + json: "JSON", + audio: "Audio", // image: "Image", // video: "Video", - execute: "Execute", + // execute: "Execute", } + + interface WorkerCondition { - operator?: "equals" + operator?: WorkerOperators value?: any + value2?: any // For "between" operator } declare global { @@ -29,6 +34,8 @@ declare global { type WorkerHandles = { [index: string]: NodeIO } type IOTypes = keyof typeof inputOutputTypes + type WorkerOperators = "equals" | "notEquals" | "gt" | "lt" | "gte" | "lte" | "between" | "contains" | "notContains" | "isEmpty" | "isNotEmpty" + interface NodeIO { id?: string title?: string @@ -36,11 +43,15 @@ declare global { prompt?: string direction: "output" | "input" type: IOTypes + enum?: string[] system?: boolean condition?: boolean - // persistent?: boolean value?: any default?: any + mock?: string | number + operator?: WorkerOperators + conditionValue1?: any + conditionValue2?: any } interface WorkerConfig { @@ -51,6 +62,7 @@ declare global { x?: number y?: number condition?: WorkerCondition + conditionable?: boolean } } @@ -58,7 +70,6 @@ declare global { export function buildWorker(w: WorkerConfig) { w.handles = w.handles || {} - w.condition ||= {} const fields: { [index: string]: NodeIO } = {} const worker = { @@ -67,6 +78,9 @@ export function buildWorker(w: WorkerConfig) { registry: null as WorkerRegistryItem, executed: false, error: null as string, + get conditionable() 
{ + return w.conditionable + }, referencedAgent: null, //this cannot be typed because it causes circular references. Cast to Agent when needed @@ -105,17 +119,34 @@ export function buildWorker(w: WorkerConfig) { await worker.getValues(p) console.log("Worker - Executing: ", w.type) + p.logWriter({ + worker, + state: p.state, + }) + + const conditions = worker.handlersArray.filter(h => h.condition) + + if (conditions.length > 0) { - const cond = Object.values(worker.handles).filter(h => h.condition)[0] - if (cond) { - // console.log("Worker - Condition: ", cond) - if ((!!worker.condition.value) !== (!!cond.value)) { - console.log(`Worker ${w.type} - Condition not met`) + let someConditionsMet = false + + for (const cond of conditions) { + console.log(`Worker '${w.type}' condition:`, cond) + const conditionMet = worker.evaluateCondition(cond) + if (conditionMet) { + someConditionsMet = true + break + } + // allConditionsMet = allConditionsMet && conditionMet + } + if (!someConditionsMet) { + console.log(`Worker '${w.type}' - Conditions not met`) worker.updateWorker() p.agent.currentWorker = null p.agent.update() return } + } p.agent.currentWorker = worker @@ -133,19 +164,99 @@ export function buildWorker(w: WorkerConfig) { } p.agent.update() - worker.updateWorker() p.agent.currentWorker = null p.agent.update() + }, + evaluateCondition(handle: NodeIO): boolean { + let { value, conditionValue1, conditionValue2, operator, type } = handle + + if (type === "boolean") { + value = !!value + conditionValue1 = !!conditionValue1 + if (operator === "equals") return conditionValue1 == value + if (operator === "notEquals") return conditionValue1 != value + } + + if (type === "string" || type === "enum") { + value ||= "" + conditionValue1 ||= "" + if (typeof value !== "string") value = String(value) + if (typeof conditionValue1 !== "string") conditionValue1 = String(conditionValue1) + + if (operator === "equals") return conditionValue1 == value + if (operator === "notEquals") return conditionValue1 != value + if (operator === "contains") return conditionValue1.includes(value) + if (operator === "notContains") return !conditionValue1.includes(value) + if (operator === "isEmpty") return value == "" + if (operator === "isNotEmpty") return value != "" + } + + if (type === "number") { + if (typeof value !== "number") value = Number(value) + if (typeof conditionValue1 !== "number") conditionValue1 = Number(conditionValue1) + if (conditionValue2 && typeof conditionValue2 !== "number") conditionValue2 = Number(conditionValue2) + if (value) value = 0 + if (conditionValue1) conditionValue1 = 0 + if (conditionValue2) conditionValue2 = 0 + if (operator === "equals") return conditionValue1 == value + if (operator === "notEquals") return conditionValue1 != value + if (operator === "gt") return value > conditionValue1 + if (operator === "lt") return value < conditionValue1 + if (operator === "gte") return value >= conditionValue1 + if (operator === "lte") return value <= conditionValue1 + if (operator === "between") return value >= conditionValue1 && value <= conditionValue2 + } }, + // evaluateCondition(condValue: any): boolean { + // let { operator, value, value2 } = worker.condition + + // if (operator === "equals") return condValue == value + // if (operator === "notEquals") return condValue != value + + // // String operators + // if (typeof condValue === "string" && typeof value === "string") { + // if (!value) value = "" + // if (typeof value !== "string") value = String(value) + // if (operator === "contains") 
return condValue.includes(value) + // if (operator === "notContains") return !condValue.includes(value) + // if (operator === "isEmpty") return !!value + // if (operator === "isNotEmpty") return !value + // } + + // // Number operators + // if (typeof condValue === "number" && typeof value === "number") { + // if (operator === "gt") { + // return condValue > value + // } + // if (operator === "lt") { + // return condValue < value + // } + // if (operator === "gte") { + // return condValue >= value + // } + // if (operator === "lte") { + // return condValue <= value + // } + // if (operator === "between" && typeof value2 === "number") { + // return condValue >= value && condValue <= value2 + // } + // } + + // // Boolean fallback (for backward compatibility) + // return (!!value) === (!!condValue) + // }, + async getValues(p: AgentParameters) { const connw = worker.getConnectedWokers(p) for (const { worker, source, target } of connw) { await worker.execute(p) - target.value = source.value || target.default + if (target.value === undefined) { + target.value = source.value || target.default + } } }, @@ -167,6 +278,25 @@ export function buildWorker(w: WorkerConfig) { return connwh }, + getInputHandlersByName() { + const handlers: { [index: string]: NodeIO } = {} + for (const e of Object.values(worker.handles)) { + if (e.direction === "input") { + handlers[e.name] = e + } + } + return handlers + }, + getOutputHandlersByName() { + const handlers: { [index: string]: NodeIO } = {} + for (const e of Object.values(worker.handles)) { + if (e.direction === "output") { + handlers[e.name] = e + } + } + return handlers + }, + getConnectedHandler(h: NodeIO, curAgent: any) { return worker.getConnectedHandlers(h, curAgent)[0] || null }, @@ -180,17 +310,17 @@ export function buildWorker(w: WorkerConfig) { getConnectedWokers(p: AgentParameters) { const { agent, agent: { workers } } = p - const connected: AIWorker[] = [] const connwh: { worker: AIWorker, source: NodeIO, target: NodeIO }[] = [] for (const e of Object.values(agent.edges)) { if (e.targetHandle in w.handles) { const cw = workers[e.source] - connected.push(cw) + const source = cw.handles[e.sourceHandle] + const target = w.handles[e.targetHandle] connwh.push({ worker: cw, - source: cw.handles[e.sourceHandle], - target: w.handles[e.targetHandle] + source, + target, }) } @@ -199,17 +329,21 @@ export function buildWorker(w: WorkerConfig) { }, updateWorker() { - worker.lastUpdate = Date.now().valueOf() + worker.lastUpdate++ // = Date.now().valueOf() }, addHandler(h: NodeIO): NodeIO { if (!h.id) h.id = ulid() w.handles[h.id] = h fields[h.name] = h - worker.lastUpdate = Date.now().valueOf() + worker.lastUpdate++ //= Date.now().valueOf() return h }, + createHandlerId() { + return ulid() + }, + addHandlers(handlers: NodeIO[]) { for (const h of handlers) { worker.addHandler(h) @@ -220,18 +354,30 @@ export function buildWorker(w: WorkerConfig) { if (w.handles[id]) { Object.assign(w.handles[id], h) } - worker.lastUpdate = Date.now().valueOf() + worker.lastUpdate++ // = Date.now().valueOf() + }, + + upsertHandler(id: string, h: Partial) { + if (id) { + worker.updateHandler(id, h) + } else { + worker.addHandler(h as NodeIO) + } }, deleteHandler(id: string) { delete w.handles[id] - worker.lastUpdate = Date.now().valueOf() + worker.lastUpdate++ // = Date.now().valueOf() }, getUserHandlers() { return Object.values(w.handles || {}).filter(h => !h.system) }, + getHandlersArray() { + return Object.values(w.handles || {}) + }, + async loadAgent() { if 
(!worker.parameters || !worker.parameters.agent) return const agent = await loadAgent(worker.parameters.agent) @@ -260,9 +406,3 @@ export function buildWorker(w: WorkerConfig) { return worker } - - - - - - diff --git a/src/agents/workers/agent.ts b/src/agents/workers/agent.ts index e1e0856..4c963ec 100644 --- a/src/agents/workers/agent.ts +++ b/src/agents/workers/agent.ts @@ -51,11 +51,11 @@ export const agentWorker: WorkerRegistryItem = { description: "This encapsulates an agent to be executed as a worker", create(agent: Agent) { return agent.initializeWorker( - { type: "agentWorker" }, - [ - // { type: "unknown", direction: "input", title: "Input", name: "input" }, - // { type: "unknown", direction: "output", title: "Ouput", name: "output" }, - ], + { + type: "agentWorker", + conditionable: true, + }, + [], agentWorker ) }, diff --git a/src/agents/workers/ai.ts b/src/agents/workers/ai.ts index 46e7f82..d296432 100644 --- a/src/agents/workers/ai.ts +++ b/src/agents/workers/ai.ts @@ -20,7 +20,7 @@ declare global { documents: NodeIO history: NodeIO answer: NodeIO - condition: NodeIO + // condition: NodeIO } parameters: { temperature?: number @@ -34,18 +34,18 @@ function create(agent: Agent) { return agent.initializeWorker( { type: "ai", + conditionable: true, parameters: { temperature: 0, } }, [ - // { type: "execute", direction: "input", title: "Execute", name: "execute" }, { type: "string", direction: "input", title: "Prompt", name: "prompt" }, { type: "string", direction: "input", title: "Input", name: "input" }, { type: "doc", direction: "input", title: "Documents", name: "documents" }, { type: "chat", direction: "input", title: "History", name: "history" }, { type: "string", direction: "output", title: "Answer", name: "answer" }, - { type: "unknown", direction: "input", title: "Condition", name: "condition", condition: true }, + // { type: "unknown", direction: "input", title: "Condition", name: "condition", condition: true }, ], ai, ) @@ -57,7 +57,6 @@ function create(agent: Agent) { async function execute(worker: BotWorker, { apikeys }: AgentParameters) { - let model: any = null const paramModel = worker.parameters.model || "" diff --git a/src/agents/workers/api.ts b/src/agents/workers/api.ts index 2fcf700..cca87bc 100644 --- a/src/agents/workers/api.ts +++ b/src/agents/workers/api.ts @@ -5,7 +5,6 @@ export interface ApiWorker extends AIWorker { body: NodeIO response: NodeIO error: NodeIO - condition: NodeIO // Ensure condition field exists if using ConditionHandler endpointUrlInput: NodeIO // Added new input field handle } parameters: { @@ -23,291 +22,291 @@ export interface ApiWorker extends AIWorker { function create(agent: Agent) { const worker = agent.initializeWorker( - { + { type: "api", - parameters: { - // REMOVED localApiKeys: {}, - endpoint: '', - method: 'GET', - params: '{}', - headers: '{}', - timeout: 10000, - authType: 'none', - username: '', - selectedKeyName: '' - } + conditionable: true, + parameters: { + // REMOVED localApiKeys: {}, + endpoint: '', + method: 'GET', + params: '{}', + headers: '{}', + timeout: 10000, + authType: 'none', + username: '', + selectedKeyName: '' + } }, [ { type: "string", direction: "input", title: "Body", name: "body" }, { type: "string", direction: "output", title: "Response", name: "response" }, { type: "string", direction: "output", title: "Error", name: "error" }, - { type: "unknown", direction: "input", title: "Condition", name: "condition", condition: true }, - { type: "string", direction: "input", title: "Endpoint URL", name: 
"endpointUrlInput" }, // Added handle definition + { type: "string", direction: "input", title: "Endpoint URL", name: "endpointUrlInput" }, ], api - ); - return worker; + ) + return worker } async function execute(worker: ApiWorker, p: AgentParameters) { - const logPrefix = `[API Worker (${worker.id})]`; - console.log(`${logPrefix} - Executing with parameters:`, worker.parameters); - console.log(`${logPrefix} - Input body value:`, worker.fields.body.value); - console.log(`${logPrefix} - Provided AgentParameters.apikeys:`, p.apikeys ? Object.keys(p.apikeys) : 'None'); + const logPrefix = `[API Worker (${worker.id})]` + console.log(`${logPrefix} - Executing with parameters:`, worker.parameters) + console.log(`${logPrefix} - Input body value:`, worker.fields.body.value) + console.log(`${logPrefix} - Provided AgentParameters.apikeys:`, p.apikeys ? Object.keys(p.apikeys) : 'None') try { // --- Common Setup --- - const runtimeEndpoint = worker.fields.endpointUrlInput?.value as string | undefined; - const fallbackEndpoint = worker.parameters.endpoint || ''; - const endpoint = runtimeEndpoint && runtimeEndpoint.trim() !== '' ? runtimeEndpoint.trim() : fallbackEndpoint; - console.log(`${logPrefix} - Determined Endpoint: ${endpoint} (Runtime: ${runtimeEndpoint}, Fallback: ${fallbackEndpoint})`); + const runtimeEndpoint = worker.fields.endpointUrlInput?.value as string | undefined + const fallbackEndpoint = worker.parameters.endpoint || '' + const endpoint = runtimeEndpoint && runtimeEndpoint.trim() !== '' ? runtimeEndpoint.trim() : fallbackEndpoint + console.log(`${logPrefix} - Determined Endpoint: ${endpoint} (Runtime: ${runtimeEndpoint}, Fallback: ${fallbackEndpoint})`) if (!endpoint) { - console.error(`${logPrefix} - Endpoint is missing!`); - throw new Error("API endpoint is required. Provide it either via the 'Endpoint URL' input handle or in the node parameters."); + console.error(`${logPrefix} - Endpoint is missing!`) + throw new Error("API endpoint is required. 
Provide it either via the 'Endpoint URL' input handle or in the node parameters.") } - const method = (worker.parameters.method || 'GET').toUpperCase(); - const paramsString = worker.parameters.params || '{}'; - const headersString = worker.parameters.headers || '{}'; - const timeout = worker.parameters.timeout || 10000; - const authType = worker.parameters.authType || 'none'; - const username = worker.parameters.username; - const selectedKeyName = worker.parameters.selectedKeyName || ''; - const bodyValue = worker.fields.body.value; - console.log(`${logPrefix} - Method: ${method}, Timeout: ${timeout}, AuthType: ${authType}, SelectedKey: ${selectedKeyName || 'None'}`); + const method = (worker.parameters.method || 'GET').toUpperCase() + const paramsString = worker.parameters.params || '{}' + const headersString = worker.parameters.headers || '{}' + const timeout = worker.parameters.timeout || 10000 + const authType = worker.parameters.authType || 'none' + const username = worker.parameters.username + const selectedKeyName = worker.parameters.selectedKeyName || '' + const bodyValue = worker.fields.body.value + console.log(`${logPrefix} - Method: ${method}, Timeout: ${timeout}, AuthType: ${authType}, SelectedKey: ${selectedKeyName || 'None'}`) - let params = {}; - try { - params = JSON.parse(paramsString); - console.log(`${logPrefix} - Parsed Params:`, params); - } catch (e) { - console.error(`${logPrefix} - Failed to parse Params JSON: ${paramsString}`, e); - throw new Error("Invalid params JSON in parameters."); + let params = {} + try { + params = JSON.parse(paramsString) + console.log(`${logPrefix} - Parsed Params:`, params) + } catch (e) { + console.error(`${logPrefix} - Failed to parse Params JSON: ${paramsString}`, e) + throw new Error("Invalid params JSON in parameters.") } - let headers: Record = {}; - try { - headers = JSON.parse(headersString); - console.log(`${logPrefix} - Parsed Headers (initial):`, headers); - } catch (e) { - console.error(`${logPrefix} - Failed to parse Headers JSON: ${headersString}`, e); - throw new Error("Invalid headers JSON in parameters."); + let headers: Record = {} + try { + headers = JSON.parse(headersString) + console.log(`${logPrefix} - Parsed Headers (initial):`, headers) + } catch (e) { + console.error(`${logPrefix} - Failed to parse Headers JSON: ${headersString}`, e) + throw new Error("Invalid headers JSON in parameters.") } // Clean sensitive headers before logging/using - delete headers['Authorization']; - delete headers['X-API-Key']; + delete headers['Authorization'] + delete headers['X-API-Key'] Object.keys(headers).forEach(key => { - if (key.toLowerCase() === 'x-api-key' || key.toLowerCase() === 'authorization') delete headers[key]; - }); - console.log(`${logPrefix} - Headers after cleaning sensitive ones:`, headers); + if (key.toLowerCase() === 'x-api-key' || key.toLowerCase() === 'authorization') delete headers[key] + }) + console.log(`${logPrefix} - Headers after cleaning sensitive ones:`, headers) - let actualValue: string | undefined; + let actualValue: string | undefined if (authType !== 'none' && authType) { - console.log(`${logPrefix} - Auth required (${authType}). Selected Key Name: ${selectedKeyName}`); - if (!selectedKeyName) { - console.error(`${logPrefix} - Auth type specified but no key name selected.`); - throw new Error(`Auth type '${authType}' selected, but no Stored Key Name chosen.`); + console.log(`${logPrefix} - Auth required (${authType}). 
Selected Key Name: ${selectedKeyName}`) + if (!selectedKeyName) { + console.error(`${logPrefix} - Auth type specified but no key name selected.`) + throw new Error(`Auth type '${authType}' selected, but no Stored Key Name chosen.`) } // 1. Check Environment Variables first - const envVarValue = process.env[selectedKeyName]; + const envVarValue = process.env[selectedKeyName] if (envVarValue !== undefined && envVarValue !== '') { - actualValue = envVarValue; + actualValue = envVarValue // DO NOT log the actual key value here for security - console.log(`${logPrefix} - Found key '${selectedKeyName}' in environment variable.`); + console.log(`${logPrefix} - Found key '${selectedKeyName}' in environment variable.`) } else { - console.log(`${logPrefix} - Key '${selectedKeyName}' not found or empty in environment variables. Checking AgentParameters...`); + console.log(`${logPrefix} - Key '${selectedKeyName}' not found or empty in environment variables. Checking AgentParameters...`) // 2. If not in env, check AgentParameters.apikeys - const paramKeyValue = p.apikeys?.[selectedKeyName]; + const paramKeyValue = p.apikeys?.[selectedKeyName] if (paramKeyValue !== undefined) { - actualValue = paramKeyValue; + actualValue = paramKeyValue // DO NOT log the actual key value here for security - console.log(`${logPrefix} - Found key '${selectedKeyName}' in AgentParameters.apikeys.`); + console.log(`${logPrefix} - Found key '${selectedKeyName}' in AgentParameters.apikeys.`) } else { // 3. If not found in either, throw error - console.error(`${logPrefix} - Key '${selectedKeyName}' not found in environment or AgentParameters.`); - throw new Error(`Selected stored key "${selectedKeyName}" not found in environment variables or provided AgentParameters.apikeys.`); + console.error(`${logPrefix} - Key '${selectedKeyName}' not found in environment or AgentParameters.`) + throw new Error(`Selected stored key "${selectedKeyName}" not found in environment variables or provided AgentParameters.apikeys.`) } } } switch (authType) { case 'basic': - console.log(`${logPrefix} - Applying Basic Auth.`); - if (!username) { - console.error(`${logPrefix} - Basic Auth selected but username is missing.`); - throw new Error("Username required for Basic Auth."); + console.log(`${logPrefix} - Applying Basic Auth.`) + if (!username) { + console.error(`${logPrefix} - Basic Auth selected but username is missing.`) + throw new Error("Username required for Basic Auth.") } - if (actualValue !== undefined) { - headers.Authorization = `Basic ${btoa(`${username}:${actualValue}`)}`; - console.log(`${logPrefix} - Added Basic Auth header.`); + if (actualValue !== undefined) { + headers.Authorization = `Basic ${btoa(`${username}:${actualValue}`)}` + console.log(`${logPrefix} - Added Basic Auth header.`) } else { - console.warn(`${logPrefix} - Basic Auth selected but key value was not found.`); + console.warn(`${logPrefix} - Basic Auth selected but key value was not found.`) } - break; + break case 'bearer': - console.log(`${logPrefix} - Applying Bearer Auth.`); - if (actualValue !== undefined) { - headers.Authorization = `Bearer ${actualValue}`; - console.log(`${logPrefix} - Added Bearer Auth header.`); + console.log(`${logPrefix} - Applying Bearer Auth.`) + if (actualValue !== undefined) { + headers.Authorization = `Bearer ${actualValue}` + console.log(`${logPrefix} - Added Bearer Auth header.`) } else { - console.warn(`${logPrefix} - Bearer Auth selected but key value was not found.`); + console.warn(`${logPrefix} - Bearer Auth selected but key 
value was not found.`) } - break; + break case 'api_key': - console.log(`${logPrefix} - Applying API Key Auth (X-API-Key header).`); - if (actualValue !== undefined) { - headers['X-API-Key'] = actualValue; - console.log(`${logPrefix} - Added X-API-Key header.`); + console.log(`${logPrefix} - Applying API Key Auth (X-API-Key header).`) + if (actualValue !== undefined) { + headers['X-API-Key'] = actualValue + console.log(`${logPrefix} - Added X-API-Key header.`) } else { - console.warn(`${logPrefix} - API Key Auth selected but key value was not found.`); + console.warn(`${logPrefix} - API Key Auth selected but key value was not found.`) } - break; + break default: - console.log(`${logPrefix} - No Auth or unknown auth type: ${authType}.`); + console.log(`${logPrefix} - No Auth or unknown auth type: ${authType}.`) } - let data = undefined; + let data = undefined if (method !== 'GET' && bodyValue) { - data = bodyValue; - console.log(`${logPrefix} - Method is ${method}, using body value.`); + data = bodyValue + console.log(`${logPrefix} - Method is ${method}, using body value.`) // Attempt to set Content-Type if not present and body looks like JSON if (!headers['Content-Type'] && typeof data === 'string' && (data.trim().startsWith('{') || data.trim().startsWith('['))) { - headers['Content-Type'] = 'application/json'; - console.log(`${logPrefix} - Auto-detected JSON body, setting Content-Type to application/json.`); + headers['Content-Type'] = 'application/json' + console.log(`${logPrefix} - Auto-detected JSON body, setting Content-Type to application/json.`) } } // --- End Common Setup --- - console.log(`${logPrefix} - Final Headers before request:`, headers); - console.log(`${logPrefix} - Final Params before request:`, params); - console.log(`${logPrefix} - Final Data before request:`, data ? (typeof data === 'string' ? data.substring(0, 100) + '...' : '[Non-string data]') : 'None'); + console.log(`${logPrefix} - Final Headers before request:`, headers) + console.log(`${logPrefix} - Final Params before request:`, params) + console.log(`${logPrefix} - Final Data before request:`, data ? (typeof data === 'string' ? data.substring(0, 100) + '...' : '[Non-string data]') : 'None') // --- Environment Check and Call --- - const isBrowser = typeof window !== 'undefined'; - let apiResponseData: any; - let apiResponseStatus: number | undefined; - let apiResponseStatusText: string | undefined; + const isBrowser = typeof window !== 'undefined' + let apiResponseData: any + let apiResponseStatus: number | undefined + let apiResponseStatusText: string | undefined if (isBrowser) { // FRONTEND: Use the proxy - console.log(`${logPrefix} [Browser] - Using proxy /api/axiosFetch for: ${endpoint}`); - const proxyPayload = { url: endpoint, method, headers, params, data, timeout }; - console.log(`${logPrefix} [Browser] - Proxy payload:`, proxyPayload); + console.log(`${logPrefix} [Browser] - Using proxy /api/axiosFetch for: ${endpoint}`) + const proxyPayload = { url: endpoint, method, headers, params, data, timeout } + console.log(`${logPrefix} [Browser] - Proxy payload:`, proxyPayload) const proxyResponse = await axios({ method: 'POST', - url: '/api/axiosFetch', + url: '/api/axiosFetch', data: proxyPayload - }); - console.log(`${logPrefix} [Browser] - Proxy response status: ${proxyResponse.status}`); + }) + console.log(`${logPrefix} [Browser] - Proxy response status: ${proxyResponse.status}`) if (proxyResponse.status !== 200) { - console.error(`${logPrefix} [Browser] - Proxy service request failed! 
Status: ${proxyResponse.status}`); - throw new Error(`Proxy service request failed: ${proxyResponse.status} ${proxyResponse.statusText}`); + console.error(`${logPrefix} [Browser] - Proxy service request failed! Status: ${proxyResponse.status}`) + throw new Error(`Proxy service request failed: ${proxyResponse.status} ${proxyResponse.statusText}`) } // Assuming proxy returns { status, statusText, data, error? } - const proxyResult = proxyResponse.data; - console.log(`${logPrefix} [Browser] - Proxy result:`, proxyResult); + const proxyResult = proxyResponse.data + console.log(`${logPrefix} [Browser] - Proxy result:`, proxyResult) if (proxyResult?.error) { - console.error(`${logPrefix} [Browser] - Error received from proxy: ${proxyResult.error}`); - throw new Error(`Proxy error: ${proxyResult.error} ${proxyResult.message || ''}`); + console.error(`${logPrefix} [Browser] - Error received from proxy: ${proxyResult.error}`) + throw new Error(`Proxy error: ${proxyResult.error} ${proxyResult.message || ''}`) } - apiResponseData = proxyResult?.data; - apiResponseStatus = proxyResult?.status; - apiResponseStatusText = proxyResult?.statusText; + apiResponseData = proxyResult?.data + apiResponseStatus = proxyResult?.status + apiResponseStatusText = proxyResult?.statusText } else { // BACKEND: Make direct call - console.log(`${logPrefix} [Node.js] - Making direct ${method} request to: ${endpoint}`); + console.log(`${logPrefix} [Node.js] - Making direct ${method} request to: ${endpoint}`) const axiosConfig = { - method: method as any, - url: endpoint, - headers: headers, - params: params, - data: data, - timeout: timeout, - validateStatus: (status) => status >= 200 && status < 500, // Handle 4xx locally - }; + method: method as any, + url: endpoint, + headers: headers, + params: params, + data: data, + timeout: timeout, + validateStatus: (status) => status >= 200 && status < 500, // Handle 4xx locally + } console.log(`${logPrefix} [Node.js] - Axios config:`, { - ...axiosConfig, + ...axiosConfig, headers: { ...axiosConfig.headers, Authorization: '[REDACTED]', 'X-API-Key': '[REDACTED]' } // Redact sensitive headers for logging - }); - const directResponse = await axios(axiosConfig); - console.log(`${logPrefix} [Node.js] - Direct response status: ${directResponse.status} ${directResponse.statusText}`); - apiResponseData = directResponse.data; - apiResponseStatus = directResponse.status; - apiResponseStatusText = directResponse.statusText; + }) + const directResponse = await axios(axiosConfig) + console.log(`${logPrefix} [Node.js] - Direct response status: ${directResponse.status} ${directResponse.statusText}`) + apiResponseData = directResponse.data + apiResponseStatus = directResponse.status + apiResponseStatusText = directResponse.statusText } // --- End Environment Check and Call --- // --- Common Response Handling --- - console.log(`${logPrefix} - Handling response. Status: ${apiResponseStatus}, StatusText: ${apiResponseStatusText}`); - worker.fields.error.value = ''; // Clear previous error + console.log(`${logPrefix} - Handling response. 
Status: ${apiResponseStatus}, StatusText: ${apiResponseStatusText}`) + worker.fields.error.value = '' // Clear previous error if (apiResponseStatus && apiResponseStatus >= 200 && apiResponseStatus < 300) { // Success (2xx) - console.log(`${logPrefix} - Success Response Status: ${apiResponseStatus}`); + console.log(`${logPrefix} - Success Response Status: ${apiResponseStatus}`) if (apiResponseData !== undefined && apiResponseData !== null) { - const responseString = typeof apiResponseData === 'object' - ? JSON.stringify(apiResponseData, null, 2) - : String(apiResponseData); - worker.fields.response.value = responseString; - console.log(`${logPrefix} - Setting response field (first 200 chars):`, responseString.substring(0, 200) + '...'); + const responseString = typeof apiResponseData === 'object' + ? JSON.stringify(apiResponseData, null, 2) + : String(apiResponseData) + worker.fields.response.value = responseString + console.log(`${logPrefix} - Setting response field (first 200 chars):`, responseString.substring(0, 200) + '...') } else { - const successMsg = `Request successful: ${apiResponseStatus} ${apiResponseStatusText || ''}`; - worker.fields.response.value = successMsg; - console.log(`${logPrefix} - Setting response field: ${successMsg}`); + const successMsg = `Request successful: ${apiResponseStatus} ${apiResponseStatusText || ''}` + worker.fields.response.value = successMsg + console.log(`${logPrefix} - Setting response field: ${successMsg}`) } } else { // Non-Success (3xx, 4xx, or error from proxy) - const errorStatus = apiResponseStatus || 'Unknown'; - const errorStatusText = apiResponseStatusText || 'Error'; - const errorMsg = `Request failed with status code ${errorStatus} (${errorStatusText})`; - let errorDataString = ''; + const errorStatus = apiResponseStatus || 'Unknown' + const errorStatusText = apiResponseStatusText || 'Error' + const errorMsg = `Request failed with status code ${errorStatus} (${errorStatusText})` + let errorDataString = '' if (apiResponseData) { - try { - errorDataString = typeof apiResponseData === 'object' ? JSON.stringify(apiResponseData) : String(apiResponseData); - } catch (e) { errorDataString = '[Could not stringify error data]'; } + try { + errorDataString = typeof apiResponseData === 'object' ? JSON.stringify(apiResponseData) : String(apiResponseData) + } catch (e) { errorDataString = '[Could not stringify error data]' } } - console.warn(`${logPrefix} - Non-Success Response Status: ${errorStatus}. Data: ${errorDataString}`); - const fullErrorMsg = errorDataString ? `${errorMsg} (Response Data: ${errorDataString})` : errorMsg; - worker.fields.error.value = fullErrorMsg; - worker.fields.response.value = ''; - console.log(`${logPrefix} - Setting error field: ${fullErrorMsg}`); + console.warn(`${logPrefix} - Non-Success Response Status: ${errorStatus}. Data: ${errorDataString}`) + const fullErrorMsg = errorDataString ? 
`${errorMsg} (Response Data: ${errorDataString})` : errorMsg + worker.fields.error.value = fullErrorMsg + worker.fields.response.value = '' + console.log(`${logPrefix} - Setting error field: ${fullErrorMsg}`) } // --- End Common Response Handling --- } catch (error) { // --- Common Error Handling (Network errors, 5xx, setup errors, proxy errors) --- - console.error(`${logPrefix} - Caught error in main try block:`, error); - worker.fields.response.value = ''; - let errorMessage = "An unknown error occurred."; + console.error(`${logPrefix} - Caught error in main try block:`, error) + worker.fields.response.value = '' + let errorMessage = "An unknown error occurred." if (axios.isAxiosError(error)) { - errorMessage = error.message; - if (error.response) { - // Error with a response (e.g., 5xx from direct call, or maybe proxy itself failed) - errorMessage = `Request failed with status code ${error.response.status} (${error.response.statusText || 'Error'})`; - console.error(`${logPrefix} - Axios error with response: Status=${error.response.status}, Data=`, error.response.data); - if (error.response.data) { - try { errorMessage += ` (Response Data: ${typeof error.response.data === 'object' ? JSON.stringify(error.response.data) : String(error.response.data)})`; } - catch (e) { /* ignore stringify error */ } - } - } else if (error.request) { - // No response received or setup failed - errorMessage = `No response received or request setup failed: ${error.message}`; - console.error(`${logPrefix} - Axios error without response:`, error.request); - } else { - console.error(`${logPrefix} - Axios error during setup: ${error.message}`); + errorMessage = error.message + if (error.response) { + // Error with a response (e.g., 5xx from direct call, or maybe proxy itself failed) + errorMessage = `Request failed with status code ${error.response.status} (${error.response.statusText || 'Error'})` + console.error(`${logPrefix} - Axios error with response: Status=${error.response.status}, Data=`, error.response.data) + if (error.response.data) { + try { errorMessage += ` (Response Data: ${typeof error.response.data === 'object' ? 
JSON.stringify(error.response.data) : String(error.response.data)})` } + catch (e) { /* ignore stringify error */ } } - } else if (error instanceof Error) { - errorMessage = error.message; - console.error(`${logPrefix} - Non-Axios error: ${error.message}`, error.stack); - } else { - try { errorMessage = JSON.stringify(error); } catch (e) { errorMessage = String(error); } - console.error(`${logPrefix} - Unknown error type:`, error); + } else if (error.request) { + // No response received or setup failed + errorMessage = `No response received or request setup failed: ${error.message}` + console.error(`${logPrefix} - Axios error without response:`, error.request) + } else { + console.error(`${logPrefix} - Axios error during setup: ${error.message}`) + } + } else if (error instanceof Error) { + errorMessage = error.message + console.error(`${logPrefix} - Non-Axios error: ${error.message}`, error.stack) + } else { + try { errorMessage = JSON.stringify(error) } catch (e) { errorMessage = String(error) } + console.error(`${logPrefix} - Unknown error type:`, error) } - worker.fields.error.value = errorMessage; - console.log(`${logPrefix} - Setting error field from catch block: ${errorMessage}`); + worker.fields.error.value = errorMessage + console.log(`${logPrefix} - Setting error field from catch block: ${errorMessage}`) // --- End Common Error Handling --- } } @@ -315,7 +314,7 @@ async function execute(worker: ApiWorker, p: AgentParameters) { export const api: WorkerRegistryItem = { title: "API Call", type: "api", - category: "tool", + category: "io", description: "This worker allows you to make external API calls to use other external services.", // Updated description execute, create, diff --git a/src/agents/workers/combine.ts b/src/agents/workers/combine.ts index 7ef069e..0108f7e 100644 --- a/src/agents/workers/combine.ts +++ b/src/agents/workers/combine.ts @@ -6,41 +6,52 @@ declare global { input1: NodeIO input2: NodeIO output: NodeIO + [key: string]: NodeIO } parameters: { mode?: CombineWorkerMode + inputCount?: number } } } async function execute(worker: CombineWorker) { - if (worker.parameters.mode === "nonempty") { + // Get all input fields + const inputFields = Object.entries(worker.fields).filter(([key, field]) => key.startsWith('input') && field.direction === 'input') + // Sort by input number for consistent ordering + .sort((a, b) => { + const aNum = parseInt(a[0].replace('input', '')) || 0 + const bNum = parseInt(b[0].replace('input', '')) || 0 + return aNum - bNum + }) + .map(([_, field]) => field) - if (Array.isArray(worker.fields.input1.value)) { - if (worker.fields.input1.value.length > 0) { - worker.fields.output.value = worker.fields.input1.value - } else { - worker.fields.output.value = worker.fields.input2.value + if (worker.parameters.mode === "nonempty") { + // Find the first non-empty input + for (const input of inputFields) { + if (Array.isArray(input.value)) { + if (input.value.length > 0) { + worker.fields.output.value = input.value + return + } + } else if (input.value) { + worker.fields.output.value = input.value + return } - return } - if (worker.fields.input1.value) { - worker.fields.output.value = worker.fields.input1.value - } else { - worker.fields.output.value = worker.fields.input2.value - } + // If all inputs are empty, use the last input (which will be empty) + worker.fields.output.value = inputFields[inputFields.length - 1]?.value || null } if (worker.parameters.mode === "concat") { - - if (typeof worker.fields.input1.value == "string") { - 
worker.fields.output.value = `${worker.fields.input1.value || ""}${worker.fields.input2.value || ""}` - } else if (Array.isArray(worker.fields.input1.value)) { - worker.fields.output.value = [...(worker.fields.input1.value || []), ...(worker.fields.input2.value || [])] + if (typeof inputFields[0]?.value === "string") { + worker.fields.output.value = inputFields.map(input => input.value || "").join("").trim() + } + else if (Array.isArray(inputFields[0]?.value)) { + worker.fields.output.value = inputFields.flatMap(input => Array.isArray(input.value) ? input.value : []) } - } } @@ -50,21 +61,20 @@ export const combine: WorkerRegistryItem = { execute, category: "tool", type: "combine", - description: "This worker allows you to combine two inputs into one output", + description: "This worker allows you to combine multiple inputs into one output", create(agent: Agent) { const w = agent.initializeWorker( { type: "combine" }, [ - { type: "unknown", direction: "input", title: "Input", name: "input1" }, - { type: "unknown", direction: "input", title: "Input", name: "input2" }, + { type: "unknown", direction: "input", title: "Input 1", name: "input1" }, + { type: "unknown", direction: "input", title: "Input 2", name: "input2" }, { type: "unknown", direction: "output", title: "Result", name: "output" }, ], combine ) as CombineWorker w.parameters.mode = "nonempty" + w.parameters.inputCount = 2 // Default to 2 inputs for backward compatibility return w }, get registry() { return combine }, } - - diff --git a/src/agents/workers/display.ts b/src/agents/workers/display.ts index 83a1385..0b83598 100644 --- a/src/agents/workers/display.ts +++ b/src/agents/workers/display.ts @@ -9,7 +9,9 @@ declare global { } async function execute(worker: AIWorker) { + worker.fields.output.value = worker.fields.input.value + } export const display: WorkerRegistryItem = { diff --git a/src/agents/workers/documentselector.ts b/src/agents/workers/documentselector.ts new file mode 100644 index 0000000..8813868 --- /dev/null +++ b/src/agents/workers/documentselector.ts @@ -0,0 +1,118 @@ +import { createJsonTranslator, createLanguageModel } from "typechat" +import { createTypeScriptJsonValidator } from "typechat/ts" +import { createZodJsonValidator } from "typechat/zod" +import { z } from "zod" + +declare global { + interface DocumentSelectorWorker extends AIWorker { + fields: { + input: NodeIO + output: NodeIO + prompt: NodeIO + documents: NodeIO + }, + parameters: { + results?: number + } + + } +} + +async function execute(worker: DocumentSelectorWorker, p: AgentParameters) { + + let question = worker.fields.input.value + const results = worker.parameters.results || 8 + const documents = worker.fields.documents.value || [] + const prompt = worker.fields.prompt.value || "" + const searchResults = [] + + worker.fields.output.value = searchResults + + if (!question || !documents || documents.length === 0) return + + let context = documents.map((doc: VectorDocument, id) => ` +id:${id} +Title: ${doc.title || ""} +Content: ${doc.body || ""} +`).join("") + + const schema = + ` + The input contains a context surrounded by .... + The context contains a list of articles with an id, a title and a content. + After the context, there is a question surrounded by .... 
+ Use the question to search the context to pick and prioritize the ${results} most relevant article ids using the following criteria:
+ ${prompt}
+`
+  question = `
+
+${context}
+
+
+
+${question}
+
+`
+  const schemaModel = createLanguageModel({
+    OPENAI_MODEL: "gpt-4o",
+    OPENAI_API_KEY: p.apikeys.openai,
+  })
+
+  const documentsResponse = z.object({
+    articles: z.number().array().describe(schema)
+  })
+
+  const documentSelectorSchema = {
+    Response: documentsResponse
+  }
+
+
+  const validator = createZodJsonValidator(documentSelectorSchema, "Response")
+  const translator = createJsonTranslator(schemaModel, validator)
+
+  const response = await translator.translate(question)
+
+  if (response.success) {
+    response.data.articles ||= []
+    console.log(response.data)
+    for (const doc of response.data.articles) {
+      searchResults.push(documents[doc])
+    }
+  } else {
+    const error = (response as any).error
+    console.error("Error: ", error)
+    worker.error = error
+  }
+
+}
+
+export const documentSelector: WorkerRegistryItem = {
+  title: "Selector",
+  execute,
+  category: "tool",
+  type: "documentSelector",
+  description: "This worker selects documents based on a prompt.",
+  create(agent: Agent) {
+    return agent.initializeWorker(
+      {
+        type: "documentSelector",
+        conditionable: true,
+        parameters: {
+          results: 8,
+        },
+      },
+      [
+        { type: "doc", direction: "input", title: "Documents", name: "documents" },
+        { type: "doc", direction: "output", title: "Output", name: "output" },
+        { type: "string", direction: "input", title: "Input", name: "input" },
+        { type: "string", direction: "input", title: "Prompt", name: "prompt" },
+      ],
+      documentSelector
+    )
+
+  },
+  get registry() { return documentSelector },
+}
+
diff --git a/src/agents/workers/input.ts b/src/agents/workers/input.ts
index 642afe5..0cf6fff 100644
--- a/src/agents/workers/input.ts
+++ b/src/agents/workers/input.ts
@@ -7,12 +7,9 @@ function create(agent: Agent) {
   return agent.initializeWorker(
     { type: "request" },
-    [
-      // { type: "execute", direction: "output", title: "Next", name: "next" },
-    ],
+    [],
     request
   )
-
 }
 async function execute(worker: InputWorker, p: AgentParameters) {
@@ -20,6 +17,7 @@ async function execute(worker: InputWorker, p: AgentParameters) {
   for (const key in worker.handles) {
     const h = worker.handles[key]
     h.value = p.input[h.name]
+    if (p.debug && !h.value) h.value = h.mock
   }
 }
diff --git a/src/agents/workers/schema.ts b/src/agents/workers/schema.ts
index 81b4f4b..f942c63 100644
--- a/src/agents/workers/schema.ts
+++ b/src/agents/workers/schema.ts
@@ -1,15 +1,17 @@
 import { createJsonTranslator, createLanguageModel } from "typechat"
-import { createTypeScriptJsonValidator } from "typechat/ts"
+import { createZodJsonValidator } from "typechat/zod"
+import { z } from "zod"
 declare global {
   interface SchemaWorker extends AIWorker {
     fields: {
       input: NodeIO
-      condition: NodeIO
+      json: NodeIO
+      // condition: NodeIO
     }
     parameters: {
-      temperature?: number
+      model?: string
     }
   }
@@ -18,74 +20,86 @@ declare global {
 function create(agent: Agent) {
   return agent.initializeWorker(
-    { type: "schema" },
+    {
+      type: "schema",
+      parameters: {
+        model: "gpt-4o",
+      },
+      conditionable: true,
+    },
     [
       { type: "string", direction: "input", title: "Input", name: "input" },
-      { type: "unknown", direction: "input", title: "Condition", name: "condition", condition: true },
+      // { type: "unknown", direction: "input", title: "Condition", name: "condition", condition: true },
+      { type: "json", direction: "output",
title: "JSON", name: "json", system: true }, ], schema ) } -async function execute(worker: AIWorker, p: AgentParameters) { +async function execute(worker: SchemaWorker, p: AgentParameters) { const handlers = worker.getUserHandlers() const input = worker.fields.input.value if (!input) return - let schema = ` - - export interface Schema { - - ` + const schemaFields: Record = {} for (let s of handlers) { - let type = "" + let fieldSchema: z.ZodTypeAny + if (s.type == "boolean") { - type = "boolean" + fieldSchema = z.boolean() } else if (s.type == "number") { - type = "number" + fieldSchema = z.number() } else if (s.type == "string") { - type = "string" + fieldSchema = z.string() + } else if (s.type == "string[]") { + fieldSchema = z.array(z.string()) + } else if (s.type == "number[]") { + fieldSchema = z.array(z.number()) + } else if (s.type == "enum" && s.enum && s.enum.length > 0) { + fieldSchema = z.enum(s.enum as [string, ...string[]]) } else { - type = "any" + fieldSchema = z.any() } - schema += ` - - /* - ${s.prompt} - */ - ${s.name}?: ${type} - - ` - - } - - schema += ` - + schemaFields[s.name] = fieldSchema.optional().describe(s.prompt || "") } - ` + const schema = z.object(schemaFields) + const OPENAI_MODEL = worker.parameters.model || "gpt-4o" const schemaModel = createLanguageModel({ - OPENAI_MODEL: "gpt-4o", + OPENAI_MODEL, OPENAI_API_KEY: p.apikeys.openai, }) - const validator = createTypeScriptJsonValidator(schema, "Schema") - const translator = createJsonTranslator(schemaModel, validator) + const dataExtractionSchema = { + Response: schema + } + + const validator = createZodJsonValidator(dataExtractionSchema, "Response") + const translator = createJsonTranslator>(schemaModel, validator) const routeresponse = await translator.translate(input) + const jsonout = {} if (routeresponse.success) { for (const key in routeresponse.data) { const h = handlers.find((h) => h.name == key) - if (h) worker.fields[h.name].value = routeresponse.data[key] + if (h) { + worker.fields[h.name].value = routeresponse.data[key] + if (worker.fields[h.name].value) { + jsonout[h.name] = worker.fields[h.name].value + } + } } } + worker.fields.json.value = jsonout + + } @@ -98,4 +112,3 @@ export const schema: WorkerRegistryItem = { create, get registry() { return schema }, } - diff --git a/src/agents/workers/search.ts b/src/agents/workers/search.ts index 58d135e..668285e 100644 --- a/src/agents/workers/search.ts +++ b/src/agents/workers/search.ts @@ -7,9 +7,8 @@ import { createClient, SupabaseClient } from '@supabase/supabase-js' // Initialize Supabase client using environment variables // IMPORTANT: Ensure SUPABASE_URL, SUPABASE_ANON_KEY, and OPENAI_API_KEY are set in the worker's environment -const supabaseUrl = process.env.SUPABASE_URL -const supabaseAnonKey = process.env.SUPABASE_ANON_KEY -const openaiApiKey = process.env.OPENAI_API_KEY +const supabaseUrl = process.env.VITE_SUPABASE_URL +const supabaseAnonKey = process.env.VITE_SUPABASE_ANON_KEY let supabase: SupabaseClient @@ -42,21 +41,21 @@ if (!supabaseUrl || !supabaseAnonKey) { * @param {string} text - The text to generate an embedding for * @returns {Promise<{ data: number[] | null, error: Error | null }>} The embedding vector or null/error */ -async function generateEmbedding(text: string): Promise<{ +async function generateEmbedding(text: string, apiKeys: APIKeys): Promise<{ data: number[] | null, error: Error | null }> { - if (!openaiApiKey) { - const errorMsg = 'Missing OpenAI API key. 
Please set OPENAI_API_KEY environment variable.'; - console.error(`[Worker Embedding] ${errorMsg}`); - return { data: null, error: new Error(errorMsg) }; - } - try { console.log('[Worker Embedding] Starting OpenAI embedding generation...') - + + const openaiApiKey = apiKeys.openai + if (!openaiApiKey) { + const errorMsg = 'Missing OpenAI API key. Please set OPENAI_API_KEY environment variable.' + console.error(`[Worker Embedding] ${errorMsg}`) + return { data: null, error: new Error(errorMsg) } + } const input = text.replace(/\n/g, ' ') // OpenAI recommendation - + const response = await fetch('https://api.openai.com/v1/embeddings', { method: 'POST', headers: { @@ -68,40 +67,40 @@ async function generateEmbedding(text: string): Promise<{ input }) }) - + if (!response.ok) { const errorData = await response.json().catch(() => ({ // Catch potential JSON parsing errors error: { message: `HTTP error ${response.status} - ${response.statusText}` } })) - console.error('[Worker Embedding] OpenAI API error response:', errorData); + console.error('[Worker Embedding] OpenAI API error response:', errorData) throw new Error(`OpenAI API error: ${errorData.error?.message || 'Unknown error'}`) } - + const data = await response.json() - + if (!data.data || !data.data[0] || !data.data[0].embedding) { - throw new Error('Invalid response structure from OpenAI API.'); + throw new Error('Invalid response structure from OpenAI API.') } const embedding = data.data[0].embedding console.log('[Worker Embedding] OpenAI embedding generated successfully.') - + // Verify the embedding size if (embedding.length !== 1536) { throw new Error(`Expected embedding dimension of 1536, but got ${embedding.length}`) } - + return { data: embedding, error: null } } catch (error) { console.error('[Worker Embedding] Error generating OpenAI embedding:', error) let errorMessage = 'Unknown error occurred during OpenAI embedding generation.' 
-
+
     if (error instanceof Error) {
-      errorMessage = error.message; // Use the specific error message
+      errorMessage = error.message // Use the specific error message
    }
-
-    return {
-      data: null,
+
+    return {
+      data: null,
      error: new Error(errorMessage) // Ensure an Error object is returned
    }
  }
@@ -124,13 +123,11 @@ declare global {
      distance: NodeIO
      maxResults: NodeIO
      collections: NodeIO
-
-      condition: NodeIO
    },
    parameters: {
      engine?: "weaviate" | "exa" | "supabase"
      maxResults?: number
-      domain?: string
+      domain?: string[]
      distance?: number
      collections?: string[]
    }
@@ -148,7 +145,7 @@ declare global {
    ref?: string
    title: string
    body: string
-    domain?: string
+    domain?: string[]
    source?: string
    locale?: string
    lat?: number
@@ -160,7 +157,7 @@ function deduplicateDocuments(array: VectorDocument[]): VectorDocument[] {
  const seen: Record<string, boolean> = {}
  const deduped: VectorDocument[] = []
  for (const d of array) {
-    const key = `${d.source || 'unknown'}-${d.ref || d.title || 'untitled'}`;
+    const key = `${d.source || 'unknown'}-${d.ref || d.title || 'untitled'}`
    if (!seen[key]) {
      deduped.push(d)
      seen[key] = true
@@ -169,21 +166,21 @@ function deduplicateDocuments(array: VectorDocument[]): VectorDocument[] {
  return deduped
}
-async function execute(worker: SearchWorker) {
-  console.log("Executing search worker with parameters:", worker.parameters);
+async function execute(worker: SearchWorker, { apikeys }: AgentParameters) {
+  console.log("Executing search worker with parameters:", worker.parameters)
-  worker.fields.output.value = [];
-  worker.fields.references.value = [];
+  worker.fields.output.value = []
+  worker.fields.references.value = []
-  const query = worker.fields.input.value || "";
+  const query = worker.fields.input.value || ""
  if (!query) {
-    console.log("Search worker: No query provided.");
-    return;
+    console.log("Search worker: No query provided.")
+    return
  }
-  const engine = worker.parameters.engine || "weaviate"; // Default to an external engine
-  let finalResults: VectorDocument[] = [];
-  let deduped: VectorDocument[] = [];
+  const engine = worker.parameters.engine || "weaviate" // Default to an external engine
+  let finalResults: VectorDocument[] = []
+  let deduped: VectorDocument[] = []
  // --- Engine-Specific Search Logic ---
@@ -193,109 +190,109 @@ async function execute(worker: SearchWorker) {
      ? worker.parameters.collections
      : typeof worker.parameters.collections === 'string'
        ? [worker.parameters.collections]
-        : [];
+        : []
-    const limit = worker.parameters.maxResults || 5;
-    const similarityThreshold = worker.parameters.distance ?? 0.3;
+    const limit = worker.parameters.maxResults || 5
+    const similarityThreshold = worker.parameters.distance ??
0.3 if (collectionIds && collectionIds.length > 0) { - console.log(`[Supabase Path] Searching ${collectionIds.length} collections:`, collectionIds); + console.log(`[Supabase Path] Searching ${collectionIds.length} collections:`, collectionIds) try { - const { data: queryEmbedding, error: embeddingError } = await generateEmbedding(query); + const { data: queryEmbedding, error: embeddingError } = await generateEmbedding(query, apikeys) if (embeddingError || !queryEmbedding) { - throw embeddingError || new Error("Failed to generate query embedding."); + throw embeddingError || new Error("Failed to generate query embedding.") } - console.log(`[Supabase Path] Generated query embedding (length: ${queryEmbedding.length}).`); + console.log(`[Supabase Path] Generated query embedding (length: ${queryEmbedding.length}).`) const searchPromises = collectionIds.map(async (collectionId) => { - console.log(` - Searching collection: ${collectionId} (Limit: ${limit}, Threshold: ${similarityThreshold})`); - try { - const { data: supabaseMatches, error: rpcError } = await supabase.rpc('similarity_search', { - query_vector: queryEmbedding, - target_collection_id: collectionId, - match_threshold: similarityThreshold, - match_count: limit - }); - if (rpcError) throw rpcError; - - return (supabaseMatches || []).map((match: any) => { - const sim = match.similarity !== null && match.similarity !== undefined ? match.similarity.toFixed(3) : 'N/A'; - const title = `${match.name || `[DB] ${match.source_type || 'unknown'}:${(match.id || 'no-id').substring(0, 8)}`} (Sim: ${sim})`; - return { - ref: `supabase:${match.source_type || 'unknown'}:${match.id || 'no-id'}`, - title: title, - body: match.content || '', - source: `supabase_collection:${collectionId}`, - }; - }); - } catch (error) { - console.error(` - Error searching collection ${collectionId}:`, error); - return []; - } - }); - - const resultsFromAllCollections = await Promise.all(searchPromises); - finalResults = resultsFromAllCollections.flat(); - console.log(`[Supabase Path] Total results: ${finalResults.length}`); + console.log(` - Searching collection: ${collectionId} (Limit: ${limit}, Threshold: ${similarityThreshold})`) + try { + const { data: supabaseMatches, error: rpcError } = await supabase.rpc('similarity_search', { + query_vector: queryEmbedding, + target_collection_id: collectionId, + match_threshold: similarityThreshold, + match_count: limit + }) + if (rpcError) throw rpcError + + return (supabaseMatches || []).map((match: any) => { + const sim = match.similarity !== null && match.similarity !== undefined ? 
match.similarity.toFixed(3) : 'N/A' + const title = `${match.name || `[DB] ${match.source_type || 'unknown'}:${(match.id || 'no-id').substring(0, 8)}`} (Sim: ${sim})` + return { + ref: `supabase:${match.source_type || 'unknown'}:${match.id || 'no-id'}`, + title: title, + body: match.content || '', + source: `supabase_collection:${collectionId}`, + } + }) + } catch (error) { + console.error(` - Error searching collection ${collectionId}:`, error) + return [] + } + }) + + const resultsFromAllCollections = await Promise.all(searchPromises) + finalResults = resultsFromAllCollections.flat() + console.log(`[Supabase Path] Total results: ${finalResults.length}`) } catch (error) { - console.error("[Supabase Path] Error during search process:", error); - finalResults = []; + console.error("[Supabase Path] Error during search process:", error) + finalResults = [] } } else { - console.log("[Supabase Path] No collection IDs provided."); - finalResults = []; + console.log("[Supabase Path] No collection IDs provided.") + finalResults = [] } } else { // --- External Engine Search Path (Weaviate, Exa, etc.) --- - const domain = worker.parameters.domain; - const limit = worker.parameters.maxResults || 5; - const externalSearchDistance = worker.parameters.distance ?? 0.5; // Use distance, default 0.5 + const domain = Array.isArray(worker.parameters.domain) ? worker.parameters.domain : [worker.parameters.domain] + const limit = worker.parameters.maxResults || 5 + const externalSearchDistance = worker.parameters.distance ?? 0.5 // Use distance, default 0.5 if (domain) { - console.log(`[External Path] Searching engine '${engine}' in domain: ${domain} (Limit: ${limit}, Distance: ${externalSearchDistance})`); + console.log(`[External Path] Searching engine '${engine}' in domain: ${domain} (Limit: ${limit}, Distance: ${externalSearchDistance})`) try { - const externalSearchUrl = "https://directus-qa-support.azurewebsites.net/search"; // Assuming this URL handles different engines or we need logic here + const externalSearchUrl = "https://directus-qa-support.azurewebsites.net/search" // Assuming this URL handles different engines or we need logic here const r = await axios.post(externalSearchUrl, { query, - domains: [domain], // Pass domain in array + domains: domain, // Pass domain in array limit, distance: externalSearchDistance, // We might need to pass the 'engine' type to the backend if it handles multiple engines // engine: engine, - }); - finalResults = r.data as VectorDocument[] || []; // Assign directly - console.log(`[External Path] Total results: ${finalResults.length}`); + }) + finalResults = r.data as VectorDocument[] || [] // Assign directly + console.log(`[External Path] Total results: ${finalResults.length}`) } catch (error) { - console.error("[External Path] Error during search:", error); - finalResults = []; // Ensure empty results on error + console.error("[External Path] Error during search:", error) + finalResults = [] // Ensure empty results on error } } else { - console.log("[External Path] No domain provided."); - finalResults = []; + console.log("[External Path] No domain provided.") + finalResults = [] } } // --- Post-Search Processing (Deduplication) --- - console.log("Total results before deduplication:", finalResults.length); - deduped = deduplicateDocuments(finalResults); - console.log("Deduplicated results:", deduped.length); + console.log("Total results before deduplication:", finalResults.length) + deduped = deduplicateDocuments(finalResults) + console.log("Deduplicated results:", 
deduped.length) if (deduped.length === 0) { - console.log("Search worker: No results found after search and deduplication."); - return; + console.log("Search worker: No results found after search and deduplication.") + return } // --- Set Output --- - worker.fields.output.value = deduped; + worker.fields.output.value = deduped worker.fields.references.value = deduped.map(d => ({ link: d.ref || d.source || "", title: d.title || "Search Result" - })); + })) - console.log("Search worker execution finished."); + console.log("Search worker execution finished.") } @@ -309,6 +306,7 @@ export const search: WorkerRegistryItem = { return agent.initializeWorker( { type: "search", + conditionable: true, parameters: {}, }, [ @@ -317,12 +315,11 @@ export const search: WorkerRegistryItem = { { type: "references", direction: "output", title: "References", name: "references" }, { type: "string", direction: "input", title: "Engine", name: "engine" }, - { type: "string", direction: "input", title: "Domain", name: "domain" }, + { type: "string[]", direction: "input", title: "Domain", name: "domain" }, { type: "number", direction: "input", title: "Distance", name: "distance" }, { type: "number", direction: "input", title: "Max Results", name: "maxResults" }, { type: "string", direction: "input", title: "Collections", name: "collections" }, - { type: "unknown", direction: "input", title: "Condition", name: "condition", condition: true }, ], search ) diff --git a/src/agents/workers/state.ts b/src/agents/workers/state.ts new file mode 100644 index 0000000..df8e0bb --- /dev/null +++ b/src/agents/workers/state.ts @@ -0,0 +1,84 @@ +import { z } from "zod" +import { supabase } from "../db" + +declare global { + interface StateWorker extends AIWorker { + fields: { + input: NodeIO + output: NodeIO + } + } +} + +async function execute(worker: StateWorker, p: AgentParameters) { + + if (worker.fields.input.value != null) { + p.state[worker.id] = worker.fields.input.value + } + + worker.fields.output.value = p.state[worker.id] + + // const inputs = worker.getInputHandlersByName() + // const outputs = worker.getOutputHandlersByName() + // const json = {} + + // if (!p.uid || z.string().uuid().safeParse(p.uid).success === false) { + // for (const key in inputs) { + // const i = inputs[key] + // const o = outputs[key] + // if (i && o) { + // o.value = i.value + // json[i.name] = i.value + // } + // } + // return + // } + + // const dbState = await supabase.from("states").select("*").eq("id", p.uid).single() + + // const finalState = {} + // const state = (dbState.data?.state || {}) as {} + + // for (const key in inputs) { + // const i = inputs[key] + // if (i.value != null) { + // finalState[i.name] = i.value + // } else { + // finalState[i.name] = state[i.name] + // } + // } + + // for (const key in finalState) { + // const o = outputs[key] + // if (o) { + // o.value = finalState[key] + // json[o.name] = finalState[key] + // } + // } + + // await supabase.from("states").upsert({ id: p.uid, state: finalState }) + +} + + +export const state: WorkerRegistryItem = { + title: "Persist", + execute, + category: "tool", + type: "state", + description: "This worker allows you to persist data for later use.", + create(agent: Agent) { + const w = agent.initializeWorker( + { type: "state" }, + [ + { type: "unknown", direction: "input", title: "Input", name: "input", system: true }, + { type: "unknown", direction: "output", title: "Output", name: "output", system: true }, + ], + state + ) as StateWorker + return w + }, + get registry() { 
return state },
+}
+
+
diff --git a/src/agents/workers/stt.ts b/src/agents/workers/stt.ts
new file mode 100644
index 0000000..84ed695
--- /dev/null
+++ b/src/agents/workers/stt.ts
@@ -0,0 +1,61 @@
+import OpenAI from "openai"
+
+declare global {
+  type STTEngine = "whisper-1"
+  interface STTWorker extends AIWorker {
+    fields: {
+      input: NodeIO
+      output: NodeIO
+    }
+    parameters: {
+      engine?: STTEngine
+    }
+  }
+}
+
+async function execute(worker: STTWorker, { apikeys }: AgentParameters) {
+  const audio = worker.fields.input.value as { audio: string, ext: string }
+  const engine = worker.parameters.engine || "whisper-1"
+  const openai = new OpenAI({ apiKey: apikeys.openai, dangerouslyAllowBrowser: true })
+
+  const binaryString = atob(audio.audio)
+  const bytes = new Uint8Array(binaryString.length)
+  for (let i = 0; i < binaryString.length; i++) {
+    bytes[i] = binaryString.charCodeAt(i)
+  }
+  const file = new File([bytes], `audio.${audio.ext}`, { type: `audio/${audio.ext}` })
+
+  const response = await openai.audio.transcriptions.create({
+    file,
+    model: engine,
+  })
+
+  const message = response.text
+
+  worker.fields.output.value = message
+}
+
+
+export const stt: WorkerRegistryItem = {
+  title: "STT",
+  execute,
+  category: "tool",
+  type: "stt",
+  description: "This worker converts speech to text",
+  create(agent: Agent) {
+    const w = agent.initializeWorker(
+      {
+        type: "stt",
+        conditionable: true,
+      },
+      [
+        { type: "audio", direction: "input", title: "Input", name: "input" },
+        { type: "string", direction: "output", title: "Output", name: "output" },
+      ],
+      stt
+    ) as STTWorker
+    w.parameters.engine = "whisper-1"
+    return w
+  },
+  get registry() { return stt },
+}
diff --git a/src/agents/workers/text.ts b/src/agents/workers/text.ts
index cdf1caa..0d6385c 100644
--- a/src/agents/workers/text.ts
+++ b/src/agents/workers/text.ts
@@ -3,35 +3,57 @@ declare global {
   interface TextWorker extends AIWorker {
     fields: {
       output: NodeIO
-      condition: NodeIO
+      // condition: NodeIO
     },
     parameters: {
       text?: string
+      contentType?: "text" | "number" | "audio" | "image" | "file"
+      contentUri?: string
+      numberValue?: number
     }
   }
 }
 async function execute(worker: TextWorker) {
-  worker.fields.output.value = worker.parameters.text || ""
+  const { contentType, text, contentUri, numberValue } = worker.parameters
+
+  switch (contentType) {
+    case "number":
+      worker.fields.output.value = numberValue !== undefined ?
numberValue : 0 + break + case "audio": + case "image": + case "file": + worker.fields.output.value = contentUri || "" + break + case "text": + default: + worker.fields.output.value = text || "" + break + } } export const text: WorkerRegistryItem = { - title: "Text", + title: "Content", execute, category: "generator", type: "text", - description: "This worker generates static text", + description: "This worker generates static content (text, numbers, audio, images, files)", create(agent: Agent) { - return agent.initializeWorker( - { type: "text" }, + { + type: "text", + conditionable: true, + parameters: { + contentType: "text", + }, + }, [ { type: "string", direction: "output", title: "Output", name: "output" }, - { type: "unknown", direction: "input", title: "Condition", name: "condition", condition: true }, + // { type: "unknown", direction: "input", title: "Condition", name: "condition", condition: true }, ], text ) - }, get registry() { return text }, } diff --git a/src/agents/workers/translate.ts b/src/agents/workers/translate.ts new file mode 100644 index 0000000..4c5a28d --- /dev/null +++ b/src/agents/workers/translate.ts @@ -0,0 +1,72 @@ +import axios from "axios" + +declare global { + + interface TranslateWorker extends AIWorker { + fields: { + input: NodeIO + language: NodeIO + output: NodeIO + } + } + +} + + +async function execute(worker: TranslateWorker, { apikeys }: AgentParameters) { + + const apiKey = apikeys["googleTranslateApiKey"] + const project = apikeys["googleTranslateProjectId"] + const targetLanguageCode = worker.fields.language.value || "" + const content = worker.fields.input.value || "" + + if (!apiKey) { + worker.error = `No Google Translate API key found` + return + } + if (!project) { + worker.error = `No Google Translate Project ID found` + return + } + + const r = await axios.post(`https://translation.googleapis.com/v3/projects/${project}:translateText`, { + targetLanguageCode, + contents: [content] + }, { + headers: { + Authorization: `Bearer ${apiKey}`, + "Content-Type": "application/json", + charset: "utf-8", + }, + }) + + worker.fields.output.value = r.data?.translations[0].translatedText || content + +} + + +export const translate: WorkerRegistryItem = { + title: "Translate", + category: "tool", + type: "translate", + description: "Translate text using Google Translate API", + execute, + create(agent) { + return agent.initializeWorker( + { + type: "translate", + conditionable: true, + }, + [ + { type: "string", direction: "input", title: "Input", name: "input" }, + { type: "string", direction: "output", title: "Output", name: "output" }, + { type: "string", direction: "input", title: "Language", name: "language" }, + { type: "unknown", direction: "input", title: "Condition", name: "condition", condition: true }, + ], + translate, + ) + }, + get registry() { return translate }, + +} + diff --git a/src/agents/workers/tts.ts b/src/agents/workers/tts.ts index b047c17..81ae892 100644 --- a/src/agents/workers/tts.ts +++ b/src/agents/workers/tts.ts @@ -1,26 +1,146 @@ +declare global { + type TTSEngine = "whisper" + interface TTSWorker extends AIWorker { + fields: { + input: NodeIO + output: NodeIO + } + parameters: {} + } +} -// function create(agent: Agent) { +const GOOGLE_TRANSLATE_API_KEY = import.meta.env.VITE_GOOGLE_TRANSLATE_API_KEY +const GOOGLE_KEY = import.meta.env.VITE_GOOGLE_KEY -// return agent.initializeWorker( -// { type: "stt" }, -// [ -// { type: "audio", direction: "input", title: "Input", name: "input" }, -// { type: "string", direction: 
"output", title: "Output", name: "output" }, -// ], -// stt -// ) +async function detectLanguage(text: string) { + try { + const url = `https://translation.googleapis.com/language/translate/v2/detect?key=${GOOGLE_TRANSLATE_API_KEY}` + const response = await fetch(url, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + q: text + }) + }) + const data = await response.json() + const detection = data.data.detections[0][0] + console.log(`Detected language: ${detection.language}`) + return detection.language + } catch (error) { + console.error('Error detecting language: ', error) + return 'en' + } +} -// } +async function getAvailableVoices() { + try { + const url = `https://texttospeech.googleapis.com/v1/voices?key=${GOOGLE_KEY}` + const response = await fetch(url, { + method: 'GET', + }) + const responseJson = await response.json() + return responseJson?.voices + } catch (error) { + console.error('Error fetching voices: ', error) + } +} -// async function execute(worker: AIWorker) { +async function findBestVoiceForLanguage(languageCode: string, voices: any[]): Promise { + const matchingVoices = voices.filter(voice => voice.languageCodes?.includes(languageCode)) + if (matchingVoices.length > 0) { + return matchingVoices[0].languageCodes![0] + } + const generalLanguage = languageCode.split('-')[0] + const fallbackVoices = voices.filter(voice => voice.languageCodes?.some(code => code.startsWith(generalLanguage))) + if (fallbackVoices.length > 0) { + return fallbackVoices[0].languageCodes![0] + } + return 'en-US' +} -// } +async function googletextToSpeech(text: string, format: string): Promise { + try { + const language = await detectLanguage(text) + const voices = await getAvailableVoices() + const bestVoice = findBestVoiceForLanguage(language, voices) + let voiceName = null -// export const stt: WorkerRegistryItem = { -// title: "Speech to Text", -// execute, -// create, -// get registry() { return stt }, -// } + if (language == "en" || language == "en-US") { + voiceName = { + "languageCode": "en-US", + name: "en-US-Journey-O", + ssmlGender: "FEMALE", + } + } else if (language == "es" || language == "es-ES") { + voiceName = { + "languageCode": "es-US", + name: "es-US-Journey-F", + ssmlGender: "FEMALE", + } + } + if (!GOOGLE_KEY || !text) return + + const url = `https://texttospeech.googleapis.com/v1/text:synthesize?key=${GOOGLE_KEY}` + + const data = { + 'input': { + 'text': text + }, + 'voice': voiceName || { + 'languageCode': bestVoice, + }, + 'audioConfig': { + 'audioEncoding': format + } + } + + const response = await fetch(url, { + method: 'POST', + body: JSON.stringify(data) + }) + + const responseJson = await response.json() + return responseJson?.audioContent + + } catch (error) { + throw new Error(error) + } + +} + +async function execute(worker: TTSWorker, p: AgentParameters) { + + const text = worker.fields.input.value + + const textToSpeech = await googletextToSpeech(text, "MP3") + + worker.fields.output.value = { audio: textToSpeech, ext: "mp3" } +} + + +export const tts: WorkerRegistryItem = { + title: "TTS", + execute, + category: "tool", + type: "tts", + description: "This worker converts text to speach", + create(agent: Agent) { + const w = agent.initializeWorker( + { + type: "tts", + conditionable: true, + }, + [ + { type: "string", direction: "input", title: "Input", name: "input" }, + { type: "audio", direction: "output", title: "Output", name: "output" }, + ], + tts + ) as TTSWorker + return w + }, + get registry() { 
return tts }, +}