diff --git a/.prettierrc b/.prettierrc
new file mode 100644
index 0000000..abe2a6f
--- /dev/null
+++ b/.prettierrc
@@ -0,0 +1,5 @@
+{
+  "semi": true,
+  "singleQuote": true
+}
+
\ No newline at end of file
diff --git a/apps/web/app/components/nodes/BaseNode.tsx b/apps/web/app/components/nodes/BaseNode.tsx
index a632549..ad90be6 100644
--- a/apps/web/app/components/nodes/BaseNode.tsx
+++ b/apps/web/app/components/nodes/BaseNode.tsx
@@ -40,15 +40,15 @@ export default function BaseNode({ id, type, data }: BaseNodeProps) {
onClick={onConfigure}
className="
group
- w-[240px]
+ w-[140px]
px-4 py-6
- bg-gray-800/40
+ bg-white
border-2 border-dashed border-gray-600
rounded-lg
cursor-pointer
transition-all duration-200
hover:border-blue-500
- hover:bg-gray-800/60
+ hover:bg-white
hover:shadow-lg hover:shadow-blue-500/20
flex flex-col items-center gap-3
"
@@ -56,13 +56,9 @@ export default function BaseNode({ id, type, data }: BaseNodeProps) {
{/* Icon */}
{/* Label */}
-
+
{label}
-
Click to configure
+ {/*
Click to configure
*/}
{/* Handles */}
diff --git a/apps/web/app/workflows/[id]/components/nodes/PlaceholderNode.tsx b/apps/web/app/workflows/[id]/components/nodes/PlaceholderNode.tsx
index b564710..28938d9 100644
--- a/apps/web/app/workflows/[id]/components/nodes/PlaceholderNode.tsx
+++ b/apps/web/app/workflows/[id]/components/nodes/PlaceholderNode.tsx
@@ -20,7 +20,7 @@ export function PlaceholderNode({ data }: PlaceholderNodeProps) {
➕
-
Add Action
+
Add Action
);
diff --git a/apps/web/app/workflows/[id]/page.tsx b/apps/web/app/workflows/[id]/page.tsx
index 166e597..ead8bb9 100644
--- a/apps/web/app/workflows/[id]/page.tsx
+++ b/apps/web/app/workflows/[id]/page.tsx
@@ -551,7 +551,7 @@ export default function WorkflowCanvas() {
nodeTypes={nodeTypes}
fitView
>
-
+
diff --git a/packages/LLM/src/Agent/executor.d.ts b/packages/LLM/src/Agent/executor.d.ts
new file mode 100644
index 0000000..e96ba55
--- /dev/null
+++ b/packages/LLM/src/Agent/executor.d.ts
@@ -0,0 +1,7 @@
+import type { ExecuteParams, ExecuteResult } from "./types.js";
+export declare class AgentExecution {
+    private llmClient;
+    constructor();
+    Execute(params: ExecuteParams): Promise<ExecuteResult | undefined>;
+}
+//# sourceMappingURL=executor.d.ts.map
\ No newline at end of file
diff --git a/packages/LLM/src/Agent/executor.d.ts.map b/packages/LLM/src/Agent/executor.d.ts.map
new file mode 100644
index 0000000..1462514
--- /dev/null
+++ b/packages/LLM/src/Agent/executor.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"executor.d.ts","sourceRoot":"","sources":["executor.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,aAAa,EAAE,aAAa,EAAiB,MAAM,YAAY,CAAC;AAEzE,qBAAa,cAAc;IACvB,OAAO,CAAC,SAAS,CAAY;;IAKvB,OAAO,CAAC,OAAO,EAAG,aAAa,GAAI,OAAO,CAAC,aAAa,GAAG,SAAS,CAAC;CAqD9E"}
\ No newline at end of file
diff --git a/packages/LLM/src/Agent/executor.js b/packages/LLM/src/Agent/executor.js
new file mode 100644
index 0000000..ceb0a15
--- /dev/null
+++ b/packages/LLM/src/Agent/executor.js
@@ -0,0 +1,51 @@
+import LLMClient from "../LlmClient.js";
+export class AgentExecution {
+ constructor() {
+ this.llmClinet = new LLMClient();
+ }
+ async Execute(paramas) {
+ if (!paramas)
+ return;
+ const prompt = paramas.task;
+ try {
+ const context = {
+ task: prompt,
+ model: paramas.model,
+ maxIterations: paramas.maxIterations,
+ systemPrompt: paramas.systemPrompt,
+ messages: [],
+ iterationCount: 0,
+ totalTokens: 0
+ };
+ context.messages.push({
+ role: "system",
+ content: context.systemPrompt
+ }, {
+ role: "user",
+ content: prompt
+ });
+ while (context.iterationCount < context.maxIterations) {
+ try {
+ const input = context.messages;
+ const result = await this.llmClinet.call(input);
+ // if(!stopReason.){
+ // return {
+ // success: true,
+ // result: result.text,
+ // iterations: context.iterationCount + 1,
+ // tokensUsed: result.inputTokens + result.outputTokens,
+ // stopReason: "completed"
+ // }
+ // }
+ }
+ catch (e) {
+ console.warn("Internal Server Error");
+ }
+ }
+ }
+ catch (e) {
+ console.warn("Internal Server Error");
+ }
+ return;
+ }
+}
diff --git a/packages/LLM/src/Agent/types.d.ts b/packages/LLM/src/Agent/types.d.ts
new file mode 100644
index 0000000..90b350a
--- /dev/null
+++ b/packages/LLM/src/Agent/types.d.ts
@@ -0,0 +1,65 @@
+/** Message with the "system" role that seeds the conversation. */
+export interface SystemMessage {
+    role: "system";
+    content: string;
+}
+/** Message authored by the end user. */
+export interface UserMessage {
+    role: "user";
+    content: string;
+}
+/** A single function invocation requested by the assistant. */
+export interface ToolCall {
+    id: string;
+    type: "function";
+    function: {
+        name: string;
+        arguments: string;
+    };
+}
+/** Assistant reply; content may be null when only tool calls are emitted. */
+export interface AssistantMessage {
+    role: "assistant";
+    content: string | null;
+    tool_calls?: ToolCall[];
+}
+/** Result of a tool invocation, correlated back via tool_call_id. */
+export interface ToolMessage {
+    role: "tool";
+    tool_call_id: string;
+    content: string;
+}
+/** Discriminated union over all chat message variants (tag: role). */
+export type Message = SystemMessage | UserMessage | AssistantMessage | ToolMessage;
+/** Input accepted by AgentExecution.Execute. */
+export interface ExecuteParams {
+    task: string;
+    toolNames: string[];
+    model: string;
+    systemPrompt?: string;
+    maxIterations?: number;
+}
+/** Outcome summary produced when an agent run finishes. */
+export interface ExecuteResult {
+    success: boolean;
+    result: string;
+    iterations: number;
+    tokensUsed: number;
+    stopReason: "completed" | "max_iterations" | "error";
+}
+/** Mutable state threaded through a single agent run. */
+export interface AgentContext {
+    task: string;
+    model: string;
+    maxIterations: number;
+    systemPrompt: string;
+    messages: Message[];
+    iterationCount: number;
+    totalTokens: number;
+}
+/** Decision produced after each iteration about whether to stop. */
+export interface StopCheck {
+    shouldStop: boolean;
+    reason: "completed" | "max_iterations" | "error" | "continue";
+}
+//# sourceMappingURL=types.d.ts.map
\ No newline at end of file
diff --git a/packages/LLM/src/Agent/types.d.ts.map b/packages/LLM/src/Agent/types.d.ts.map
new file mode 100644
index 0000000..a6190d9
--- /dev/null
+++ b/packages/LLM/src/Agent/types.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["types.ts"],"names":[],"mappings":"AAMA,MAAM,WAAW,aAAa;IAC1B,IAAI,EAAE,QAAQ,CAAC;IACf,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,QAAQ;IACvB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,UAAU,CAAC;IACjB,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAC;QACb,SAAS,EAAE,MAAM,CAAC;KACnB,CAAC;CACH;AAED,MAAM,WAAW,gBAAgB;IAC/B,IAAI,EAAE,WAAW,CAAC;IAClB,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,UAAU,CAAC,EAAE,QAAQ,EAAE,CAAC;CACzB;AAED,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAC;IACb,YAAY,EAAE,MAAM,CAAC;IACrB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,MAAM,OAAO,GAAG,aAAa,GAAG,WAAW,GAAG,gBAAgB,GAAG,WAAW,CAAC;AAMnF,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,EAAE,MAAM,EAAE,CAAC;IACpB,KAAK,EAAE,MAAM,CAAC;IACd,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,aAAa,CAAC,EAAE,MAAM,CAAC;CACxB;AAED,MAAM,WAAW,aAAa;IAC5B,OAAO,EAAE,OAAO,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,UAAU,EAAE,WAAW,GAAG,gBAAgB,GAAG,OAAO,CAAC;CACtD;AAED,MAAM,WAAW,YAAY;IAE3B,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC;IACd,aAAa,EAAE,MAAM,CAAC;IACtB,YAAY,EAAE,MAAM,CAAC;IAGrB,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,cAAc,EAAE,MAAM,CAAC;IACvB,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,SAAS;IACxB,UAAU,EAAE,OAAO,CAAC;IACpB,MAAM,EAAE,WAAW,GAAG,gBAAgB,GAAG,OAAO,GAAG,UAAU,CAAC;CAC/D"}
\ No newline at end of file
diff --git a/packages/LLM/src/Agent/types.js b/packages/LLM/src/Agent/types.js
new file mode 100644
index 0000000..2a8978d
--- /dev/null
+++ b/packages/LLM/src/Agent/types.js
@@ -0,0 +1,2 @@
+// Compiled from packages/LLM/src/Agent/types.ts — type-only module, so there are no runtime exports.
+export {};
diff --git a/packages/LLM/src/LlmClient.d.ts b/packages/LLM/src/LlmClient.d.ts
new file mode 100644
index 0000000..20e8d44
--- /dev/null
+++ b/packages/LLM/src/LlmClient.d.ts
@@ -0,0 +1,21 @@
+/** Minimal client for the Gemini generateContent endpoint. */
+declare class LLMClient {
+    /**
+     * @param prompt chat messages ({ role, content }) or a raw string.
+     * @param options optional generation settings; each field may be omitted.
+     */
+    call(prompt: Array<{
+        role: string;
+        content: unknown;
+    }> | string, options?: {
+        temperature?: number;
+        maxOutputTokens?: number;
+    }): Promise<{
+        text: string;
+        inputTokens: number;
+        outputTokens: number;
+        totalCount: number;
+    }>;
+}
+export default LLMClient;
+//# sourceMappingURL=LlmClient.d.ts.map
\ No newline at end of file
diff --git a/packages/LLM/src/LlmClient.d.ts.map b/packages/LLM/src/LlmClient.d.ts.map
new file mode 100644
index 0000000..bbe0567
--- /dev/null
+++ b/packages/LLM/src/LlmClient.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"LlmClient.d.ts","sourceRoot":"","sources":["LlmClient.ts"],"names":[],"mappings":"AAIA,cAAM,SAAS;IACP,IAAI,CACR,MAAM,EAAE,GAAG,EAAE,EACb,OAAO,CAAC,EAAE;QACR,WAAW,EAAE,MAAM,CAAC;QACpB,eAAe,EAAE,MAAM,CAAC;KACzB,GACA,OAAO,CAAC;QACT,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,EAAE,MAAM,CAAC;QACpB,YAAY,EAAE,MAAM,CAAC;QACrB,UAAU,EAAE,MAAM,CAAC;KACpB,CAAC;CA2EH;AAED,eAAe,SAAS,CAAC"}
\ No newline at end of file
diff --git a/packages/LLM/src/LlmClient.js b/packages/LLM/src/LlmClient.js
new file mode 100644
index 0000000..8190dee
--- /dev/null
+++ b/packages/LLM/src/LlmClient.js
@@ -0,0 +1,64 @@
+import axios from "axios";
+import dotenv from "dotenv";
+dotenv.config();
+class LLMClient {
+ async call(prompt, options) {
+ const GEMINI_URL = process.env.GEMINI_URL ||
+ "https://generativelanguage.googleapis.com/v1beta/models/gemini-3-flash-preview:generateContent";
+ const GEMINI_API_KEY = process.env.GEMINI_API_KEY;
+ if (!GEMINI_API_KEY)
+ throw new Error("GEMINI API KEY not specified (why env is not working?)");
+ if (!GEMINI_URL)
+ throw new Error("GEMINI URL not specified (why env is not working?)");
+ console.log("making the gemini call");
+ const payload = {
+ contents: [
+ {
+ parts: [
+ {
+ text: prompt,
+ },
+ ],
+ },
+ ],
+ generationConfig: {
+ // stopSequencies: [
+ // "Title"
+ // ],
+ temperature: options.temperature,
+ maxOutputTokens: options.maxOutputTokens
+ }
+ };
+ try {
+ const response = await axios.post(`${GEMINI_URL}?key=${GEMINI_API_KEY}`, payload, {
+ headers: {
+ "Content-Type": "application/json",
+ },
+ });
+ console.log("LLM Response:", response.data);
+ const actuaResponse = response.data.candidates[0].content.parts[0];
+ // console.log("THe outpusssst is ", actuaResponse.text);
+ const inputToknes = response.data.usageMetadata.promptTokenCount;
+ const outputTOkens = response.data.usageMetadata.candidatesTokenCount;
+ const totalTokenCount = response.data.usageMetadata.totalTokenCount;
+ return {
+ text: actuaResponse.text,
+ inputTokens: inputToknes,
+ outputTokens: outputTOkens,
+ totalCount: totalTokenCount
+ };
+ }
+ catch (e) {
+ if (e?.response) {
+ console.error(`LLM call failed with status ${e.response.status}:`);
+ console.log(e.response.data.error.message);
+ }
+ else {
+ console.error("Error in calling the LLM:", e);
+ }
+ throw new Error("Failed to fetch response from Gemini API: " +
+ (e?.message || "Unknown error"));
+ }
+ }
+}
+export default LLMClient;