Skip to content

Commit a67789a

Browse files
committed
Release 1.1.0
1 parent 76c3788 commit a67789a

40 files changed

+431
-229
lines changed

BUILD

Lines changed: 1 addition & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,25 +1,5 @@
11
# Main library build recipe
22

3-
# C++ Requests
4-
cc_import(
5-
name = "cpr",
6-
shared_library = select({
7-
"@platforms//os:macos": "lib/macos/libcpr.1.dylib",
8-
"@platforms//os:linux": "lib/linux/libcpr.so.1",
9-
"//conditions:default": None,
10-
}),
11-
)
12-
13-
# Curl
14-
cc_import(
15-
name = "curl",
16-
shared_library = select({
17-
"@platforms//os:macos": "lib/macos/libcurl.4.dylib",
18-
"@platforms//os:linux": "lib/linux/libcurl.so.4.11.0",
19-
"//conditions:default": None,
20-
}),
21-
)
22-
233
# Python for python execution tool
244
cc_import(
255
name = "python",
@@ -46,8 +26,7 @@ cc_import(
4626
"//conditions:default": None,
4727
}),
4828
deps = [
49-
":cpr",
50-
":curl",
29+
"@nlohmann_json//:json",
5130
":python"
5231
],
5332
visibility = ["//visibility:public"]

MODULE.bazel

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,4 +8,7 @@ module(name = "agents", version = "0.1.0")
88
bazel_dep(name = "platforms", version = "0.0.11")
99
bazel_dep(name = "rules_cc", version = "0.1.4")
1010

11+
# JSON library
12+
bazel_dep(name = "nlohmann_json", version = "3.11.3")
13+
1114
# End of MODULE.bazel

MODULE.bazel.lock

Lines changed: 2 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

README.md

Lines changed: 22 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -9,26 +9,26 @@
99

1010
## 🚀 Features
1111

12-
- ⚙️ **Modular Architecture** — Compose agents from interchangeable components.
13-
- 🧩 **Multi-LLM Support** — Connect to multiple providers seamlessly:
14-
- **OpenAI** (GPT-4o, GPT-4, GPT-3.5 Turbo)
15-
- **Anthropic** (Claude 3 family models (Opus, Sonnet, Haiku)
16-
- **Google** (Gemini family models (Pro, Flash)
17-
- **Ollama/llama-cpp** (local models like Llama, Mistral, etc.)
18-
-**Optimized for Speed and Memory** — Built in C++ with focus on performance.
19-
- 🔁 **Built-In Workflow Patterns**
20-
- Prompt Chaining
21-
- Routing
22-
- Parallelization
23-
- Orchestrator-Workers
24-
- Evaluator-Optimizer
25-
- 🤖 **Autonomous Agents** — Supports modern reasoning strategies:
26-
- ReAct (Reason + Act)
12+
- ⚙️ **Modular Architecture** — Compose agents from interchangeable components.
13+
- 🧩 **Multi-LLM Support** — Connect to multiple providers seamlessly:
14+
- **OpenAI** (GPT-4o, GPT-4, GPT-3.5 Turbo)
15+
- **Anthropic** (Claude 3 family models: Opus, Sonnet, Haiku)
16+
- **Google** (Gemini family models: Pro, Flash)
17+
- **Ollama/llama-cpp** (local models like Llama, Mistral, etc.)
18+
- **Optimized for Speed and Memory** — Built in C++ with a focus on performance.
19+
- 🔁 **Built-In Workflow Patterns**
20+
- Prompt Chaining
21+
- Routing
22+
- Parallelization
23+
- Orchestrator-Workers
24+
- Evaluator-Optimizer
25+
- 🤖 **Autonomous Agents** — Supports modern reasoning strategies:
26+
- ReAct (Reason + Act)
2727
- CoT (Chain-of-Thought) [In Development]
28-
- Plan and Execute
29-
- Zero-Shot [In Development]
28+
- Plan and Execute
29+
- Zero-Shot [In Development]
3030
- Reflexion [In Development]
31-
- 🧠 **Extensible Tooling System** — Plug in your own tools or use built-in ones (Web Search, Wikipedia, Python Executor, etc).
31+
- 🧠 **Extensible Tooling System** — Plug in your own tools or use built-in ones (Web Search, Wikipedia, Python Executor, etc).
3232

3333
## ⚙️ Requirements
3434

@@ -37,8 +37,6 @@
3737

3838
- Dependencies (already provided for convenience)
3939
- python3 (3.11+)
40-
- libcpr (C++ Requests)
41-
- libcurl
4240
- nlohmann/json
4341
- spdlog
4442

@@ -105,7 +103,7 @@ The framework will check for API keys in the following order:
105103
Here's a simple example of creating and running an autonomous agent:
106104

107105
```cpp
108-
#include <agents-cpp/agent_context.h>
106+
#include <agents-cpp/context.h>
109107
#include <agents-cpp/agents/autonomous_agent.h>
110108
#include <agents-cpp/llm_interface.h>
111109
#include <agents-cpp/tools/tool_registry.h>
@@ -117,7 +115,7 @@ int main() {
117115
auto llm = createLLM("anthropic", "<your_api_key_here>", "claude-3-5-sonnet-20240620");
118116

119117
// Create agent context
120-
auto context = std::make_shared<AgentContext>();
118+
auto context = std::make_shared<Context>();
121119
context->setLLM(llm);
122120

123121
// Register tools
@@ -238,7 +236,7 @@ bazel run examples:<simple_agent> -- your_api_key_here
238236
- `lib/`: Public library for SDK
239237
- `include/agents-cpp/`: Public headers
240238
- `types.h`: Common type definitions
241-
- `agent_context.h`: Context for agent execution
239+
- `context.h`: Context for agent execution
242240
- `llm_interface.h`: Interface for LLM providers
243241
- `tool.h`: Tool interface
244242
- `memory.h`: Agent memory interface
@@ -283,7 +281,7 @@ You can create custom workflows by extending the `Workflow` base class or combin
283281
```cpp
284282
class CustomWorkflow : public Workflow {
285283
public:
286-
CustomWorkflow(std::shared_ptr<AgentContext> context)
284+
CustomWorkflow(std::shared_ptr<Context> context)
287285
: Workflow(context) {}
288286
289287
JsonObject run(const String& input) override {

examples/actor_agent_example.cpp

Lines changed: 12 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -14,9 +14,6 @@
1414
#include <agents-cpp/workflows/parallelization_workflow.h>
1515
#include <agents-cpp/workflows/prompt_chaining_workflow.h>
1616

17-
#include <chrono>
18-
#include <iostream>
19-
2017
using namespace agents;
2118
using namespace agents::workflows;
2219

@@ -122,7 +119,7 @@ int main(int argc, char* argv[]) {
122119

123120
try {
124121
// Create LLM interface
125-
auto llm = createLLM("google", api_key, "gemini-1.5-flash");
122+
auto llm = createLLM("google", api_key, "gemini-2.0-flash");
126123

127124
// Set up options
128125
LLMOptions options;
@@ -150,13 +147,13 @@ int main(int argc, char* argv[]) {
150147
);
151148

152149
// Create agent context
153-
auto context = std::make_shared<AgentContext>();
150+
auto context = std::make_shared<Context>();
154151
context->setLLM(llm);
155152
context->registerTool(calculator);
156153
context->registerTool(weather);
157154

158155
// Example 1: Using the prompt chaining workflow
159-
std::cout << "\n=== Example 1: Prompt Chaining Workflow ===\n\n";
156+
Logger::info("\n=== Example 1: Prompt Chaining Workflow ===\n\n");
160157

161158
auto chaining_workflow = std::make_shared<PromptChainingWorkflow>(context);
162159

@@ -179,10 +176,10 @@ int main(int argc, char* argv[]) {
179176
// Initialize and execute the workflow
180177
auto result = chaining_workflow->run();
181178

182-
std::cout << "Prompt chaining result: " << result.dump(2) << "\n\n";
179+
Logger::info("Prompt chaining result: {}", result.dump(2));
183180

184181
// Example 2: Using the parallelization workflow
185-
std::cout << "\n=== Example 2: Parallelization Workflow (Sectioning) ===\n\n";
182+
Logger::info("\n=== Example 2: Parallelization Workflow (Sectioning) ===\n\n");
186183

187184
auto parallel_workflow = std::make_shared<ParallelizationWorkflow>(
188185
context, ParallelizationWorkflow::Strategy::SECTIONING
@@ -208,10 +205,10 @@ int main(int argc, char* argv[]) {
208205
parallel_workflow->init();
209206
result = parallel_workflow->run();
210207

211-
std::cout << "Parallelization result: " << result.dump(2) << "\n\n";
208+
Logger::info("Parallelization result: {}", result.dump(2));
212209

213210
// Example 3: Using the actor agent
214-
std::cout << "\n=== Example 3: Actor Agent with Tools ===\n\n";
211+
Logger::info("\n=== Example 3: Actor Agent with Tools ===\n\n");
215212

216213
auto agent = std::make_shared<ActorAgent>(context);
217214

@@ -229,7 +226,7 @@ int main(int argc, char* argv[]) {
229226

230227
// Register status callback
231228
agent->setStatusCallback([](const String& status) {
232-
std::cout << "Agent status: " << status << "\n";
229+
Logger::info("Agent status: {}", status);
233230
});
234231

235232
// Initialize and run the agent
@@ -243,17 +240,17 @@ int main(int argc, char* argv[]) {
243240
};
244241

245242
for (const auto& task : tasks) {
246-
std::cout << "\nTask: " << task << "\n";
243+
Logger::info("Task: {}", task);
247244
result = blockingWait(agent->run(task));
248-
std::cout << "Result: " << result.dump(2) << "\n";
245+
Logger::info("Result: {}", result.dump(2));
249246

250247
// Small delay between tasks
251248
std::this_thread::sleep_for(std::chrono::seconds(1));
252249
}
253250

254251
return EXIT_SUCCESS;
255252
} catch (const std::exception& e) {
256-
std::cerr << "Error: " << e.what() << "\n";
253+
Logger::error("Error: {}", e.what());
257254
return EXIT_FAILURE;
258255
}
259-
}
256+
}

examples/autonomous_agent_example.cpp

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -116,7 +116,7 @@ int main() {
116116
llm->setOptions(options);
117117

118118
// Create agent context
119-
auto context = std::make_shared<AgentContext>();
119+
auto context = std::make_shared<Context>();
120120
context->setLLM(llm);
121121

122122
// Set system prompt for the context
@@ -146,7 +146,7 @@ int main() {
146146
int max_length = params.contains("max_length") ? params["max_length"].get<int>() : 100;
147147

148148
// Create a specific context for summarization
149-
auto summary_context = std::make_shared<AgentContext>(*context);
149+
auto summary_context = std::make_shared<Context>(*context);
150150
summary_context->setSystemPrompt(
151151
"You are a summarization assistant. Your task is to create concise, accurate summaries "
152152
"that capture the main points of the provided text."
@@ -274,4 +274,4 @@ int main() {
274274
}
275275

276276
return EXIT_SUCCESS;
277-
}
277+
}

examples/coroutine_example.cpp

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ bool humanApproval(const String& message, const JsonObject& context, String& mod
4242
}
4343

4444
// Example coroutine that performs a multi-step task using tools
45-
Task<JsonObject> performResearchTask(std::shared_ptr<AgentContext> context, const String& topic) {
45+
Task<JsonObject> performResearchTask(std::shared_ptr<Context> context, const String& topic) {
4646
Logger::info("Starting research on topic: {}", topic);
4747

4848
// Perform a search to get initial information
@@ -80,7 +80,7 @@ Task<JsonObject> performResearchTask(std::shared_ptr<AgentContext> context, cons
8080
}
8181

8282
// Example coroutine that generates content in parallel
83-
Task<JsonObject> generateContentInParallel(std::shared_ptr<AgentContext> context, const String& topic) {
83+
Task<JsonObject> generateContentInParallel(std::shared_ptr<Context> context, const String& topic) {
8484
Logger::info("Generating content for topic: {}", topic);
8585

8686
// Create a prompt for the introduction
@@ -123,7 +123,7 @@ Task<JsonObject> generateContentInParallel(std::shared_ptr<AgentContext> context
123123
}
124124

125125
// Example showing streaming text with coroutines
126-
Task<void> streamText(std::shared_ptr<AgentContext> context, const String& prompt) {
126+
Task<void> streamText(std::shared_ptr<Context> context, const String& prompt) {
127127
Logger::info("Streaming response for prompt: {}", prompt);
128128

129129
// Get a streaming generator
@@ -136,8 +136,8 @@ Task<void> streamText(std::shared_ptr<AgentContext> context, const String& promp
136136
String chunk = *item;
137137
std::cout << chunk << std::flush;
138138
}
139-
140139
std::cout << std::endl;
140+
141141
Logger::info("Streaming complete!");
142142
co_return;
143143
}
@@ -177,36 +177,36 @@ int main(int argc, char* argv[]) {
177177
llm->setOptions(options);
178178

179179
// Create agent context
180-
auto context = std::make_shared<AgentContext>();
180+
auto context = std::make_shared<Context>();
181181
context->setLLM(llm);
182182

183183
// Register tools
184184
context->registerTool(tools::createWebSearchTool());
185185
context->registerTool(tools::createWikipediaTool());
186-
context->registerTool(tools::createSummarizationTool(context));
186+
context->registerTool(tools::createSummarizationTool(llm));
187187

188188
// Menu-driven example to demonstrate various coroutines
189189
while (true) {
190-
std::cout << "\n========== COROUTINE EXAMPLES ==========\n";
191-
std::cout << "1. Run autonomous agent with coroutines\n";
192-
std::cout << "2. Perform research with parallel tool use\n";
193-
std::cout << "3. Generate content in parallel\n";
194-
std::cout << "4. Stream text example\n";
195-
std::cout << "5. Exit\n";
196-
std::cout << "Enter your choice: ";
190+
Logger::info("\n========== COROUTINE EXAMPLES ==========");
191+
Logger::info("1. Run autonomous agent with coroutines");
192+
Logger::info("2. Perform research with parallel tool use");
193+
Logger::info("3. Generate content in parallel");
194+
Logger::info("4. Stream text example");
195+
Logger::info("5. Exit");
196+
Logger::info("Enter your choice:");
197197

198198
int choice;
199199
std::cin >> choice;
200200
std::cin.ignore(); // Clear the newline
201201

202-
if (choice == 5) {
202+
if (choice >= 5 || choice < 1) {
203203
break;
204204
}
205205

206206
// Get topic from user
207207
std::string topic;
208208
if (choice >= 1 && choice <= 4) {
209-
std::cout << "Enter a topic: ";
209+
Logger::info("Enter a topic: ");
210210
std::getline(std::cin, topic);
211211
}
212212

@@ -273,4 +273,4 @@ int main(int argc, char* argv[]) {
273273
}
274274

275275
return EXIT_SUCCESS;
276-
}
276+
}

0 commit comments

Comments
 (0)