|
62 | 62 | },
|
63 | 63 | {
|
64 | 64 | "cell_type": "code",
|
65 |
| - "execution_count": 4, |
| 65 | + "execution_count": null, |
66 | 66 | "metadata": {},
|
67 |
| - "outputs": [ |
68 |
| - { |
69 |
| - "name": "stdout", |
70 |
| - "output_type": "stream", |
71 |
| - "text": [ |
72 |
| - "Adding dependency \u001b[0m\u001b[1m\u001b[32morg.slf4j:slf4j-jdk14:2.0.9\n", |
73 |
| - "\u001b[0mAdding dependency \u001b[0m\u001b[1m\u001b[32morg.bsc.langgraph4j:langgraph4j-core:1.2-SNAPSHOT\n", |
74 |
| - "\u001b[0mAdding dependency \u001b[0m\u001b[1m\u001b[32morg.bsc.langgraph4j:langgraph4j-langchain4j:1.2-SNAPSHOT\n", |
75 |
| - "\u001b[0mAdding dependency \u001b[0m\u001b[1m\u001b[32mdev.langchain4j:langchain4j:0.36.2\n", |
76 |
| - "\u001b[0mAdding dependency \u001b[0m\u001b[1m\u001b[32mdev.langchain4j:langchain4j-open-ai:0.36.2\n", |
77 |
| - "\u001b[0mSolving dependencies\n", |
78 |
| - "Resolved artifacts count: 26\n", |
79 |
| - "Add to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/slf4j/slf4j-jdk14/2.0.9/slf4j-jdk14-2.0.9.jar\u001b[0m\n", |
80 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/slf4j/slf4j-api/2.0.9/slf4j-api-2.0.9.jar\u001b[0m\n", |
81 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/bsc/langgraph4j/langgraph4j-core/1.2-SNAPSHOT/langgraph4j-core-1.2-SNAPSHOT.jar\u001b[0m\n", |
82 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/bsc/async/async-generator-jdk8/2.3.0/async-generator-jdk8-2.3.0.jar\u001b[0m\n", |
83 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/bsc/langgraph4j/langgraph4j-langchain4j/1.2-SNAPSHOT/langgraph4j-langchain4j-1.2-SNAPSHOT.jar\u001b[0m\n", |
84 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/dev/langchain4j/langchain4j/0.36.2/langchain4j-0.36.2.jar\u001b[0m\n", |
85 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/dev/langchain4j/langchain4j-core/0.36.2/langchain4j-core-0.36.2.jar\u001b[0m\n", |
86 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/com/google/code/gson/gson/2.10.1/gson-2.10.1.jar\u001b[0m\n", |
87 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/apache/opennlp/opennlp-tools/1.9.4/opennlp-tools-1.9.4.jar\u001b[0m\n", |
88 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/dev/langchain4j/langchain4j-open-ai/0.36.2/langchain4j-open-ai-0.36.2.jar\u001b[0m\n", |
89 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/dev/ai4j/openai4j/0.23.0/openai4j-0.23.0.jar\u001b[0m\n", |
90 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/com/squareup/retrofit2/retrofit/2.9.0/retrofit-2.9.0.jar\u001b[0m\n", |
91 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/com/squareup/retrofit2/converter-jackson/2.9.0/converter-jackson-2.9.0.jar\u001b[0m\n", |
92 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/com/fasterxml/jackson/core/jackson-databind/2.17.2/jackson-databind-2.17.2.jar\u001b[0m\n", |
93 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/com/fasterxml/jackson/core/jackson-annotations/2.17.2/jackson-annotations-2.17.2.jar\u001b[0m\n", |
94 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/com/fasterxml/jackson/core/jackson-core/2.17.2/jackson-core-2.17.2.jar\u001b[0m\n", |
95 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/com/squareup/okhttp3/okhttp/4.12.0/okhttp-4.12.0.jar\u001b[0m\n", |
96 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/com/squareup/okio/okio/3.6.0/okio-3.6.0.jar\u001b[0m\n", |
97 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/com/squareup/okio/okio-jvm/3.6.0/okio-jvm-3.6.0.jar\u001b[0m\n", |
98 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/jetbrains/kotlin/kotlin-stdlib-common/1.9.10/kotlin-stdlib-common-1.9.10.jar\u001b[0m\n", |
99 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/com/squareup/okhttp3/okhttp-sse/4.12.0/okhttp-sse-4.12.0.jar\u001b[0m\n", |
100 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/jetbrains/kotlin/kotlin-stdlib-jdk8/1.9.25/kotlin-stdlib-jdk8-1.9.25.jar\u001b[0m\n", |
101 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/jetbrains/kotlin/kotlin-stdlib/1.9.25/kotlin-stdlib-1.9.25.jar\u001b[0m\n", |
102 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/jetbrains/annotations/13.0/annotations-13.0.jar\u001b[0m\n", |
103 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/org/jetbrains/kotlin/kotlin-stdlib-jdk7/1.9.25/kotlin-stdlib-jdk7-1.9.25.jar\u001b[0m\n", |
104 |
| - "\u001b[0mAdd to classpath: \u001b[0m\u001b[32m/Users/bsorrentino/Library/Jupyter/kernels/rapaio-jupyter-kernel/mima_cache/com/knuddels/jtokkit/1.1.0/jtokkit-1.1.0.jar\u001b[0m\n", |
105 |
| - "\u001b[0m" |
106 |
| - ] |
107 |
| - } |
108 |
| - ], |
| 67 | + "outputs": [], |
109 | 68 | "source": [
|
110 | 69 | "%dependency /add org.slf4j:slf4j-jdk14:2.0.9\n",
|
111 | 70 | "%dependency /add org.bsc.langgraph4j:langgraph4j-core:\\{langgraph4jVersion}\n",
|
|
120 | 79 | "cell_type": "markdown",
|
121 | 80 | "metadata": {},
|
122 | 81 | "source": [
|
123 |
| - "### Initialize Logger" |
| 82 | + "**Initialize Logger**" |
124 | 83 | ]
|
125 | 84 | },
|
126 | 85 | {
|
|
138 | 97 | "var log = org.slf4j.LoggerFactory.getLogger(\"llm-streaming\");\n"
|
139 | 98 | ]
|
140 | 99 | },
|
| 100 | + { |
| 101 | + "cell_type": "markdown", |
| 102 | + "metadata": {}, |
| 103 | + "source": [ |
| 104 | + "## How to use LLMStreamingGenerator" |
| 105 | + ] |
| 106 | + }, |
141 | 107 | {
|
142 | 108 | "cell_type": "code",
|
143 |
| - "execution_count": 6, |
| 109 | + "execution_count": null, |
144 | 110 | "metadata": {},
|
145 |
| - "outputs": [ |
146 |
| - { |
147 |
| - "name": "stderr", |
148 |
| - "output_type": "stream", |
149 |
| - "text": [ |
150 |
| - "StreamingOutput{chunk=} \n", |
151 |
| - "StreamingOutput{chunk=Why} \n", |
152 |
| - "StreamingOutput{chunk= don't} \n", |
153 |
| - "StreamingOutput{chunk= scientists} \n", |
154 |
| - "StreamingOutput{chunk= trust} \n", |
155 |
| - "StreamingOutput{chunk= atoms} \n", |
156 |
| - "StreamingOutput{chunk=?\n", |
157 |
| - "\n", |
158 |
| - "} \n", |
159 |
| - "StreamingOutput{chunk=Because} \n", |
160 |
| - "StreamingOutput{chunk= they} \n", |
161 |
| - "StreamingOutput{chunk= make} \n", |
162 |
| - "StreamingOutput{chunk= up} \n", |
163 |
| - "StreamingOutput{chunk= everything} \n", |
164 |
| - "StreamingOutput{chunk=!} \n", |
165 |
| - "RESULT: {content=AiMessage { text = \"Why don't scientists trust atoms?\n", |
166 |
| - "\n", |
167 |
| - "Because they make up everything!\" toolExecutionRequests = null }} \n" |
168 |
| - ] |
169 |
| - } |
170 |
| - ], |
| 111 | + "outputs": [], |
171 | 112 | "source": [
|
172 | 113 | "import dev.langchain4j.model.StreamingResponseHandler;\n",
|
173 | 114 | "import dev.langchain4j.model.chat.StreamingChatLanguageModel;\n",
|
|
200 | 141 | " \n",
|
201 | 142 | "//Thread.sleep( 1000 );"
|
202 | 143 | ]
|
| 144 | + }, |
| 145 | + { |
| 146 | + "cell_type": "markdown", |
| 147 | + "metadata": {}, |
| 148 | + "source": [ |
| 149 | + "## Use LLMStreamGenerator in Agent" |
| 150 | + ] |
| 151 | + }, |
| 152 | + { |
| 153 | + "cell_type": "markdown", |
| 154 | + "metadata": {}, |
| 155 | + "source": [ |
| 156 | + "### Define State" |
| 157 | + ] |
| 158 | + }, |
| 159 | + { |
| 160 | + "cell_type": "code", |
| 161 | + "execution_count": 7, |
| 162 | + "metadata": {}, |
| 163 | + "outputs": [], |
| 164 | + "source": [ |
| 165 | + "import org.bsc.langgraph4j.state.AgentState;\n", |
| 166 | + "import org.bsc.langgraph4j.state.Channel;\n", |
| 167 | + "import org.bsc.langgraph4j.state.AppenderChannel;\n", |
| 168 | + "import dev.langchain4j.data.message.ChatMessage;\n", |
| 169 | + "import dev.langchain4j.data.message.UserMessage;\n", |
| 170 | + "\n", |
| 171 | + "public class MessageState extends AgentState {\n", |
| 172 | + "\n", |
| 173 | + " static Map<String, Channel<?>> SCHEMA = Map.of(\n", |
| 174 | + " \"messages\", AppenderChannel.<ChatMessage>of(ArrayList::new)\n", |
| 175 | + " );\n", |
| 176 | + "\n", |
| 177 | + " public MessageState(Map<String, Object> initData) {\n", |
| 178 | + " super( initData );\n", |
| 179 | + " }\n", |
| 180 | + "\n", |
| 181 | + " List<ChatMessage> messages() {\n", |
| 182 | + " return this.<List<ChatMessage>>value( \"messages\" )\n", |
| 183 | + " .orElseThrow( () -> new RuntimeException( \"messages not found\" ) );\n", |
| 184 | + " }\n", |
| 185 | + "\n", |
| 186 | + " // utility method to quick access to last message\n", |
| 187 | + " Optional<ChatMessage> lastMessage() {\n", |
| 188 | + " List<ChatMessage> messages = messages();\n", |
| 189 | + " return ( messages.isEmpty() ) ? \n", |
| 190 | + " Optional.empty() :\n", |
| 191 | + " Optional.of(messages.get( messages.size() - 1 ));\n", |
| 192 | + " }\n", |
| 193 | + "}" |
| 194 | + ] |
| 195 | + }, |
| 196 | + { |
| 197 | + "cell_type": "markdown", |
| 198 | + "metadata": {}, |
| 199 | + "source": [ |
| 200 | + "### Define Serializers" |
| 201 | + ] |
| 202 | + }, |
| 203 | + { |
| 204 | + "cell_type": "code", |
| 205 | + "execution_count": null, |
| 206 | + "metadata": {}, |
| 207 | + "outputs": [], |
| 208 | + "source": [ |
| 209 | + "import dev.langchain4j.data.message.AiMessage;\n", |
| 210 | + "import dev.langchain4j.data.message.SystemMessage;\n", |
| 211 | + "import dev.langchain4j.data.message.UserMessage;\n", |
| 212 | + "import dev.langchain4j.data.message.ToolExecutionResultMessage;\n", |
| 213 | + "import dev.langchain4j.agent.tool.ToolExecutionRequest;\n", |
| 214 | + "import org.bsc.langgraph4j.serializer.std.ObjectStreamStateSerializer;\n", |
| 215 | + "import org.bsc.langgraph4j.langchain4j.serializer.std.ChatMesssageSerializer;\n", |
| 216 | + "import org.bsc.langgraph4j.langchain4j.serializer.std.ToolExecutionRequestSerializer;\n", |
| 217 | + "import org.bsc.langgraph4j.state.AgentStateFactory;\n", |
| 218 | + "\n", |
| 219 | + "var stateSerializer = new ObjectStreamStateSerializer<MessageState>( MessageState::new );\n", |
| 220 | + "stateSerializer.mapper()\n", |
| 221 | + " // Setup custom serializer for Langchain4j ToolExecutionRequest\n", |
| 222 | + " .register(ToolExecutionRequest.class, new ToolExecutionRequestSerializer() )\n", |
| 223 | + " // Setup custom serializer for Langchain4j AiMessage\n", |
| 224 | + " .register(ChatMessage.class, new ChatMesssageSerializer() );\n" |
| 225 | + ] |
| 226 | + }, |
| 227 | + { |
| 228 | + "cell_type": "markdown", |
| 229 | + "metadata": {}, |
| 230 | + "source": [ |
| 231 | + "## Set up the tools\n", |
| 232 | + "\n", |
| 233 | + "Using [langchain4j], We will first define the tools we want to use. For this simple example, we will\n", |
| 234 | + "use create a placeholder search engine. However, it is really easy to create\n", |
| 235 | + "your own tools - see documentation\n", |
| 236 | + "[here][tools] on how to do\n", |
| 237 | + "that.\n", |
| 238 | + "\n", |
| 239 | + "[langchain4j]: https://docs.langchain4j.dev\n", |
| 240 | + "[tools]: https://docs.langchain4j.dev/tutorials/tools" |
| 241 | + ] |
| 242 | + }, |
| 243 | + { |
| 244 | + "cell_type": "code", |
| 245 | + "execution_count": 9, |
| 246 | + "metadata": {}, |
| 247 | + "outputs": [], |
| 248 | + "source": [ |
| 249 | + "import dev.langchain4j.agent.tool.P;\n", |
| 250 | + "import dev.langchain4j.agent.tool.Tool;\n", |
| 251 | + "\n", |
| 252 | + "import java.util.Optional;\n", |
| 253 | + "\n", |
| 254 | + "import static java.lang.String.format;\n", |
| 255 | + "\n", |
| 256 | + "public class SearchTool {\n", |
| 257 | + "\n", |
| 258 | + " @Tool(\"Use to surf the web, fetch current information, check the weather, and retrieve other information.\")\n", |
| 259 | + " String execQuery(@P(\"The query to use in your search.\") String query) {\n", |
| 260 | + "\n", |
| 261 | + " // This is a placeholder for the actual implementation\n", |
| 262 | + " return \"Cold, with a low of 13 degrees\";\n", |
| 263 | + " }\n", |
| 264 | + "}" |
| 265 | + ] |
| 266 | + }, |
| 267 | + { |
| 268 | + "cell_type": "code", |
| 269 | + "execution_count": 10, |
| 270 | + "metadata": {}, |
| 271 | + "outputs": [], |
| 272 | + "source": [ |
| 273 | + "import static org.bsc.langgraph4j.StateGraph.START;\n", |
| 274 | + "import static org.bsc.langgraph4j.StateGraph.END;\n", |
| 275 | + "import org.bsc.langgraph4j.StateGraph;\n", |
| 276 | + "import org.bsc.langgraph4j.action.EdgeAction;\n", |
| 277 | + "import static org.bsc.langgraph4j.action.AsyncEdgeAction.edge_async;\n", |
| 278 | + "import org.bsc.langgraph4j.action.NodeAction;\n", |
| 279 | + "import static org.bsc.langgraph4j.action.AsyncNodeAction.node_async;\n", |
| 280 | + "import dev.langchain4j.service.tool.DefaultToolExecutor;\n", |
| 281 | + "import org.bsc.langgraph4j.langchain4j.tool.ToolNode;\n", |
| 282 | + "import dev.langchain4j.agent.tool.ToolSpecification;\n", |
| 283 | + "import dev.langchain4j.model.openai.OpenAiStreamingChatModel;\n", |
| 284 | + "\n", |
| 285 | + "\n", |
| 286 | + "// setup streaming model\n", |
| 287 | + "var model = OpenAiStreamingChatModel.builder()\n", |
| 288 | + " .apiKey( System.getenv(\"OPENAI_API_KEY\") )\n", |
| 289 | + " .modelName( \"gpt-4o-mini\" )\n", |
| 290 | + " .logResponses(true)\n", |
| 291 | + " .temperature(0.0)\n", |
| 292 | + " .maxTokens(2000)\n", |
| 293 | + " .build();\n", |
| 294 | + "\n", |
| 295 | + "// setup tools \n", |
| 296 | + "var tools = ToolNode.builder()\n", |
| 297 | + " .specification( new SearchTool() ) \n", |
| 298 | + " .build(); \n", |
| 299 | + "\n", |
| 300 | + "NodeAction<MessageState> callModel = state -> {\n", |
| 301 | + " log.info(\"CallModel:\\n{}\", state.messages());\n", |
| 302 | + "\n", |
| 303 | + " var generator = LLMStreamingGenerator.<AiMessage, MessageState>builder()\n", |
| 304 | + " .mapResult(response -> {\n", |
| 305 | + " log.info(\"MapResult: {}\", response);\n", |
| 306 | + " return Map.of(\"messages\", response.content());\n", |
| 307 | + " })\n", |
| 308 | + " .startingNode(\"agent\")\n", |
| 309 | + " .startingState(state)\n", |
| 310 | + " .build();\n", |
| 311 | + "\n", |
| 312 | + " model.generate(\n", |
| 313 | + " state.messages(),\n", |
| 314 | + " tools.toolSpecifications(),\n", |
| 315 | + " generator.handler());\n", |
| 316 | + "\n", |
| 317 | + " return Map.of(\"messages\", generator);\n", |
| 318 | + "};\n", |
| 319 | + " \n", |
| 320 | + "// Route Message\n", |
| 321 | + "EdgeAction<MessageState> routeMessage = state -> {\n", |
| 322 | + " log.info(\"routeMessage:\\n{}\", state.messages());\n", |
| 323 | + "\n", |
| 324 | + " var lastMessage = state.lastMessage()\n", |
| 325 | + " .orElseThrow(() -> (new IllegalStateException(\"last message not found!\")));\n", |
| 326 | + "\n", |
| 327 | + " if (lastMessage instanceof AiMessage message) {\n", |
| 328 | + " // If tools should be called\n", |
| 329 | + " if (message.hasToolExecutionRequests()) return \"next\";\n", |
| 330 | + " }\n", |
| 331 | + "\n", |
| 332 | + " // If no tools are called, we can finish (respond to the user)\n", |
| 333 | + " return \"exit\";\n", |
| 334 | + "};\n", |
| 335 | + " \n", |
| 336 | + "// Invoke Tool\n", |
| 337 | + "NodeAction<MessageState> invokeTool = state -> {\n", |
| 338 | + " log.info(\"invokeTool:\\n{}\", state.messages());\n", |
| 339 | + "\n", |
| 340 | + " var lastMessage = state.lastMessage()\n", |
| 341 | + " .orElseThrow(() -> (new IllegalStateException(\"last message not found!\")));\n", |
| 342 | + "\n", |
| 343 | + "\n", |
| 344 | + " if (lastMessage instanceof AiMessage lastAiMessage) {\n", |
| 345 | + "\n", |
| 346 | + " var result = tools.execute(lastAiMessage.toolExecutionRequests(), null)\n", |
| 347 | + " .orElseThrow(() -> (new IllegalStateException(\"no tool found!\")));\n", |
| 348 | + "\n", |
| 349 | + " return Map.of(\"messages\", result);\n", |
| 350 | + "\n", |
| 351 | + " }\n", |
| 352 | + "\n", |
| 353 | + " throw new IllegalStateException(\"invalid last message\");\n", |
| 354 | + "};\n", |
| 355 | + " \n", |
| 356 | + "// Define Graph\n", |
| 357 | + "var workflow = new StateGraph<MessageState>(MessageState.SCHEMA, stateSerializer)\n", |
| 358 | + " .addNode(\"agent\", node_async(callModel))\n", |
| 359 | + " .addNode(\"tools\", node_async(invokeTool))\n", |
| 360 | + " .addEdge(START, \"agent\")\n", |
| 361 | + " .addConditionalEdges(\"agent\",\n", |
| 362 | + " edge_async(routeMessage),\n", |
| 363 | + " Map.of(\"next\", \"tools\", \"exit\", END))\n", |
| 364 | + " .addEdge(\"tools\", \"agent\");\n", |
| 365 | + " \n" |
| 366 | + ] |
| 367 | + }, |
| 368 | + { |
| 369 | + "cell_type": "code", |
| 370 | + "execution_count": null, |
| 371 | + "metadata": {}, |
| 372 | + "outputs": [], |
| 373 | + "source": [ |
| 374 | + "var app = workflow.compile();\n", |
| 375 | + "\n", |
| 376 | + "for( var out : app.stream( Map.of( \"messages\", UserMessage.from( \"what is the whether today?\")) ) ) {\n", |
| 377 | + " log.info( \"{}\", out );\n", |
| 378 | + "}\n" |
| 379 | + ] |
203 | 380 | }
|
204 | 381 | ],
|
205 | 382 | "metadata": {
|
|
215 | 392 | "name": "java",
|
216 | 393 | "nbconvert_exporter": "script",
|
217 | 394 | "pygments_lexer": "java",
|
218 |
| - "version": "22.0.2+9-70" |
| 395 | + "version": "17.0.2+8-86" |
219 | 396 | }
|
220 | 397 | },
|
221 | 398 | "nbformat": 4,
|
|