Commit d4b14a7

Add a2a support
Signed-off-by: David Gageot <[email protected]>
1 parent d9b1f7e commit d4b14a7

19 files changed: +1016 -143 lines

cmd/root/a2a.go

Lines changed: 65 additions & 0 deletions
@@ -0,0 +1,65 @@
package root

import (
	"fmt"

	"github.com/spf13/cobra"

	"github.com/docker/cagent/pkg/a2a"
	"github.com/docker/cagent/pkg/cli"
	"github.com/docker/cagent/pkg/config"
	"github.com/docker/cagent/pkg/server"
	"github.com/docker/cagent/pkg/telemetry"
)

type a2aFlags struct {
	agentName  string
	workingDir string
	port       int
	runConfig  config.RuntimeConfig
}

func newA2ACmd() *cobra.Command {
	var flags a2aFlags

	cmd := &cobra.Command{
		Use:   "a2a <agent-file>",
		Short: "Expose an agent over A2A (Agent-to-Agent)",
		Example: ` cagent a2a ./agent.yaml
 cagent a2a ./team.yaml --port 8080
 cagent a2a agentcatalog/pirate --port 9000`,
		Args: cobra.ExactArgs(1),
		RunE: flags.runA2ACommand,
	}

	cmd.PersistentFlags().StringVarP(&flags.agentName, "agent", "a", "root", "Name of the agent to run")
	cmd.PersistentFlags().StringVar(&flags.workingDir, "working-dir", "", "Set the working directory for the session (applies to tools and relative paths)")
	cmd.PersistentFlags().IntVar(&flags.port, "port", 0, "Port to listen on (default: random available port)")
	addRuntimeConfigFlags(cmd, &flags.runConfig)

	return cmd
}

func (f *a2aFlags) runA2ACommand(cmd *cobra.Command, args []string) error {
	telemetry.TrackCommand("a2a", args)

	ctx := cmd.Context()
	out := cli.NewPrinter(cmd.OutOrStdout())
	agentFilename := args[0]

	if err := setupWorkingDirectory(f.workingDir); err != nil {
		return err
	}

	// Listen as early as possible
	ln, err := server.Listen(ctx, fmt.Sprintf(":%d", f.port))
	if err != nil {
		return fmt.Errorf("failed to bind to port %d: %w", f.port, err)
	}
	go func() {
		<-ctx.Done()
		_ = ln.Close()
	}()

	return a2a.Start(ctx, out, agentFilename, f.agentName, f.runConfig, ln)
}
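For context, here is a minimal sketch of starting the same A2A server from Go outside the CLI. It mirrors runA2ACommand above; the pkg/a2a, pkg/server, pkg/cli, and config.RuntimeConfig usages are taken from this diff, while the standalone main wrapper, the signal handling, and the hard-coded ":8080" address and "agent.yaml"/"root" values are illustrative assumptions only.

	// Illustrative embedding sketch; not part of the commit.
	package main

	import (
		"context"
		"os"
		"os/signal"

		"github.com/docker/cagent/pkg/a2a"
		"github.com/docker/cagent/pkg/cli"
		"github.com/docker/cagent/pkg/config"
		"github.com/docker/cagent/pkg/server"
	)

	func main() {
		// Cancel the context on Ctrl-C so the listener is closed and Start returns.
		ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
		defer stop()

		out := cli.NewPrinter(os.Stdout)

		// Bind the port first, exactly as runA2ACommand does above.
		ln, err := server.Listen(ctx, ":8080")
		if err != nil {
			panic(err)
		}
		go func() {
			<-ctx.Done()
			_ = ln.Close()
		}()

		// Serve the "root" agent from agent.yaml over A2A; a zero RuntimeConfig is
		// assumed acceptable here (the CLI fills it from flags via addRuntimeConfigFlags).
		if err := a2a.Start(ctx, out, "agent.yaml", "root", config.RuntimeConfig{}, ln); err != nil {
			panic(err)
		}
	}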

cmd/root/root.go

Lines changed: 1 addition & 0 deletions
@@ -85,6 +85,7 @@ func NewRootCmd() *cobra.Command {
 	cmd.AddCommand(newAPICmd())
 	cmd.AddCommand(newACPCmd())
 	cmd.AddCommand(newMCPCmd())
+	cmd.AddCommand(newA2ACmd())
 	cmd.AddCommand(newEvalCmd())
 	cmd.AddCommand(newPushCmd())
 	cmd.AddCommand(newPullCmd())

e2e/cagent_a2a_test.go

Lines changed: 248 additions & 0 deletions
@@ -0,0 +1,248 @@
package e2e_test

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net"
	"net/http"
	"testing"

	"github.com/a2aproject/a2a-go/a2a"
	"github.com/a2aproject/a2a-go/a2asrv"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	a2aserver "github.com/docker/cagent/pkg/a2a"
	"github.com/docker/cagent/pkg/cli"
	"github.com/docker/cagent/pkg/config"
)

type Response struct {
	Jsonrpc string  `json:"jsonrpc"`
	ID      string  `json:"id"`
	Result  *Result `json:"result,omitempty"`
	Error   any     `json:"error,omitempty"`
}

type Result struct {
	Artifacts []Artifact `json:"artifacts"`
}

type Artifact struct {
	Parts []Part `json:"parts"`
}

type Part struct {
	Kind string `json:"kind"`
	Text string `json:"text"`
}

func TestA2AServer_AgentCard(t *testing.T) {
	t.Parallel()

	_, runtimeConfig := startRecordingAIProxy(t)
	agentCard := startA2AServer(t, "testdata/basic.yaml", runtimeConfig)

	assert.Equal(t, "root", agentCard.Name)
	assert.NotEmpty(t, agentCard.Description)
	assert.Equal(t, a2a.TransportProtocolJSONRPC, agentCard.PreferredTransport)
	assert.Contains(t, agentCard.URL, "/invoke")
	assert.True(t, agentCard.Capabilities.Streaming)
	assert.NotEmpty(t, agentCard.Version)
}

func TestA2AServer_Invoke(t *testing.T) {
	t.Parallel()

	_, runtimeConfig := startRecordingAIProxy(t)
	agentCard := startA2AServer(t, "testdata/basic.yaml", runtimeConfig)

	requestID := "test-request-1"
	jsonRPCRequest := map[string]any{
		"jsonrpc": "2.0",
		"id":      requestID,
		"method":  "message/send",
		"params": map[string]any{
			"message": map[string]any{
				"role": "user",
				"parts": []map[string]any{
					{
						"kind": "text",
						"text": "What is 2+2? Answer with just the number.",
					},
				},
			},
		},
	}

	requestBody, err := json.Marshal(jsonRPCRequest)
	require.NoError(t, err)

	req, err := http.NewRequestWithContext(t.Context(), http.MethodPost, agentCard.URL, bytes.NewReader(requestBody))
	require.NoError(t, err)
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	require.NoError(t, err)
	defer resp.Body.Close()

	assert.Equal(t, http.StatusOK, resp.StatusCode)

	responseBody, err := io.ReadAll(resp.Body)
	require.NoError(t, err)

	var jsonRPCResponse Response
	err = json.Unmarshal(responseBody, &jsonRPCResponse)
	require.NoError(t, err)

	assert.Equal(t, "2.0", jsonRPCResponse.Jsonrpc)
	assert.Equal(t, requestID, jsonRPCResponse.ID)
	assert.Nil(t, jsonRPCResponse.Error)
	require.NotNil(t, jsonRPCResponse.Result)
	assert.Len(t, jsonRPCResponse.Result.Artifacts, 1)
	assert.Len(t, jsonRPCResponse.Result.Artifacts[0].Parts, 2)
	assert.Equal(t, "text", jsonRPCResponse.Result.Artifacts[0].Parts[0].Kind)
	assert.Equal(t, "4", jsonRPCResponse.Result.Artifacts[0].Parts[0].Text)
	assert.Equal(t, "text", jsonRPCResponse.Result.Artifacts[0].Parts[1].Kind)
	assert.Equal(t, "4", jsonRPCResponse.Result.Artifacts[0].Parts[1].Text)
}

func TestA2AServer_MultipleRequests(t *testing.T) {
	t.Parallel()

	_, runtimeConfig := startRecordingAIProxy(t)
	agentCard := startA2AServer(t, "testdata/basic.yaml", runtimeConfig)

	messages := []string{
		"Say 'hello' in one word.",
		"Say 'goodbye' in one word.",
	}

	for i, message := range messages {
		t.Run(fmt.Sprintf("request_%d", i), func(t *testing.T) {
			requestID := fmt.Sprintf("test-request-%d", i)
			jsonRPCRequest := map[string]any{
				"jsonrpc": "2.0",
				"id":      requestID,
				"method":  "message/send",
				"params": map[string]any{
					"message": map[string]any{
						"role": "user",
						"parts": []map[string]any{
							{
								"kind": "text",
								"text": message,
							},
						},
					},
				},
			}

			requestBody, err := json.Marshal(jsonRPCRequest)
			require.NoError(t, err)

			req, err := http.NewRequestWithContext(t.Context(), http.MethodPost, agentCard.URL, bytes.NewReader(requestBody))
			require.NoError(t, err)
			req.Header.Set("Content-Type", "application/json")

			resp, err := http.DefaultClient.Do(req)
			require.NoError(t, err)
			defer resp.Body.Close()

			assert.Equal(t, http.StatusOK, resp.StatusCode)

			responseBody, err := io.ReadAll(resp.Body)
			require.NoError(t, err)

			var jsonRPCResponse Response
			err = json.Unmarshal(responseBody, &jsonRPCResponse)
			require.NoError(t, err)

			assert.Equal(t, requestID, jsonRPCResponse.ID)
			assert.Nil(t, jsonRPCResponse.Error)
			assert.NotNil(t, jsonRPCResponse.Result)
		})
	}
}

func TestA2AServer_MultiAgent(t *testing.T) {
	t.Parallel()

	_, runtimeConfig := startRecordingAIProxy(t)
	agentCard := startA2AServer(t, "testdata/multi.yaml", runtimeConfig)

	requestID := "test-multi-1"
	jsonRPCRequest := map[string]any{
		"jsonrpc": "2.0",
		"id":      requestID,
		"method":  "message/send",
		"params": map[string]any{
			"message": map[string]any{
				"role": "user",
				"parts": []map[string]any{
					{
						"kind": "text",
						"text": "Say hello.",
					},
				},
			},
		},
	}

	requestBody, err := json.Marshal(jsonRPCRequest)
	require.NoError(t, err)

	req, err := http.NewRequestWithContext(t.Context(), http.MethodPost, agentCard.URL, bytes.NewReader(requestBody))
	require.NoError(t, err)
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	require.NoError(t, err)
	defer resp.Body.Close()

	assert.Equal(t, http.StatusOK, resp.StatusCode)

	responseBody, err := io.ReadAll(resp.Body)
	require.NoError(t, err)

	var jsonRPCResponse Response
	err = json.Unmarshal(responseBody, &jsonRPCResponse)
	require.NoError(t, err)

	assert.Equal(t, requestID, jsonRPCResponse.ID)
	assert.Nil(t, jsonRPCResponse.Error)
	assert.NotNil(t, jsonRPCResponse.Result)

	t.Logf("Multi-agent response: %s", string(responseBody))
}

func startA2AServer(t *testing.T, agentFile string, runConfig config.RuntimeConfig) a2a.AgentCard {
	t.Helper()

	var lc net.ListenConfig
	ln, err := lc.Listen(t.Context(), "tcp", ":0")
	require.NoError(t, err)

	go func() {
		out := cli.NewPrinter(io.Discard)
		_ = a2aserver.Start(t.Context(), out, agentFile, "root", runConfig, ln)
	}()

	port := ln.Addr().(*net.TCPAddr).Port
	serverURL := fmt.Sprintf("http://localhost:%d", port)

	resp, err := http.Get(serverURL + a2asrv.WellKnownAgentCardPath)
	require.NoError(t, err)
	defer resp.Body.Close()

	require.Equal(t, http.StatusOK, resp.StatusCode)

	var agentCard a2a.AgentCard
	err = json.NewDecoder(resp.Body).Decode(&agentCard)
	require.NoError(t, err)

	return agentCard
}
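For readers who want to call the exposed agent outside the test harness, here is a condensed client sketch following the same flow the tests above use: fetch the agent card from the well-known path, then POST a JSON-RPC message/send request to the card's URL. The endpoint constant, the AgentCard type, and the payload shape come from the tests; the standalone program structure and the hard-coded localhost:8080 address are illustrative assumptions.

	// Illustrative A2A client sketch; assumes a server started with
	// something like `cagent a2a ./agent.yaml --port 8080`.
	package main

	import (
		"bytes"
		"encoding/json"
		"fmt"
		"net/http"

		"github.com/a2aproject/a2a-go/a2a"
		"github.com/a2aproject/a2a-go/a2asrv"
	)

	func main() {
		base := "http://localhost:8080"

		// Discover the agent via its card at the well-known path.
		resp, err := http.Get(base + a2asrv.WellKnownAgentCardPath)
		if err != nil {
			panic(err)
		}
		defer resp.Body.Close()

		var card a2a.AgentCard
		if err := json.NewDecoder(resp.Body).Decode(&card); err != nil {
			panic(err)
		}

		// Send a JSON-RPC message/send request to the card's URL.
		request := map[string]any{
			"jsonrpc": "2.0",
			"id":      "example-1",
			"method":  "message/send",
			"params": map[string]any{
				"message": map[string]any{
					"role": "user",
					"parts": []map[string]any{
						{"kind": "text", "text": "What is 2+2? Answer with just the number."},
					},
				},
			},
		}
		body, err := json.Marshal(request)
		if err != nil {
			panic(err)
		}

		result, err := http.Post(card.URL, "application/json", bytes.NewReader(body))
		if err != nil {
			panic(err)
		}
		defer result.Body.Close()

		// Print the raw JSON-RPC result (artifacts with text parts, as in the tests).
		var rpcResponse map[string]any
		if err := json.NewDecoder(result.Body).Decode(&rpcResponse); err != nil {
			panic(err)
		}
		fmt.Printf("%v\n", rpcResponse["result"])
	}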
Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
---
version: 2
interactions: []
Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
---
version: 2
interactions:
- id: 0
  request:
    proto: HTTP/1.1
    proto_major: 1
    proto_minor: 1
    content_length: 0
    host: api.openai.com
    body: "{\"messages\":[{\"content\":\"You are a knowledgeable assistant that helps users with various tasks.\\nBe helpful, accurate, and concise in your responses.\\n\",\"role\":\"system\"},{\"content\":\"What is 2+2? Answer with just the number.\",\"role\":\"user\"}],\"model\":\"gpt-3.5-turbo\",\"stream_options\":{\"include_usage\":true},\"stream\":true}"
    url: https://api.openai.com/v1/chat/completions
    method: POST
  response:
    proto: HTTP/2.0
    proto_major: 2
    proto_minor: 0
    content_length: -1
    body: "data: {\"id\":\"chatcmpl-Cb1VVndPQ10wMqZ2h3wHvfsfsveiL\",\"object\":\"chat.completion.chunk\",\"created\":1762939965,\"model\":\"gpt-3.5-turbo-0125\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\",\"content\":\"\",\"refusal\":null},\"logprobs\":null,\"finish_reason\":null}],\"usage\":null,\"obfuscation\":\"OXsApkRT\"}\n\ndata: {\"id\":\"chatcmpl-Cb1VVndPQ10wMqZ2h3wHvfsfsveiL\",\"object\":\"chat.completion.chunk\",\"created\":1762939965,\"model\":\"gpt-3.5-turbo-0125\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"content\":\"4\"},\"logprobs\":null,\"finish_reason\":null}],\"usage\":null,\"obfuscation\":\"zcIC8hejx\"}\n\ndata: {\"id\":\"chatcmpl-Cb1VVndPQ10wMqZ2h3wHvfsfsveiL\",\"object\":\"chat.completion.chunk\",\"created\":1762939965,\"model\":\"gpt-3.5-turbo-0125\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{},\"logprobs\":null,\"finish_reason\":\"stop\"}],\"usage\":null,\"obfuscation\":\"qwrN\"}\n\ndata: {\"id\":\"chatcmpl-Cb1VVndPQ10wMqZ2h3wHvfsfsveiL\",\"object\":\"chat.completion.chunk\",\"created\":1762939965,\"model\":\"gpt-3.5-turbo-0125\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[],\"usage\":{\"prompt_tokens\":47,\"completion_tokens\":1,\"total_tokens\":48,\"prompt_tokens_details\":{\"cached_tokens\":0,\"audio_tokens\":0},\"completion_tokens_details\":{\"reasoning_tokens\":0,\"audio_tokens\":0,\"accepted_prediction_tokens\":0,\"rejected_prediction_tokens\":0}},\"obfuscation\":\"Cx9K0Rkvvc\"}\n\ndata: [DONE]\n\n"
    headers: {}
    status: 200 OK
    code: 200
    duration: 1.255363875s
Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
---
version: 2
interactions:
- id: 0
  request:
    proto: HTTP/1.1
    proto_major: 1
    proto_minor: 1
    content_length: 0
    host: api.openai.com
    body: "{\"messages\":[{\"content\":\"You are a multi-agent system, make sure to answer the user query in the most helpful way possible. You have access to these sub-agents:\\nID: web | Name: web | Description: \\n\\nIMPORTANT: You can ONLY transfer tasks to the agents listed above using their ID. The valid agent IDs are: web. You MUST NOT attempt to transfer to any other agent IDs - doing so will cause system errors.\\n\\nIf you are the best to answer the question according to your description, you can answer it.\\n\\nIf another agent is better for answering the question according to its description, call `transfer_task` function to transfer the question to that agent using the agent's ID. When transferring, do not generate any text other than the function call.\\n\\n\",\"role\":\"system\"},{\"content\":\"You are a knowledgeable assistant that helps users with various tasks.\\nBe helpful, accurate, and concise in your responses.\\n\",\"role\":\"system\"},{\"content\":\"Say hello.\",\"role\":\"user\"}],\"model\":\"gpt-5-mini\",\"stream_options\":{\"include_usage\":true},\"tools\":[{\"function\":{\"name\":\"transfer_task\",\"description\":\"Use this function to transfer a task to the selected team member.\\n You must provide a clear and concise description of the task the member should achieve AND the expected output.\",\"parameters\":{\"additionalProperties\":false,\"properties\":{\"agent\":{\"description\":\"The name of the agent to transfer the task to.\",\"type\":\"string\"},\"expected_output\":{\"description\":\"The expected output from the member (optional).\",\"type\":\"string\"},\"task\":{\"description\":\"A clear and concise description of the task the member should achieve.\",\"type\":\"string\"}},\"required\":[\"agent\",\"task\",\"expected_output\"],\"type\":\"object\"}},\"type\":\"function\"}],\"stream\":true}"
    url: https://api.openai.com/v1/chat/completions
    method: POST
  response:
    proto: HTTP/2.0
    proto_major: 2
    proto_minor: 0
    content_length: -1
    body: "data: {\"id\":\"chatcmpl-Cb1d503ZnlKSuEd0rxNJCKfhTY1i2\",\"object\":\"chat.completion.chunk\",\"created\":1762940435,\"model\":\"gpt-5-mini-2025-08-07\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\",\"content\":\"\",\"refusal\":null},\"finish_reason\":null}],\"usage\":null,\"obfuscation\":\"aITD5\"}\n\ndata: {\"id\":\"chatcmpl-Cb1d503ZnlKSuEd0rxNJCKfhTY1i2\",\"object\":\"chat.completion.chunk\",\"created\":1762940435,\"model\":\"gpt-5-mini-2025-08-07\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hello\"},\"finish_reason\":null}],\"usage\":null,\"obfuscation\":\"Dv\"}\n\ndata: {\"id\":\"chatcmpl-Cb1d503ZnlKSuEd0rxNJCKfhTY1i2\",\"object\":\"chat.completion.chunk\",\"created\":1762940435,\"model\":\"gpt-5-mini-2025-08-07\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"content\":\"!\"},\"finish_reason\":null}],\"usage\":null,\"obfuscation\":\"WcbBz6\"}\n\ndata: {\"id\":\"chatcmpl-Cb1d503ZnlKSuEd0rxNJCKfhTY1i2\",\"object\":\"chat.completion.chunk\",\"created\":1762940435,\"model\":\"gpt-5-mini-2025-08-07\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"content\":\" How\"},\"finish_reason\":null}],\"usage\":null,\"obfuscation\":\"hAl\"}\n\ndata: {\"id\":\"chatcmpl-Cb1d503ZnlKSuEd0rxNJCKfhTY1i2\",\"object\":\"chat.completion.chunk\",\"created\":1762940435,\"model\":\"gpt-5-mini-2025-08-07\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"content\":\" can\"},\"finish_reason\":null}],\"usage\":null,\"obfuscation\":\"wU9\"}\n\ndata: {\"id\":\"chatcmpl-Cb1d503ZnlKSuEd0rxNJCKfhTY1i2\",\"object\":\"chat.completion.chunk\",\"created\":1762940435,\"model\":\"gpt-5-mini-2025-08-07\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"content\":\" I\"},\"finish_reason\":null}],\"usage\":null,\"obfuscation\":\"LRQIZ\"}\n\ndata: {\"id\":\"chatcmpl-Cb1d503ZnlKSuEd0rxNJCKfhTY1i2\",\"object\":\"chat.completion.chunk\",\"created\":1762940435,\"model\":\"gpt-5-mini-2025-08-07\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"content\":\" help\"},\"finish_reason\":null}],\"usage\":null,\"obfuscation\":\"Yp\"}\n\ndata: {\"id\":\"chatcmpl-Cb1d503ZnlKSuEd0rxNJCKfhTY1i2\",\"object\":\"chat.completion.chunk\",\"created\":1762940435,\"model\":\"gpt-5-mini-2025-08-07\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"content\":\" you\"},\"finish_reason\":null}],\"usage\":null,\"obfuscation\":\"Jgk\"}\n\ndata: {\"id\":\"chatcmpl-Cb1d503ZnlKSuEd0rxNJCKfhTY1i2\",\"object\":\"chat.completion.chunk\",\"created\":1762940435,\"model\":\"gpt-5-mini-2025-08-07\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"content\":\" today\"},\"finish_reason\":null}],\"usage\":null,\"obfuscation\":\"h\"}\n\ndata: {\"id\":\"chatcmpl-Cb1d503ZnlKSuEd0rxNJCKfhTY1i2\",\"object\":\"chat.completion.chunk\",\"created\":1762940435,\"model\":\"gpt-5-mini-2025-08-07\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"content\":\"?\"},\"finish_reason\":null}],\"usage\":null,\"obfuscation\":\"gRANiH\"}\n\ndata: {\"id\":\"chatcmpl-Cb1d503ZnlKSuEd0rxNJCKfhTY1i2\",\"object\":\"chat.completion.chunk\",\"created\":1762940435,\"model\":\"gpt-5-mini-2025-08-07\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{},\"finish_reason\":\"stop\"}],\"usage\":null,\"obfuscation\":\"U\"}\n\ndata: {\"id\":\"chatcmpl-Cb1d503ZnlKSuEd0rxNJCKfhTY1i2\",\"object\":\"chat.completion.chunk\",\"created\":1762940435,\"model\":\"gpt-5-mini-2025-08-07\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[],\"usage\":{\"prompt_tokens\":377,\"completion_tokens\":18,\"total_tokens\":395,\"prompt_tokens_details\":{\"cached_tokens\":0,\"audio_tokens\":0},\"completion_tokens_details\":{\"reasoning_tokens\":0,\"audio_tokens\":0,\"accepted_prediction_tokens\":0,\"rejected_prediction_tokens\":0}},\"obfuscation\":\"Y9Pz\"}\n\ndata: [DONE]\n\n"
    headers: {}
    status: 200 OK
    code: 200
    duration: 1.981831959s
