
Commit 4d929d4

Refactor processStreamWithTags to use single params object with logger parameter
- Changed function signature from 7 parameters to single params object
- Added destructuring at top of function body
- Updated all callers in tests and stream-parser
- Added Logger type import from @codebuff/types/logger

🤖 Generated with Codebuff

Co-Authored-By: Codebuff <noreply@codebuff.com>
1 parent 04d7823 commit 4d929d4
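
The shape of the refactor: positional arguments become named properties on a single object, and the required logger is one of those properties. The sketch below is illustrative only; the simplified Handler and logger types stand in for the project's real types, which appear in the backend/src/xml-stream-parser.ts diff further down.

// Illustrative sketch only (simplified types); the hypothetical names
// *Positional and *Object contrast the signatures before and after.
type LogFn = (...args: unknown[]) => void
type Handler = {
  onTagStart: (tagName: string, attributes: Record<string, string>) => void
  onTagEnd: (tagName: string, params: Record<string, any>) => void
}

// Before: positional parameters. Callers must keep the argument order in
// sync, and inserting a new argument (such as a logger) shifts the rest.
declare function processStreamWithTagsPositional(
  stream: AsyncGenerator<unknown, string | null>,
  processors: Record<string, Handler>,
  defaultProcessor: (toolName: string) => Handler,
  onError: (tagName: string, errorMessage: string) => void,
  onResponseChunk: (chunk: unknown) => void,
  loggerOptions?: { userId?: string; model?: string; agentName?: string },
): AsyncGenerator<unknown, string | null>

// After: a single params object. Every argument is named at the call site,
// so the new required `logger` field slots in without reordering anything.
declare function processStreamWithTagsObject(params: {
  stream: AsyncGenerator<unknown, string | null>
  processors: Record<string, Handler>
  defaultProcessor: (toolName: string) => Handler
  onError: (tagName: string, errorMessage: string) => void
  onResponseChunk: (chunk: unknown) => void
  logger: Record<'debug' | 'info' | 'warn' | 'error', LogFn>
  loggerOptions?: { userId?: string; model?: string; agentName?: string }
}): AsyncGenerator<unknown, string | null>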

3 files changed: +72 -40 lines changed

backend/src/__tests__/xml-stream-parser.test.ts

Lines changed: 45 additions & 24 deletions

@@ -5,6 +5,15 @@ import { describe, expect, it } from 'bun:test'
 import { globalStopSequence } from '../tools/constants'
 import { processStreamWithTags } from '../xml-stream-parser'
 
+import type { Logger } from '@codebuff/types/logger'
+
+const logger: Logger = {
+  debug: () => {},
+  info: () => {},
+  warn: () => {},
+  error: () => {},
+}
+
 describe('processStreamWithTags', () => {
   async function* createMockStream(chunks: string[]) {
     for (const chunk of chunks) {
@@ -52,13 +61,14 @@ describe('processStreamWithTags', () => {
       }
     }
 
-    for await (const chunk of processStreamWithTags(
+    for await (const chunk of processStreamWithTags({
       stream,
       processors,
       defaultProcessor,
       onError,
       onResponseChunk,
-    )) {
+      logger,
+    })) {
       if (chunk.type === 'text') {
         result.push(chunk.text)
       }
@@ -119,13 +129,14 @@ describe('processStreamWithTags', () => {
       }
     }
 
-    for await (const chunk of processStreamWithTags(
+    for await (const chunk of processStreamWithTags({
       stream,
       processors,
       defaultProcessor,
       onError,
       onResponseChunk,
-    )) {
+      logger,
+    })) {
       if (chunk.type === 'text') {
         result.push(chunk.text)
       }
@@ -195,13 +206,14 @@ describe('processStreamWithTags', () => {
       }
     }
 
-    for await (const chunk of processStreamWithTags(
+    for await (const chunk of processStreamWithTags({
       stream,
       processors,
       defaultProcessor,
       onError,
       onResponseChunk,
-    )) {
+      logger,
+    })) {
       if (chunk.type === 'text') {
         result.push(chunk.text)
       }
@@ -270,13 +282,14 @@ describe('processStreamWithTags', () => {
       }
     }
 
-    for await (const chunk of processStreamWithTags(
+    for await (const chunk of processStreamWithTags({
       stream,
       processors,
       defaultProcessor,
       onError,
       onResponseChunk,
-    )) {
+      logger,
+    })) {
       if (chunk.type === 'text') {
         result.push(chunk.text)
       }
@@ -336,13 +349,14 @@ describe('processStreamWithTags', () => {
       }
     }
 
-    for await (const chunk of processStreamWithTags(
+    for await (const chunk of processStreamWithTags({
       stream,
       processors,
       defaultProcessor,
       onError,
       onResponseChunk,
-    )) {
+      logger,
+    })) {
       if (chunk.type === 'text') {
         result.push(chunk.text)
       }
@@ -401,13 +415,14 @@ describe('processStreamWithTags', () => {
       }
     }
 
-    for await (const chunk of processStreamWithTags(
+    for await (const chunk of processStreamWithTags({
       stream,
       processors,
       defaultProcessor,
       onError,
       onResponseChunk,
-    )) {
+      logger,
+    })) {
       if (chunk.type === 'text') {
         result.push(chunk.text)
       }
@@ -473,13 +488,14 @@ describe('processStreamWithTags', () => {
       }
     }
 
-    for await (const chunk of processStreamWithTags(
+    for await (const chunk of processStreamWithTags({
       stream,
       processors,
       defaultProcessor,
       onError,
       onResponseChunk,
-    )) {
+      logger,
+    })) {
       if (chunk.type === 'text') {
         result.push(chunk.text)
       }
@@ -539,13 +555,14 @@ describe('processStreamWithTags', () => {
       }
     }
 
-    for await (const chunk of processStreamWithTags(
+    for await (const chunk of processStreamWithTags({
       stream,
       processors,
       defaultProcessor,
       onError,
       onResponseChunk,
-    )) {
+      logger,
+    })) {
       if (chunk.type === 'text') {
         result.push(chunk.text)
       }
@@ -595,13 +612,14 @@ describe('processStreamWithTags', () => {
       }
     }
 
-    for await (const chunk of processStreamWithTags(
+    for await (const chunk of processStreamWithTags({
       stream,
       processors,
       defaultProcessor,
       onError,
       onResponseChunk,
-    )) {
+      logger,
+    })) {
       if (chunk.type === 'text') {
         result.push(chunk.text)
       }
@@ -637,13 +655,14 @@ describe('processStreamWithTags', () => {
       }
     }
 
-    for await (const chunk of processStreamWithTags(
+    for await (const chunk of processStreamWithTags({
       stream,
       processors,
       defaultProcessor,
       onError,
       onResponseChunk,
-    )) {
+      logger,
+    })) {
       if (chunk.type === 'text') {
         result.push(chunk.text)
       }
@@ -697,13 +716,14 @@ describe('processStreamWithTags', () => {
      }
     }
 
-    for await (const chunk of processStreamWithTags(
+    for await (const chunk of processStreamWithTags({
       stream,
       processors,
       defaultProcessor,
       onError,
       onResponseChunk,
-    )) {
+      logger,
+    })) {
       if (chunk.type === 'text') {
         result.push(chunk.text)
       }
@@ -767,13 +787,14 @@ describe('processStreamWithTags', () => {
       }
     }
 
-    for await (const chunk of processStreamWithTags(
+    for await (const chunk of processStreamWithTags({
       stream,
       processors,
       defaultProcessor,
       onError,
       onResponseChunk,
-    )) {
+      logger,
+    })) {
       if (chunk.type === 'text') {
         result.push(chunk.text)
       }
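
The tests satisfy the new required logger field with an inline no-op stub. If more test files end up needing the same stub, a small factory keeps it in one place. This is a sketch, not part of the commit, and it assumes only what the stub above shows: that Logger requires at least debug, info, warn, and error; the real @codebuff/types/logger interface may carry more.

// Hypothetical helper (not in this commit): a reusable no-op logger for tests.
// LoggerLike mirrors the inline stub above rather than the real Logger type.
type LogFn = (...args: unknown[]) => void

interface LoggerLike {
  debug: LogFn
  info: LogFn
  warn: LogFn
  error: LogFn
}

export function createNoopLogger(): LoggerLike {
  const noop: LogFn = () => {}
  return { debug: noop, info: noop, warn: noop, error: noop }
}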

backend/src/tools/stream-parser.ts

Lines changed: 7 additions & 6 deletions

@@ -230,17 +230,17 @@ export async function processStreamWithTools(options: {
     }
   }
 
-  const streamWithTags = processStreamWithTags(
+  const streamWithTags = processStreamWithTags({
     stream,
-    Object.fromEntries([
+    processors: Object.fromEntries([
       ...toolNames.map((toolName) => [toolName, toolCallback(toolName)]),
       ...Object.keys(fileContext.customToolDefinitions).map((toolName) => [
         toolName,
         customToolCallback(toolName),
       ]),
     ]),
-    customToolCallback,
-    (toolName, error) => {
+    defaultProcessor: customToolCallback,
+    onError: (toolName, error) => {
       const toolResult: ToolResultPart = {
         type: 'tool-result',
         toolName,
@@ -251,12 +251,13 @@ export async function processStreamWithTools(options: {
       toolResultsToAddAfterStream.push(cloneDeep(toolResult))
     },
     onResponseChunk,
-    {
+    logger,
+    loggerOptions: {
       userId,
       model: agentTemplate.model,
       agentName: agentTemplate.id,
     },
-  )
+  })
 
   let reasoning = false
   for await (const chunk of streamWithTags) {
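
Beyond naming the existing arguments (defaultProcessor, onError), the object form lets the optional loggerOptions metadata read as a labeled field instead of an anonymous trailing object. The general pattern, sketched below with hypothetical names rather than project code: optional properties can be destructured with defaults, so callers pass only what they have.

// Generic illustration with made-up names; not project code.
function formatRunLabel(params: {
  model: string
  loggerOptions?: { userId?: string; agentName?: string }
}): string {
  const { model, loggerOptions = {} } = params
  const { userId = 'anonymous', agentName = 'unknown-agent' } = loggerOptions
  return `${agentName} ran ${model} for ${userId}`
}

// Callers name each field; omitted options fall back to defaults.
formatRunLabel({ model: 'example-model' })
formatRunLabel({ model: 'example-model', loggerOptions: { userId: 'u_123' } })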

backend/src/xml-stream-parser.ts

Lines changed: 20 additions & 10 deletions

@@ -7,15 +7,14 @@ import {
   toolNameParam,
 } from '@codebuff/common/tools/constants'
 
-import { logger } from './util/logger'
-
 import type { StreamChunk } from './llm-apis/vercel-ai-sdk/ai-sdk'
 import type { Model } from '@codebuff/common/old-constants'
 import type {
   PrintModeError,
   PrintModeText,
   PrintModeToolCall,
 } from '@codebuff/common/types/print-mode'
+import type { Logger } from '@codebuff/types/logger'
 
 const toolExtractionPattern = new RegExp(
   `${startToolTag}(.*?)${endToolTag}`,
@@ -24,29 +23,40 @@
 
 const completionSuffix = `${JSON.stringify(endsAgentStepParam)}: true\n}${endToolTag}`
 
-export async function* processStreamWithTags(
-  stream: AsyncGenerator<StreamChunk, string | null>,
+export async function* processStreamWithTags(params: {
+  stream: AsyncGenerator<StreamChunk, string | null>
   processors: Record<
     string,
     {
       onTagStart: (tagName: string, attributes: Record<string, string>) => void
       onTagEnd: (tagName: string, params: Record<string, any>) => void
     }
-  >,
+  >
   defaultProcessor: (toolName: string) => {
     onTagStart: (tagName: string, attributes: Record<string, string>) => void
     onTagEnd: (tagName: string, params: Record<string, any>) => void
-  },
-  onError: (tagName: string, errorMessage: string) => void,
+  }
+  onError: (tagName: string, errorMessage: string) => void
   onResponseChunk: (
     chunk: PrintModeText | PrintModeToolCall | PrintModeError,
-  ) => void,
+  ) => void
+  logger: Logger
   loggerOptions?: {
     userId?: string
     model?: Model
     agentName?: string
-  },
-): AsyncGenerator<StreamChunk, string | null> {
+  }
+}): AsyncGenerator<StreamChunk, string | null> {
+  const {
+    stream,
+    processors,
+    defaultProcessor,
+    onError,
+    onResponseChunk,
+    logger,
+    loggerOptions,
+  } = params
+
   let streamCompleted = false
   let buffer = ''
   let autocompleted = false
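
The return type AsyncGenerator<StreamChunk, string | null> is unchanged. One subtlety for callers: for await...of, as used in the tests and in stream-parser.ts, only observes the yielded chunks and discards the generator's final return value, so code that needs that string | null must drive the iterator manually. A generic sketch, not project code:

// Generic sketch: collect both the yielded values and the return value that
// `for await...of` would otherwise drop.
async function drain<T, R>(
  gen: AsyncGenerator<T, R>,
): Promise<{ items: T[]; result: R }> {
  const items: T[] = []
  while (true) {
    const next = await gen.next()
    if (next.done) {
      return { items, result: next.value }
    }
    items.push(next.value)
  }
}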
