
Commit e206713

Group response formatting methods together
1 parent ca03ead commit e206713

1 file changed: +37 -38 lines changed

packages/vertexai/src/methods/chrome-adapter.ts

Lines changed: 37 additions & 38 deletions
@@ -87,19 +87,6 @@ export class ChromeAdapter {
     const result = await session.prompt(prompt.content);
     return ChromeAdapter.toResponse(result);
   }
-  private static toResponse(text: string): Response {
-    return {
-      json: async () => ({
-        candidates: [
-          {
-            content: {
-              parts: [{ text }]
-            }
-          }
-        ]
-      })
-    } as Response;
-  }
   async generateContentStreamOnDevice(
     request: GenerateContentRequest
   ): Promise<Response> {
@@ -115,31 +102,6 @@ export class ChromeAdapter {
     const stream = await session.promptStreaming(prompt.content);
     return ChromeAdapter.toStreamResponse(stream);
   }
-  // Formats string stream returned by Chrome as SSE returned by Vertex.
-  private static async toStreamResponse(
-    stream: ReadableStream<string>
-  ): Promise<Response> {
-    const encoder = new TextEncoder();
-    return {
-      body: stream.pipeThrough(
-        new TransformStream({
-          transform(chunk, controller) {
-            const json = JSON.stringify({
-              candidates: [
-                {
-                  content: {
-                    role: 'model',
-                    parts: [{ text: chunk }]
-                  }
-                }
-              ]
-            });
-            controller.enqueue(encoder.encode(`data: ${json}\n\n`));
-          }
-        })
-      )
-    } as Response;
-  }
   private static isOnDeviceRequest(request: GenerateContentRequest): boolean {
     // Returns false if the prompt is empty.
     if (request.contents.length === 0) {
@@ -202,4 +164,41 @@ export class ChromeAdapter {
     this.oldSession = newSession;
     return newSession;
   }
+  private static toResponse(text: string): Response {
+    return {
+      json: async () => ({
+        candidates: [
+          {
+            content: {
+              parts: [{ text }]
+            }
+          }
+        ]
+      })
+    } as Response;
+  }
+  private static toStreamResponse(
+    stream: ReadableStream<string>
+  ): Response {
+    const encoder = new TextEncoder();
+    return {
+      body: stream.pipeThrough(
+        new TransformStream({
+          transform(chunk, controller) {
+            const json = JSON.stringify({
+              candidates: [
+                {
+                  content: {
+                    role: 'model',
+                    parts: [{ text: chunk }]
+                  }
+                }
+              ]
+            });
+            controller.enqueue(encoder.encode(`data: ${json}\n\n`));
+          }
+        })
+      )
+    } as Response;
+  }
 }
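
For context, a minimal sketch of how a caller might consume the Vertex-shaped Response objects these helpers produce. This is not part of the commit: imports and adapter construction are omitted, the request literal follows the standard Vertex `contents`/`parts` shape and is hypothetical, and the SSE parsing assumes each read() delivers whole "data: ...\n\n" events, which a robust consumer should not rely on.

// Sketch only: consuming the stream formatted by toStreamResponse.
declare const adapter: ChromeAdapter;

async function readOnDeviceStream(): Promise<void> {
  // Hypothetical request; shape assumed from the Vertex GenerateContentRequest type.
  const request: GenerateContentRequest = {
    contents: [{ role: 'user', parts: [{ text: 'Hello' }] }]
  };
  const response = await adapter.generateContentStreamOnDevice(request);
  // toStreamResponse places the SSE-framed bytes on response.body.
  const reader = response
    .body!.pipeThrough(new TextDecoderStream())
    .getReader();
  for (;;) {
    const { done, value } = await reader.read();
    if (done || value === undefined) {
      break;
    }
    // Assumes each decoded chunk contains complete "data: {...}" lines.
    for (const line of value.split('\n')) {
      if (line.startsWith('data: ')) {
        const event = JSON.parse(line.slice('data: '.length));
        console.log(event.candidates[0].content.parts[0].text);
      }
    }
  }
}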
