@@ -105,27 +105,6 @@ export class ChromeAdapter {
     const text = await session.prompt(messages);
     return ChromeAdapter.toResponse(text);
   }
-
-  /**
-   * Formats string returned by Chrome as a {@link Response} returned by Vertex.
-   */
-  private static toResponse(text: string): Response {
-    return {
-      json: async () => ({
-        candidates: [
-          {
-            content: {
-              parts: [{ text }]
-            }
-          }
-        ]
-      })
-    } as Response;
-  }
-
-  /**
-   * Generates a stream of content.
-   */
   async generateContentStreamOnDevice(
     request: GenerateContentRequest
   ): Promise<Response> {
@@ -137,38 +116,6 @@ export class ChromeAdapter {
     const stream = await session.promptStreaming(messages);
     return ChromeAdapter.toStreamResponse(stream);
   }
-
-  /**
-   * Formats string stream returned by Chrome as SSE returned by Vertex.
-   */
-  private static async toStreamResponse(
-    stream: ReadableStream<string>
-  ): Promise<Response> {
-    const encoder = new TextEncoder();
-    return {
-      body: stream.pipeThrough(
-        new TransformStream({
-          transform(chunk, controller) {
-            const json = JSON.stringify({
-              candidates: [
-                {
-                  content: {
-                    role: 'model',
-                    parts: [{ text: chunk }]
-                  }
-                }
-              ]
-            });
-            controller.enqueue(encoder.encode(`data: ${json}\n\n`));
-          }
-        })
-      )
-    } as Response;
-  }
-
-  /**
-   * Asserts inference for the given request can be performed by an on-device model.
-   */
   private static isOnDeviceRequest(request: GenerateContentRequest): boolean {
     // Returns false if the prompt is empty.
     if (request.contents.length === 0) {
@@ -273,4 +220,47 @@ export class ChromeAdapter {
     this.oldSession = newSession;
     return newSession;
   }
+
+  /**
+   * Formats string returned by Chrome as a {@link Response} returned by Vertex.
+   */
+  private static toResponse(text: string): Response {
+    return {
+      json: async () => ({
+        candidates: [
+          {
+            content: {
+              parts: [{ text }]
+            }
+          }
+        ]
+      })
+    } as Response;
+  }
+
+  /**
+   * Formats string stream returned by Chrome as SSE returned by Vertex.
+   */
+  private static toStreamResponse(stream: ReadableStream<string>): Response {
+    const encoder = new TextEncoder();
+    return {
+      body: stream.pipeThrough(
+        new TransformStream({
+          transform(chunk, controller) {
+            const json = JSON.stringify({
+              candidates: [
+                {
+                  content: {
+                    role: 'model',
+                    parts: [{ text: chunk }]
+                  }
+                }
+              ]
+            });
+            controller.enqueue(encoder.encode(`data: ${json}\n\n`));
+          }
+        })
+      )
+    } as Response;
+  }
 }
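
Context for the two helpers moved above (an illustrative sketch, not part of this diff): toResponse wraps Chrome's plain-text output in a fetch-style Response whose json() resolves to a Vertex-shaped candidates array, and toStreamResponse wraps Chrome's text stream in a Response whose body emits one "data: {...}\n\n" SSE frame per chunk. The standalone TypeScript below shows how such a shimmed Response could be read back; the names readText and readStreamText are hypothetical, and it assumes each read from the body yields exactly one SSE frame, as enqueued by the TransformStream above.

// Illustrative consumer-side sketch (assumed usage, not code from this PR).

// Non-streaming: json() resolves to a Vertex-style payload.
async function readText(response: Response): Promise<string> {
  const payload = await response.json();
  return payload.candidates[0].content.parts[0].text;
}

// Streaming: decode each byte chunk, strip the SSE "data: " prefix, and
// accumulate the candidate text. Assumes one frame per chunk, as enqueued above.
async function readStreamText(response: Response): Promise<string> {
  const decoder = new TextDecoder();
  const reader = response.body!.getReader();
  let text = '';
  while (true) {
    const { done, value } = await reader.read();
    if (done) {
      break;
    }
    const frame = decoder.decode(value).replace(/^data: /, '').trim();
    const payload = JSON.parse(frame);
    text += payload.candidates[0].content.parts[0].text;
  }
  return text;
}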