
Commit 6e0e303

test(ai): Fix node tests (#9350)

1 parent bc5a7c4
15 files changed: +150 −97 lines

common/api-review/ai.api.md

Lines changed: 2 additions & 0 deletions
@@ -157,6 +157,8 @@ export interface ChromeAdapter {
     generateContent(request: GenerateContentRequest): Promise<Response>;
     generateContentStream(request: GenerateContentRequest): Promise<Response>;
     isAvailable(request: GenerateContentRequest): Promise<boolean>;
+    // @internal (undocumented)
+    mode: InferenceMode;
 }
 
 // @public
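
This new internal `mode` field on ChromeAdapter is what lets the test files further down swap hand-built ChromeAdapterImpl instances for one shared plain-object double. That shared fakeChromeAdapter helper (imported from test-utils/get-fake-firebase-services) is not included in this excerpt; the following is only a minimal sketch of what such a double could look like, assuming the import paths and a canned JSON Response body:

// Hypothetical sketch of a ChromeAdapter test double; the real
// fakeChromeAdapter helper in test-utils/get-fake-firebase-services may differ.
import {
  ChromeAdapter,
  GenerateContentRequest,
  InferenceMode
} from '../src/public-types';

// Every method resolves to a canned fetch-style Response; individual tests can
// spread-override the new mode field, e.g. { ...fakeChromeAdapter, mode: ... }.
const emptyResponse = (): Response =>
  new Response(JSON.stringify({ candidates: [] }), {
    headers: { 'Content-Type': 'application/json' }
  });

export const fakeChromeAdapter: ChromeAdapter = {
  mode: InferenceMode.PREFER_ON_DEVICE,
  isAvailable: async (_request: GenerateContentRequest) => false,
  generateContent: async (_request: GenerateContentRequest) => emptyResponse(),
  generateContentStream: async (_request: GenerateContentRequest) => emptyResponse()
};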

Lines changed: 48 additions & 0 deletions

@@ -0,0 +1,48 @@
+/**
+ * @license
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import { getAI, getGenerativeModel } from './api';
+import { expect } from 'chai';
+import { InferenceMode } from './public-types';
+import { getFullApp } from '../test-utils/get-fake-firebase-services';
+import { DEFAULT_HYBRID_IN_CLOUD_MODEL } from './constants';
+import { factory } from './factory-browser';
+
+/**
+ * Browser-only top level API tests using a factory that provides
+ * a ChromeAdapter.
+ */
+describe('Top level API', () => {
+  describe('getAI()', () => {
+    it('getGenerativeModel with HybridParams sets a default model', () => {
+      const ai = getAI(getFullApp({ apiKey: 'key', appId: 'id' }, factory));
+      const genModel = getGenerativeModel(ai, {
+        mode: InferenceMode.ONLY_ON_DEVICE
+      });
+      expect(genModel.model).to.equal(
+        `models/${DEFAULT_HYBRID_IN_CLOUD_MODEL}`
+      );
+    });
+    it('getGenerativeModel with HybridParams honors a model override', () => {
+      const ai = getAI(getFullApp({ apiKey: 'key', appId: 'id' }, factory));
+      const genModel = getGenerativeModel(ai, {
+        mode: InferenceMode.PREFER_ON_DEVICE,
+        inCloudParams: { model: 'my-model' }
+      });
+      expect(genModel.model).to.equal('models/my-model');
+    });
+  });
+});
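
For context, the hybrid-params surface these browser tests exercise looks roughly like this from an application's point of view (a sketch only; the config values are placeholders and the modular firebase/ai entry point is assumed):

import { initializeApp } from 'firebase/app';
import { getAI, getGenerativeModel, InferenceMode } from 'firebase/ai';

// Placeholder config values; a real app would use its own project settings.
const app = initializeApp({ apiKey: 'placeholder', appId: 'placeholder' });
const ai = getAI(app);

// Prefer the on-device model when it is available; when no inCloudParams
// override is given, the SDK fills in the default in-cloud hybrid model,
// which is what the tests above assert.
const model = getGenerativeModel(ai, { mode: InferenceMode.PREFER_ON_DEVICE });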

packages/ai/src/api.test.ts

Lines changed: 1 addition & 16 deletions
@@ -29,7 +29,7 @@ import { AI } from './public-types';
 import { GenerativeModel } from './models/generative-model';
 import { GoogleAIBackend, VertexAIBackend } from './backend';
 import { getFullApp } from '../test-utils/get-fake-firebase-services';
-import { AI_TYPE, DEFAULT_HYBRID_IN_CLOUD_MODEL } from './constants';
+import { AI_TYPE } from './constants';
 
 const fakeAI: AI = {
   app: {
@@ -144,21 +144,6 @@ describe('Top level API', () => {
     expect(genModel).to.be.an.instanceOf(GenerativeModel);
     expect(genModel.model).to.equal('publishers/google/models/my-model');
   });
-  it('getGenerativeModel with HybridParams sets a default model', () => {
-    const genModel = getGenerativeModel(fakeAI, {
-      mode: 'only_on_device'
-    });
-    expect(genModel.model).to.equal(
-      `publishers/google/models/${DEFAULT_HYBRID_IN_CLOUD_MODEL}`
-    );
-  });
-  it('getGenerativeModel with HybridParams honors a model override', () => {
-    const genModel = getGenerativeModel(fakeAI, {
-      mode: 'prefer_on_device',
-      inCloudParams: { model: 'my-model' }
-    });
-    expect(genModel.model).to.equal('publishers/google/models/my-model');
-  });
   it('getImagenModel throws if no model is provided', () => {
     try {
       getImagenModel(fakeAI, {} as ImagenModelParams);

packages/ai/src/factory-node.ts

Lines changed: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
+/**
+ * @license
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import {
+  ComponentContainer,
+  InstanceFactoryOptions
+} from '@firebase/component';
+import { AIError } from './errors';
+import { decodeInstanceIdentifier } from './helpers';
+import { AIService } from './service';
+import { AIErrorCode } from './types';
+
+export function factory(
+  container: ComponentContainer,
+  { instanceIdentifier }: InstanceFactoryOptions
+): AIService {
+  if (!instanceIdentifier) {
+    throw new AIError(
+      AIErrorCode.ERROR,
+      'AIService instance identifier is undefined.'
+    );
+  }
+
+  const backend = decodeInstanceIdentifier(instanceIdentifier);
+
+  // getImmediate for FirebaseApp will always succeed
+  const app = container.getProvider('app').getImmediate();
+  const auth = container.getProvider('auth-internal');
+  const appCheckProvider = container.getProvider('app-check-internal');
+
+  return new AIService(app, backend, auth, appCheckProvider);
+}
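
The node entry point in the next diff plugs this factory straight into component registration. To illustrate what the factory does at runtime, here is a rough sketch that drives it against a hand-built ComponentContainer; the fake 'app' component and the use of encodeInstanceIdentifier below are assumptions about the surrounding codebase, not part of this commit:

import { Component, ComponentContainer, ComponentType } from '@firebase/component';
import { FirebaseApp } from '@firebase/app';
import { factory } from './factory-node';
// Assumed counterpart of the decodeInstanceIdentifier helper used above.
import { encodeInstanceIdentifier } from './helpers';
import { VertexAIBackend } from './backend';

// A minimal FirebaseApp-shaped object so getProvider('app').getImmediate()
// inside factory() has something to resolve.
const fakeApp = {
  name: 'DEFAULT',
  options: { apiKey: 'key', projectId: 'my-project', appId: 'app-id' },
  automaticDataCollectionEnabled: true
} as FirebaseApp;

const container = new ComponentContainer('test');
container.addComponent(
  new Component('app', () => fakeApp, ComponentType.PUBLIC)
);

// Encode a backend into the instance identifier (assumed helper), then let the
// factory build the AIService the same way the component framework would.
const service = factory(container, {
  instanceIdentifier: encodeInstanceIdentifier(new VertexAIBackend('us-central1'))
});
console.log(service.backend.backendType);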

packages/ai/src/index.node.ts

Lines changed: 4 additions & 24 deletions
@@ -22,36 +22,16 @@
  */
 
 import { registerVersion, _registerComponent } from '@firebase/app';
-import { AIService } from './service';
 import { AI_TYPE } from './constants';
 import { Component, ComponentType } from '@firebase/component';
 import { name, version } from '../package.json';
-import { decodeInstanceIdentifier } from './helpers';
-import { AIError } from './errors';
-import { AIErrorCode } from './public-types';
+import { factory } from './factory-node';
 
 function registerAI(): void {
   _registerComponent(
-    new Component(
-      AI_TYPE,
-      (container, { instanceIdentifier }) => {
-        if (!instanceIdentifier) {
-          throw new AIError(
-            AIErrorCode.ERROR,
-            'AIService instance identifier is undefined.'
-          );
-        }
-
-        const backend = decodeInstanceIdentifier(instanceIdentifier);
-
-        // getImmediate for FirebaseApp will always succeed
-        const app = container.getProvider('app').getImmediate();
-        const auth = container.getProvider('auth-internal');
-        const appCheckProvider = container.getProvider('app-check-internal');
-        return new AIService(app, backend, auth, appCheckProvider);
-      },
-      ComponentType.PUBLIC
-    ).setMultipleInstances(true)
+    new Component(AI_TYPE, factory, ComponentType.PUBLIC).setMultipleInstances(
+      true
+    )
   );
 
   registerVersion(name, version, 'node');

packages/ai/src/methods/chat-session.test.ts

Lines changed: 2 additions & 8 deletions
@@ -20,11 +20,11 @@ import { match, restore, stub, useFakeTimers } from 'sinon';
 import sinonChai from 'sinon-chai';
 import chaiAsPromised from 'chai-as-promised';
 import * as generateContentMethods from './generate-content';
-import { Content, GenerateContentStreamResult, InferenceMode } from '../types';
+import { Content, GenerateContentStreamResult } from '../types';
 import { ChatSession } from './chat-session';
 import { ApiSettings } from '../types/internal';
 import { VertexAIBackend } from '../backend';
-import { ChromeAdapterImpl } from './chrome-adapter';
+import { fakeChromeAdapter } from '../../test-utils/get-fake-firebase-services';
 
 use(sinonChai);
 use(chaiAsPromised);
@@ -37,12 +37,6 @@ const fakeApiSettings: ApiSettings = {
   backend: new VertexAIBackend()
 };
 
-const fakeChromeAdapter = new ChromeAdapterImpl(
-  // @ts-expect-error
-  undefined,
-  InferenceMode.PREFER_ON_DEVICE
-);
-
 describe('ChatSession', () => {
   afterEach(() => {
     restore();

packages/ai/src/methods/count-tokens.test.ts

Lines changed: 5 additions & 12 deletions
@@ -27,7 +27,7 @@ import { ApiSettings } from '../types/internal';
 import { Task } from '../requests/request';
 import { mapCountTokensRequest } from '../googleai-mappers';
 import { GoogleAIBackend, VertexAIBackend } from '../backend';
-import { ChromeAdapterImpl } from './chrome-adapter';
+import { fakeChromeAdapter } from '../../test-utils/get-fake-firebase-services';
 
 use(sinonChai);
 use(chaiAsPromised);
@@ -52,12 +52,6 @@ const fakeRequestParams: CountTokensRequest = {
   contents: [{ parts: [{ text: 'hello' }], role: 'user' }]
 };
 
-const fakeChromeAdapter = new ChromeAdapterImpl(
-  // @ts-expect-error
-  undefined,
-  InferenceMode.PREFER_ON_DEVICE
-);
-
 describe('countTokens()', () => {
   afterEach(() => {
     restore();
@@ -197,11 +191,10 @@ describe('countTokens()', () => {
     });
   });
   it('throws if mode is ONLY_ON_DEVICE', async () => {
-    const chromeAdapter = new ChromeAdapterImpl(
-      // @ts-expect-error
-      undefined,
-      InferenceMode.ONLY_ON_DEVICE
-    );
+    const chromeAdapter = {
+      ...fakeChromeAdapter,
+      mode: InferenceMode.ONLY_ON_DEVICE
+    };
     await expect(
       countTokens(fakeApiSettings, 'model', fakeRequestParams, chromeAdapter)
     ).to.be.rejectedWith(

packages/ai/src/methods/count-tokens.ts

Lines changed: 1 addition & 4 deletions
@@ -28,7 +28,6 @@ import { ApiSettings } from '../types/internal';
 import * as GoogleAIMapper from '../googleai-mappers';
 import { BackendType } from '../public-types';
 import { ChromeAdapter } from '../types/chrome-adapter';
-import { ChromeAdapterImpl } from './chrome-adapter';
 
 export async function countTokensOnCloud(
   apiSettings: ApiSettings,
@@ -61,9 +60,7 @@ export async function countTokens(
   chromeAdapter?: ChromeAdapter,
   requestOptions?: RequestOptions
 ): Promise<CountTokensResponse> {
-  if (
-    (chromeAdapter as ChromeAdapterImpl)?.mode === InferenceMode.ONLY_ON_DEVICE
-  ) {
+  if (chromeAdapter?.mode === InferenceMode.ONLY_ON_DEVICE) {
     throw new AIError(
       AIErrorCode.UNSUPPORTED,
       'countTokens() is not supported for on-device models.'

packages/ai/src/methods/generate-content.test.ts

Lines changed: 1 addition & 8 deletions
@@ -28,7 +28,6 @@ import {
   HarmBlockMethod,
   HarmBlockThreshold,
   HarmCategory,
-  InferenceMode,
   Language,
   Outcome
 } from '../types';
@@ -37,17 +36,11 @@ import { Task } from '../requests/request';
 import { AIError } from '../api';
 import { mapGenerateContentRequest } from '../googleai-mappers';
 import { GoogleAIBackend, VertexAIBackend } from '../backend';
-import { ChromeAdapterImpl } from './chrome-adapter';
+import { fakeChromeAdapter } from '../../test-utils/get-fake-firebase-services';
 
 use(sinonChai);
 use(chaiAsPromised);
 
-const fakeChromeAdapter = new ChromeAdapterImpl(
-  // @ts-expect-error
-  undefined,
-  InferenceMode.PREFER_ON_DEVICE
-);
-
 const fakeApiSettings: ApiSettings = {
   apiKey: 'key',
   project: 'my-project',

packages/ai/src/models/generative-model.test.ts

Lines changed: 4 additions & 9 deletions
@@ -20,7 +20,8 @@ import {
   FunctionCallingMode,
   AI,
   InferenceMode,
-  AIErrorCode
+  AIErrorCode,
+  ChromeAdapter
 } from '../public-types';
 import * as request from '../requests/request';
 import { SinonStub, match, restore, stub } from 'sinon';
@@ -30,9 +31,9 @@ import {
 } from '../../test-utils/mock-response';
 import sinonChai from 'sinon-chai';
 import { VertexAIBackend } from '../backend';
-import { ChromeAdapterImpl } from '../methods/chrome-adapter';
 import { AIError } from '../errors';
 import chaiAsPromised from 'chai-as-promised';
+import { fakeChromeAdapter } from '../../test-utils/get-fake-firebase-services';
 
 use(sinonChai);
 use(chaiAsPromised);
@@ -51,12 +52,6 @@ const fakeAI: AI = {
   location: 'us-central1'
 };
 
-const fakeChromeAdapter = new ChromeAdapterImpl(
-  // @ts-expect-error
-  undefined,
-  InferenceMode.PREFER_ON_DEVICE
-);
-
 describe('GenerativeModel', () => {
   it('passes params through to generateContent', async () => {
     const genModel = new GenerativeModel(
@@ -436,7 +431,7 @@ describe('GenerativeModel', () => {
 
 describe('GenerativeModel dispatch logic', () => {
   let makeRequestStub: SinonStub;
-  let mockChromeAdapter: ChromeAdapterImpl;
+  let mockChromeAdapter: ChromeAdapter;
 
   function stubMakeRequest(stream?: boolean): void {
     if (stream) {