/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */
import {
|
2025-08-12 14:31:59 -04:00
|
|
|
vi,
|
|
|
|
|
describe,
|
|
|
|
|
it,
|
|
|
|
|
expect,
|
|
|
|
|
afterEach,
|
|
|
|
|
beforeAll,
|
|
|
|
|
afterAll,
|
|
|
|
|
} from 'vitest';
|
|
|
|
|
|
|
|
|
|
import { ClearcutLogger, LogEventEntry, TEST_ONLY } from './clearcut-logger.js';
|
|
|
|
|
import { ConfigParameters } from '../../config/config.js';
|
2025-08-07 18:53:39 -07:00
|
|
|
import * as userAccount from '../../utils/user_account.js';
|
|
|
|
|
import * as userId from '../../utils/user_id.js';
|
2025-08-12 14:31:59 -04:00
|
|
|
import { EventMetadataKey } from './event-metadata-key.js';
|
|
|
|
|
import { makeFakeConfig } from '../../test-utils/config.js';
|
|
|
|
|
import { http, HttpResponse } from 'msw';
|
|
|
|
|
import { server } from '../../mocks/msw.js';
|
2025-08-07 18:53:39 -07:00
|
|
|
|
|
|
|
|
vi.mock('../../utils/user_account');
|
|
|
|
|
vi.mock('../../utils/user_id');
|
|
|
|
|
|
|
|
|
|
const mockUserAccount = vi.mocked(userAccount);
|
|
|
|
|
const mockUserId = vi.mocked(userId);
|
|
|
|
|
|
2025-08-12 14:31:59 -04:00
|
|
|
// TODO(richieforeman): Consider moving this to test setup globally.
|
|
|
|
|
beforeAll(() => {
|
|
|
|
|
server.listen({});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
afterEach(() => {
|
|
|
|
|
server.resetHandlers();
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
afterAll(() => {
|
|
|
|
|
server.close();
|
|
|
|
|
});
|
|
|
|
|
|
2025-08-07 18:53:39 -07:00
|
|
|
describe('ClearcutLogger', () => {
|
2025-08-12 14:31:59 -04:00
|
|
|
const NEXT_WAIT_MS = 1234;
|
|
|
|
|
const CLEARCUT_URL = 'https://play.googleapis.com/log';
|
|
|
|
|
const MOCK_DATE = new Date('2025-01-02T00:00:00.000Z');
|
|
|
|
|
const EXAMPLE_RESPONSE = `["${NEXT_WAIT_MS}",null,[[["ANDROID_BACKUP",0],["BATTERY_STATS",0],["SMART_SETUP",0],["TRON",0]],-3334737594024971225],[]]`;
|
2025-08-07 18:53:39 -07:00
|
|
|
|
|
|
|
|
// A helper to get the internal events array for testing
|
|
|
|
|
const getEvents = (l: ClearcutLogger): LogEventEntry[][] =>
|
|
|
|
|
l['events'].toArray() as LogEventEntry[][];
|
|
|
|
|
|
|
|
|
|
const getEventsSize = (l: ClearcutLogger): number => l['events'].size;
|
|
|
|
|
|
|
|
|
|
const requeueFailedEvents = (l: ClearcutLogger, events: LogEventEntry[][]) =>
|
|
|
|
|
l['requeueFailedEvents'](events);
|
|
|
|
|
|
2025-08-12 14:31:59 -04:00
|
|
|
function setup({
|
|
|
|
|
config = {} as Partial<ConfigParameters>,
|
|
|
|
|
lifetimeGoogleAccounts = 1,
|
|
|
|
|
cachedGoogleAccount = 'test@google.com',
|
|
|
|
|
installationId = 'test-installation-id',
|
|
|
|
|
} = {}) {
|
|
|
|
|
server.resetHandlers(
|
|
|
|
|
http.post(CLEARCUT_URL, () => HttpResponse.text(EXAMPLE_RESPONSE)),
|
|
|
|
|
);
|
|
|
|
|
|
2025-08-07 18:53:39 -07:00
|
|
|
vi.useFakeTimers();
|
2025-08-12 14:31:59 -04:00
|
|
|
vi.setSystemTime(MOCK_DATE);
|
2025-08-07 18:53:39 -07:00
|
|
|
|
2025-08-12 14:31:59 -04:00
|
|
|
const loggerConfig = makeFakeConfig({
|
|
|
|
|
...config,
|
|
|
|
|
});
|
|
|
|
|
ClearcutLogger.clearInstance();
|
2025-08-07 18:53:39 -07:00
|
|
|
|
2025-08-12 14:31:59 -04:00
|
|
|
mockUserAccount.getCachedGoogleAccount.mockReturnValue(cachedGoogleAccount);
|
|
|
|
|
mockUserAccount.getLifetimeGoogleAccounts.mockReturnValue(
|
|
|
|
|
lifetimeGoogleAccounts,
|
|
|
|
|
);
|
|
|
|
|
mockUserId.getInstallationId.mockReturnValue(installationId);
|
2025-08-07 18:53:39 -07:00
|
|
|
|
2025-08-12 14:31:59 -04:00
|
|
|
const logger = ClearcutLogger.getInstance(loggerConfig);
|
|
|
|
|
|
|
|
|
|
return { logger, loggerConfig };
|
|
|
|
|
}
|
2025-08-07 18:53:39 -07:00
|
|
|
|
|
|
|
|
afterEach(() => {
|
|
|
|
|
ClearcutLogger.clearInstance();
|
|
|
|
|
vi.useRealTimers();
|
|
|
|
|
vi.restoreAllMocks();
|
|
|
|
|
});
|
|
|
|
|
|
2025-08-12 14:31:59 -04:00
|
|
|
describe('getInstance', () => {
|
|
|
|
|
it.each([
|
|
|
|
|
{ usageStatisticsEnabled: false, expectedValue: undefined },
|
|
|
|
|
{
|
|
|
|
|
usageStatisticsEnabled: true,
|
|
|
|
|
expectedValue: expect.any(ClearcutLogger),
|
|
|
|
|
},
|
|
|
|
|
])(
|
|
|
|
|
'returns an instance if usage statistics are enabled',
|
|
|
|
|
({ usageStatisticsEnabled, expectedValue }) => {
|
|
|
|
|
ClearcutLogger.clearInstance();
|
|
|
|
|
const { logger } = setup({
|
|
|
|
|
config: {
|
|
|
|
|
usageStatisticsEnabled,
|
|
|
|
|
},
|
|
|
|
|
});
|
|
|
|
|
expect(logger).toEqual(expectedValue);
|
|
|
|
|
},
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
it('is a singleton', () => {
|
|
|
|
|
ClearcutLogger.clearInstance();
|
|
|
|
|
const { loggerConfig } = setup();
|
|
|
|
|
const logger1 = ClearcutLogger.getInstance(loggerConfig);
|
|
|
|
|
const logger2 = ClearcutLogger.getInstance(loggerConfig);
|
|
|
|
|
expect(logger1).toBe(logger2);
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
describe('createLogEvent', () => {
|
|
|
|
|
it('logs the total number of google accounts', () => {
|
|
|
|
|
const { logger } = setup({
|
|
|
|
|
lifetimeGoogleAccounts: 9001,
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const event = logger?.createLogEvent('abc', []);
|
|
|
|
|
|
|
|
|
|
expect(event?.event_metadata[0][0]).toEqual({
|
|
|
|
|
gemini_cli_key: EventMetadataKey.GEMINI_CLI_GOOGLE_ACCOUNTS_COUNT,
|
|
|
|
|
value: '9001',
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('logs the current surface', () => {
|
|
|
|
|
const { logger } = setup({});
|
|
|
|
|
|
|
|
|
|
const event = logger?.createLogEvent('abc', []);
|
|
|
|
|
|
|
|
|
|
expect(event?.event_metadata[0][1]).toEqual({
|
|
|
|
|
gemini_cli_key: EventMetadataKey.GEMINI_CLI_SURFACE,
|
|
|
|
|
value: 'SURFACE_NOT_SET',
|
|
|
|
|
});
|
|
|
|
|
});
|
2025-08-07 18:53:39 -07:00
|
|
|
});
|
|
|
|
|
|
|
|
|
|
describe('enqueueLogEvent', () => {
|
|
|
|
|
it('should add events to the queue', () => {
|
2025-08-12 14:31:59 -04:00
|
|
|
const { logger } = setup();
|
2025-08-07 18:53:39 -07:00
|
|
|
logger!.enqueueLogEvent({ test: 'event1' });
|
|
|
|
|
expect(getEventsSize(logger!)).toBe(1);
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('should evict the oldest event when the queue is full', () => {
|
2025-08-12 14:31:59 -04:00
|
|
|
const { logger } = setup();
|
2025-08-07 18:53:39 -07:00
|
|
|
|
2025-08-12 14:31:59 -04:00
|
|
|
for (let i = 0; i < TEST_ONLY.MAX_EVENTS; i++) {
|
2025-08-07 18:53:39 -07:00
|
|
|
logger!.enqueueLogEvent({ event_id: i });
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-12 14:31:59 -04:00
|
|
|
expect(getEventsSize(logger!)).toBe(TEST_ONLY.MAX_EVENTS);
|
2025-08-07 18:53:39 -07:00
|
|
|
const firstEvent = JSON.parse(
|
|
|
|
|
getEvents(logger!)[0][0].source_extension_json,
|
|
|
|
|
);
|
|
|
|
|
expect(firstEvent.event_id).toBe(0);
|
|
|
|
|
|
|
|
|
|
// This should push out the first event
|
2025-08-12 14:31:59 -04:00
|
|
|
logger!.enqueueLogEvent({ event_id: TEST_ONLY.MAX_EVENTS });
|
2025-08-07 18:53:39 -07:00
|
|
|
|
2025-08-12 14:31:59 -04:00
|
|
|
expect(getEventsSize(logger!)).toBe(TEST_ONLY.MAX_EVENTS);
|
2025-08-07 18:53:39 -07:00
|
|
|
const newFirstEvent = JSON.parse(
|
|
|
|
|
getEvents(logger!)[0][0].source_extension_json,
|
|
|
|
|
);
|
|
|
|
|
expect(newFirstEvent.event_id).toBe(1);
|
|
|
|
|
const lastEvent = JSON.parse(
|
2025-08-12 14:31:59 -04:00
|
|
|
getEvents(logger!)[TEST_ONLY.MAX_EVENTS - 1][0].source_extension_json,
|
2025-08-07 18:53:39 -07:00
|
|
|
);
|
2025-08-12 14:31:59 -04:00
|
|
|
expect(lastEvent.event_id).toBe(TEST_ONLY.MAX_EVENTS);
|
2025-08-07 18:53:39 -07:00
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
describe('flushToClearcut', () => {
|
2025-08-12 14:31:59 -04:00
|
|
|
it('allows for usage with a configured proxy agent', async () => {
|
|
|
|
|
const { logger } = setup({
|
|
|
|
|
config: {
|
|
|
|
|
proxy: 'http://mycoolproxy.whatever.com:3128',
|
2025-08-07 18:53:39 -07:00
|
|
|
},
|
|
|
|
|
});
|
2025-08-12 14:31:59 -04:00
|
|
|
|
|
|
|
|
logger!.enqueueLogEvent({ event_id: 1 });
|
|
|
|
|
|
|
|
|
|
const response = await logger!.flushToClearcut();
|
|
|
|
|
|
|
|
|
|
expect(response.nextRequestWaitMs).toBe(NEXT_WAIT_MS);
|
2025-08-07 18:53:39 -07:00
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('should clear events on successful flush', async () => {
|
2025-08-12 14:31:59 -04:00
|
|
|
const { logger } = setup();
|
2025-08-07 18:53:39 -07:00
|
|
|
|
|
|
|
|
logger!.enqueueLogEvent({ event_id: 1 });
|
2025-08-12 14:31:59 -04:00
|
|
|
const response = await logger!.flushToClearcut();
|
2025-08-07 18:53:39 -07:00
|
|
|
|
2025-08-12 14:31:59 -04:00
|
|
|
expect(getEvents(logger!)).toEqual([]);
|
|
|
|
|
expect(response.nextRequestWaitMs).toBe(NEXT_WAIT_MS);
|
2025-08-07 18:53:39 -07:00
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('should handle a network error and requeue events', async () => {
|
2025-08-12 14:31:59 -04:00
|
|
|
const { logger } = setup();
|
|
|
|
|
|
|
|
|
|
server.resetHandlers(http.post(CLEARCUT_URL, () => HttpResponse.error()));
|
2025-08-07 18:53:39 -07:00
|
|
|
logger!.enqueueLogEvent({ event_id: 1 });
|
|
|
|
|
logger!.enqueueLogEvent({ event_id: 2 });
|
|
|
|
|
expect(getEventsSize(logger!)).toBe(2);
|
|
|
|
|
|
2025-08-12 14:31:59 -04:00
|
|
|
const x = logger!.flushToClearcut();
|
|
|
|
|
await x;
|
2025-08-07 18:53:39 -07:00
|
|
|
|
|
|
|
|
expect(getEventsSize(logger!)).toBe(2);
|
|
|
|
|
const events = getEvents(logger!);
|
|
|
|
|
expect(JSON.parse(events[0][0].source_extension_json).event_id).toBe(1);
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('should handle an HTTP error and requeue events', async () => {
|
2025-08-12 14:31:59 -04:00
|
|
|
const { logger } = setup();
|
|
|
|
|
|
|
|
|
|
server.resetHandlers(
|
|
|
|
|
http.post(
|
|
|
|
|
CLEARCUT_URL,
|
|
|
|
|
() =>
|
|
|
|
|
new HttpResponse(
|
|
|
|
|
{ 'the system is down': true },
|
|
|
|
|
{
|
|
|
|
|
status: 500,
|
|
|
|
|
},
|
|
|
|
|
),
|
|
|
|
|
),
|
|
|
|
|
);
|
2025-08-07 18:53:39 -07:00
|
|
|
|
|
|
|
|
logger!.enqueueLogEvent({ event_id: 1 });
|
|
|
|
|
logger!.enqueueLogEvent({ event_id: 2 });
|
|
|
|
|
|
2025-08-12 14:31:59 -04:00
|
|
|
expect(getEvents(logger!).length).toBe(2);
|
|
|
|
|
await logger!.flushToClearcut();
|
2025-08-07 18:53:39 -07:00
|
|
|
|
2025-08-12 14:31:59 -04:00
|
|
|
expect(getEvents(logger!).length).toBe(2);
|
2025-08-07 18:53:39 -07:00
|
|
|
const events = getEvents(logger!);
|
|
|
|
|
expect(JSON.parse(events[0][0].source_extension_json).event_id).toBe(1);
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
describe('requeueFailedEvents logic', () => {
|
|
|
|
|
it('should limit the number of requeued events to max_retry_events', () => {
|
2025-08-12 14:31:59 -04:00
|
|
|
const { logger } = setup();
|
|
|
|
|
const maxRetryEvents = TEST_ONLY.MAX_RETRY_EVENTS;
|
2025-08-07 18:53:39 -07:00
|
|
|
const eventsToLogCount = maxRetryEvents + 5;
|
|
|
|
|
const eventsToSend: LogEventEntry[][] = [];
|
|
|
|
|
for (let i = 0; i < eventsToLogCount; i++) {
|
|
|
|
|
eventsToSend.push([
|
|
|
|
|
{
|
|
|
|
|
event_time_ms: Date.now(),
|
|
|
|
|
source_extension_json: JSON.stringify({ event_id: i }),
|
|
|
|
|
},
|
|
|
|
|
]);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
requeueFailedEvents(logger!, eventsToSend);
|
|
|
|
|
|
|
|
|
|
expect(getEventsSize(logger!)).toBe(maxRetryEvents);
|
|
|
|
|
const firstRequeuedEvent = JSON.parse(
|
|
|
|
|
getEvents(logger!)[0][0].source_extension_json,
|
|
|
|
|
);
|
|
|
|
|
// The last `maxRetryEvents` are kept. The oldest of those is at index `eventsToLogCount - maxRetryEvents`.
|
|
|
|
|
expect(firstRequeuedEvent.event_id).toBe(
|
|
|
|
|
eventsToLogCount - maxRetryEvents,
|
|
|
|
|
);
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('should not requeue more events than available space in the queue', () => {
|
2025-08-12 14:31:59 -04:00
|
|
|
const { logger } = setup();
|
|
|
|
|
const maxEvents = TEST_ONLY.MAX_EVENTS;
|
2025-08-07 18:53:39 -07:00
|
|
|
const spaceToLeave = 5;
|
|
|
|
|
const initialEventCount = maxEvents - spaceToLeave;
|
|
|
|
|
for (let i = 0; i < initialEventCount; i++) {
|
|
|
|
|
logger!.enqueueLogEvent({ event_id: `initial_${i}` });
|
|
|
|
|
}
|
|
|
|
|
expect(getEventsSize(logger!)).toBe(initialEventCount);
|
|
|
|
|
|
|
|
|
|
const failedEventsCount = 10; // More than spaceToLeave
|
|
|
|
|
const eventsToSend: LogEventEntry[][] = [];
|
|
|
|
|
for (let i = 0; i < failedEventsCount; i++) {
|
|
|
|
|
eventsToSend.push([
|
|
|
|
|
{
|
|
|
|
|
event_time_ms: Date.now(),
|
|
|
|
|
source_extension_json: JSON.stringify({ event_id: `failed_${i}` }),
|
|
|
|
|
},
|
|
|
|
|
]);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
requeueFailedEvents(logger!, eventsToSend);
|
|
|
|
|
|
|
|
|
|
// availableSpace is 5. eventsToRequeue is min(10, 5) = 5.
|
|
|
|
|
// Total size should be initialEventCount + 5 = maxEvents.
|
|
|
|
|
expect(getEventsSize(logger!)).toBe(maxEvents);
|
|
|
|
|
|
|
|
|
|
// The requeued events are the *last* 5 of the failed events.
|
|
|
|
|
// startIndex = max(0, 10 - 5) = 5.
|
|
|
|
|
// Loop unshifts events from index 9 down to 5.
|
|
|
|
|
// The first element in the deque is the one with id 'failed_5'.
|
|
|
|
|
const firstRequeuedEvent = JSON.parse(
|
|
|
|
|
getEvents(logger!)[0][0].source_extension_json,
|
|
|
|
|
);
|
|
|
|
|
expect(firstRequeuedEvent.event_id).toBe('failed_5');
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
});
|