Commit
chore(js): remove some debug logs
cabljac committed Oct 11, 2024
1 parent 6556e11 commit eed471e
Showing 3 changed files with 8 additions and 22 deletions.
15 changes: 4 additions & 11 deletions js/plugins/googleai/src/context-caching/helpers.ts
@@ -41,8 +41,6 @@ export function getContentForCache(
     throw new Error('No history provided for context caching');
   }
 
-  logger.info('model version', modelVersion);
-
   const cachedContent: CachedContent = {
     model: modelVersion,
     contents: [],
@@ -55,10 +53,7 @@ export function getContentForCache(
 
   // We split history into two parts: the part that should be cached and the part that should not
   const slicedHistory = chatRequest.history.slice(0, endOfCachedContents);
-  logger.info(
-    'last of cached contents',
-    JSON.stringify(slicedHistory.map((m) => m.role))
-  );
+
   cachedContent.contents = slicedHistory;
 
   let newHistory;
@@ -70,8 +65,6 @@ export function getContentForCache(
   }
   chatRequest.history = newHistory;
 
-  logger.info('new history', JSON.stringify(newHistory.map((m) => m.role)));
-
   if (request.config?.contextCache?.context) {
     cachedContent.systemInstruction = toGeminiSystemInstruction({
       role: 'system',
@@ -140,7 +133,7 @@ export function validateContextCacheRequest(
 ): boolean {
   // Check if contextCache is requested in the config
   if (!request.config?.contextCache) {
-    logger.info('Context caching is not requested');
+    logger.debug('Context caching is not requested');
     return false;
   }
 
@@ -183,13 +176,13 @@ export function validateContextCacheRequest(
     });
   }
 
-  logger.info('Context caching is valid for this request');
+  logger.debug('Context caching is valid for this request');
   // If all checks pass, content should be cached
   return true;
 }
 
 /**
- * Clears ALL
+ * Utility to clear ALL Caches
  * @param cacheManager
  * @param maxPages
  * @param pageSize
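
The hunks above strip ad-hoc `logger.info` calls from `getContentForCache`, which splits the chat history at `endOfCachedContents` into a cacheable prefix and a live remainder. A minimal sketch of that split pattern, under assumed names — `Message` and `splitHistory` are hypothetical stand-ins; the real plugin works with Gemini `Content` objects and mutates `chatRequest.history` in place:

```ts
// Hypothetical stand-in for the Gemini Content type the plugin uses.
interface Message {
  role: string;
  text: string;
}

// Split history at the cache boundary: the prefix goes into the context
// cache, the remainder stays in the outgoing chat request.
function splitHistory(
  history: Message[],
  endOfCachedContents: number
): { cached: Message[]; live: Message[] } {
  if (history.length === 0) {
    throw new Error('No history provided for context caching');
  }
  return {
    cached: history.slice(0, endOfCachedContents),
    live: history.slice(endOfCachedContents),
  };
}
```
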
9 changes: 3 additions & 6 deletions js/plugins/googleai/src/context-caching/index.ts
@@ -38,29 +38,26 @@ export async function handleContextCache(
   chatRequest: StartChatParams,
   modelVersion: string
 ): Promise<{ cache: CachedContent; newChatRequest: StartChatParams }> {
-  logger.info('Using context cache feature');
   const cacheManager = new GoogleAICacheManager(apiKey);
 
   const { cachedContent, chatRequest: newChatRequest } = getContentForCache(
     request,
     chatRequest,
     modelVersion
   );
 
-  logger.info('Cached content:', cachedContent);
   cachedContent.model = modelVersion;
   const cacheKey = generateCacheKey(cachedContent);
 
   cachedContent.displayName = cacheKey;
-  logger.info(`Generated cache key: ${cacheKey}`);
+  logger.debug(`Generated cache key: ${cacheKey}`);
   let cache = await lookupContextCache(cacheManager, cacheKey);
-  logger.info(`Found cache: ${cache ? 'true' : 'false'}`);
+  logger.debug(`Cache hit: ${cache ? 'true' : 'false'}`);
 
   if (!cache) {
     try {
+      logger.debug('No cache found, creating one.');
       cache = await cacheManager.create(cachedContent);
-      logger.info(`Created new cache entry with key: ${cacheKey}`);
+      logger.debug(`Created new cache entry with key: ${cacheKey}`);
     } catch (cacheError) {
       throw new GenkitError({
         status: 'INTERNAL',
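
`handleContextCache` derives a deterministic key from the cacheable content, reuses an existing cache entry when the lookup succeeds, and creates one on a miss. A sketch of that lookup-or-create flow under assumed names — `CacheStore` stands in for `GoogleAICacheManager`, and the SHA-256 key derivation is illustrative (the real `generateCacheKey` may differ):

```ts
import { createHash } from 'node:crypto';

// Hypothetical stand-in for GoogleAICacheManager.
interface CacheStore<T> {
  lookup(key: string): Promise<T | undefined>;
  create(value: T): Promise<T>;
}

// Deterministic key: identical cacheable content always maps to the same
// entry, so repeated requests can reuse the same server-side cache.
function cacheKeyFor(content: unknown): string {
  return createHash('sha256').update(JSON.stringify(content)).digest('hex');
}

async function lookupOrCreate<T>(
  store: CacheStore<T>,
  content: T
): Promise<T> {
  const key = cacheKeyFor(content);
  const hit = await store.lookup(key);
  if (hit) return hit; // cache hit: reuse the existing entry
  return store.create(content); // miss: create a new entry
}
```
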
6 changes: 1 addition & 5 deletions js/plugins/googleai/src/gemini.ts
@@ -580,8 +580,6 @@ export function googleAIModel(
     return fromGeminiCandidate(candidate, jsonMode);
   };
 
-  logger.info('request config', request.config);
-
   let chatRequest = {
     systemInstruction,
     generationConfig,
@@ -618,7 +616,7 @@ export function googleAIModel(
     const client = new GoogleGenerativeAI(apiKey!);
 
     if (cache) {
-      logger.info('Using cached content');
+      logger.debug('Using Context Cache');
       genModel = client.getGenerativeModelFromCachedContent(cache, options);
     } else {
       genModel = client.getGenerativeModel(
@@ -628,7 +626,6 @@ export function googleAIModel(
         options
       );
     }
-    logger.info('created generative model client');
 
     if (streamingCallback) {
       const result = await genModel
@@ -653,7 +650,6 @@ export function googleAIModel(
         custom: response,
       };
     } else {
-      logger.info(chatRequest!.history![0].role);
       const result = await genModel
         .startChat(chatRequest)
         .sendMessage(msg.parts, options);
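
In gemini.ts, the surviving branch picks the model client based on whether a context-cache entry exists. A condensed sketch of that choice, assuming the `@google/generative-ai` SDK (exact import paths and the `CachedContent` type export may vary by SDK version):

```ts
import {
  GoogleGenerativeAI,
  type CachedContent,
  type GenerativeModel,
} from '@google/generative-ai';

// Pick the client: bind to the server-side cached context when a cache
// entry exists, otherwise fall back to a plain model handle.
function modelFor(
  apiKey: string,
  modelVersion: string,
  cache?: CachedContent
): GenerativeModel {
  const client = new GoogleGenerativeAI(apiKey);
  if (cache) {
    return client.getGenerativeModelFromCachedContent(cache);
  }
  return client.getGenerativeModel({ model: modelVersion });
}
```
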
