Skip to content

Commit

Permalink
fix(webhook): leverage the reply token timeout mechanism to report mo…
Browse files Browse the repository at this point in the history
…st recent progress

- Show loading animation instead
- This measure reduces the interaction provided on the user side and is thus more straightforward
  • Loading branch information
MrOrz committed Feb 5, 2025
1 parent 56a35f8 commit f602fce
Show file tree
Hide file tree
Showing 6 changed files with 71 additions and 26 deletions.
5 changes: 5 additions & 0 deletions src/types/chatbotState.ts
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,11 @@ export type Context = {
/** Used to differentiate different search sessions (searched text or media) */
sessionId: number;
msgs: ReadonlyArray<CooccurredMessage>;

/**
* Message to show when sending reply token collector before the current reply token expires.
*/
replyTokenCollectorMsg?: string;
};

/** Latest reply token in Redis that is not consumed yet */
Expand Down
10 changes: 6 additions & 4 deletions src/webhook/handlers/askingArticleSubmissionConsent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,8 @@ import {
setExactMatchesAsCooccurrence,
addReplyRequestForUnrepliedCooccurredArticles,
createAskAiReplyFeedbackBubble,
sendReplyTokenCollector,
setReplyTokenCollectorMsg,
displayLoadingAnimation,
} from './utils';

// Input should be array of context.msgs idx. Empty if the user does not want to submit.
Expand Down Expand Up @@ -87,10 +88,11 @@ const askingArticleSubmissionConsent: ChatbotPostbackHandler = async ({

visitor.event({ ec: 'Article', ea: 'Create', el: 'Yes' }).send();

await sendReplyTokenCollector(
await setReplyTokenCollectorMsg(
userId,
t`I will spend some time processing the ${msgsToSubmit.length} new message(s) you have submitted.`
t`I am currently sending the ${msgsToSubmit.length} new message(s) you have submitted to the database.`
);
await displayLoadingAnimation(userId);

const createdArticles = await Promise.all(
msgsToSubmit.map(async (msg) => {
Expand Down Expand Up @@ -202,7 +204,7 @@ const askingArticleSubmissionConsent: ChatbotPostbackHandler = async ({
const articleUrl = getArticleURL(article.id);
const articleCreatedMsg = t`Your submission is now recorded at ${articleUrl}`;

await sendReplyTokenCollector(
await setReplyTokenCollectorMsg(
userId,
t`I am now generating automated analysis for the message you have submitted, please wait.`
);
Expand Down
26 changes: 19 additions & 7 deletions src/webhook/handlers/askingCooccurrence.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,8 @@ import {
createCooccurredSearchResultsCarouselContents,
setExactMatchesAsCooccurrence,
addReplyRequestForUnrepliedCooccurredArticles,
sendReplyTokenCollector,
setReplyTokenCollectorMsg,
displayLoadingAnimation,
} from './utils';

const inputSchema = z.enum([POSTBACK_NO, POSTBACK_YES]);
Expand Down Expand Up @@ -70,17 +71,28 @@ const askingCooccurence: ChatbotPostbackHandler = async ({
})
.send();

await sendReplyTokenCollector(
let processingCount = context.msgs.length;
await setReplyTokenCollectorMsg(
userId,
t`I will spend some time analyzing the ${context.msgs.length} message(s) you have submitted, and will get back to you ASAP.`
t`Out of the ${context.msgs.length} message(s) you have submitted, I am still analyzing ${processingCount} of them.`
);
await displayLoadingAnimation(userId);

const searchResults = await Promise.all(
context.msgs.map(async (msg) =>
msg.type === 'text'
context.msgs.map(async (msg) => {
const result = await (msg.type === 'text'
? searchText(msg.text)
: searchMedia(getLineContentProxyURL(msg.id), userId)
)
: searchMedia(getLineContentProxyURL(msg.id), userId));

processingCount -= 1;
// Update reply token collector message with latest number of messages that is still being analyzed
await setReplyTokenCollectorMsg(
userId,
t`Out of the ${context.msgs.length} message(s) you have submitted, I am still analyzing ${processingCount} of them.`
);

return result;
})
);

const notInDbMsgIndexes = searchResults.reduce((indexes, result, idx) => {
Expand Down
8 changes: 5 additions & 3 deletions src/webhook/handlers/processMedia.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,8 @@ import {
createAskArticleSubmissionConsentReply,
searchMedia,
createSearchResultCarouselContents,
sendReplyTokenCollector,
setReplyTokenCollectorMsg,
displayLoadingAnimation,
setNewContext,
} from './utils';
import choosingArticle from './choosingArticle';
Expand All @@ -35,10 +36,11 @@ export default async function (message: CooccurredMessage, userId: string) {
msgs: [message],
});

await sendReplyTokenCollector(
await setReplyTokenCollectorMsg(
userId,
t`I will spend some time analyzing the message you have submitted, and will get back to you ASAP.`
t`I am still analyzing the media file you have submitted.`
);
await displayLoadingAnimation(userId);

const result = await searchMedia(proxyUrl, userId);

Expand Down
15 changes: 12 additions & 3 deletions src/webhook/handlers/singleUserHandler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,12 @@ import {
import processMedia from './processMedia';
import processBatch from './processBatch';
import initState from './initState';
import { setReplyToken, consumeReplyTokenInfo, setNewContext } from './utils';
import {
setReplyToken,
consumeReplyTokenInfo,
setNewContext,
setReplyTokenCollectorMsg,
} from './utils';

const userIdBlacklist = (process.env.USERID_BLACKLIST || '').split(',');

Expand Down Expand Up @@ -135,8 +140,12 @@ const singleUserHandler = async (
});
}

// The chatbot's reply cuts off the user's input streak, thus we end the current batch here.
redis.del(REDIS_BATCH_KEY);
await Promise.all([
// The chatbot's reply cuts off the user's input streak, thus we end the current batch here.
redis.del(REDIS_BATCH_KEY),
// The chatbot's reply marks an end of previous process, thus we can clear the reply collector message.
setReplyTokenCollectorMsg(userId, null),
]);
}

// Set context
Expand Down
33 changes: 24 additions & 9 deletions src/webhook/handlers/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1481,10 +1481,7 @@ export async function setReplyToken(userId: string, replyToken: string) {
if (!latestReplyTokenInfo || latestReplyTokenInfo.token !== replyToken)
return;

await sendReplyTokenCollector(
userId,
t`I am still processing your request. Please wait.`
);
await sendReplyTokenCollector(userId);
}, REPLY_TIMEOUT);

return () => clearTimeout(timer);
Expand All @@ -1506,14 +1503,13 @@ export async function consumeReplyTokenInfo(
return tokenInfo;
}

/** Fallback text shown by the reply token collector when no per-user message has been set via setReplyTokenCollectorMsg. */
const DEFAULT_REPLY_TOKEN_COLLECTOR_MSG = t`I am still processing your request. Please wait.`;

/**
* Sends a message with quick reply to collect new reply token.
* Does nothing if the current token is already expired.
*/
export async function sendReplyTokenCollector(
userId: string,
message: string
): Promise<void> {
async function sendReplyTokenCollector(userId: string): Promise<void> {
const tokenInfo = await consumeReplyTokenInfo(userId);

// Token is already consumed or not set
Expand All @@ -1531,7 +1527,9 @@ export async function sendReplyTokenCollector(
const messages: Message[] = [
{
...createTextMessage({
text: message,
text:
latestContext.replyTokenCollectorMsg ??
DEFAULT_REPLY_TOKEN_COLLECTOR_MSG,
}),
quickReply: {
items: [
Expand All @@ -1557,3 +1555,20 @@ export async function sendReplyTokenCollector(
messages,
});
}

/**
* Setup the message to show when reply token collector is sent to the user.
*/
/**
 * Set the message to show when the reply token collector is sent to the user.
 *
 * Stores the message on the user's context in Redis so that
 * sendReplyTokenCollector can pick it up later.
 *
 * @param userId - The user whose context should be updated.
 * @param msg - The message to show. Set to null or empty string to use the default message.
 */
export async function setReplyTokenCollectorMsg(
  userId: string,
  msg: string | null
) {
  // NOTE(review): redis.get presumably returns null when no context exists for
  // this user — guard so we don't throw on property access below.
  const context = (await redis.get(userId)) as Context | null;
  if (!context) return;

  if (msg) {
    context.replyTokenCollectorMsg = msg;
  } else {
    // Falsy msg (null / empty string) clears the override so the default
    // collector message applies.
    delete context.replyTokenCollectorMsg;
  }
  await redis.set(userId, context);
}

0 comments on commit f602fce

Please sign in to comment.