Refactor all Gmail API calls #289
base: main
Changes from 2 commits
```diff
@@ -26,6 +26,7 @@ import { categorizeSender } from "@/utils/categorize/senders/categorize";
 import { unwatchEmails } from "@/app/api/google/watch/controller";
 import { createScopedLogger } from "@/utils/logger";
 import { markMessageAsProcessing } from "@/utils/redis/message-processing";
+import { getHistory } from "@/utils/gmail/misc";

 const logger = createScopedLogger("Process History");
```

```diff
@@ -153,8 +154,7 @@ export async function processHistoryForUser(
     email,
   });

-  const history = await gmail.users.history.list({
-    userId: "me",
+  const history = await getHistory(gmail, {
```
💡 Codebase verification

**Pagination not implemented in `getHistory` function**

The current implementation of `getHistory` fetches a single page and does not follow `nextPageToken`, so results can be incomplete when the account is far behind.

🔗 Analysis chain

Check long history retrieval edge cases.

Let me check the implementation of `getHistory`. Let me try a different approach to check the implementation.

🏁 Scripts executed

The following scripts were executed for the analysis:

```bash
#!/bin/bash
# Description: Check if multiple pages of history are handled consistently
rg -A 10 'getHistory\('
```

Length of output: 1539

```bash
#!/bin/bash
# Check the full implementation of getHistory function
ast-grep --pattern 'export async function getHistory($_) {
  $$$
}'
```

Length of output: 69

```bash
#!/bin/bash
# Check the full implementation of getHistory function using ripgrep
rg -U "export async function getHistory[\s\S]*?\n}" --multiline
```

Length of output: 1040
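A minimal sketch of the paging the reviewer is asking about, assuming `getHistory` wraps `gmail.users.history.list` as the diff suggests; the helper name and options shape below are illustrative, not the repository's actual code:

```typescript
import type { gmail_v1 } from "@googleapis/gmail";

// Hypothetical paginated variant: keep requesting pages until Gmail
// stops returning a nextPageToken, then hand back the combined history.
async function getHistoryPaginated(
  gmail: gmail_v1.Gmail,
  options: { startHistoryId?: string; historyTypes?: string[]; maxResults?: number },
): Promise<gmail_v1.Schema$History[]> {
  const history: gmail_v1.Schema$History[] = [];
  let pageToken: string | undefined;

  do {
    const res = await gmail.users.history.list({
      userId: "me",
      ...options,
      pageToken,
    });
    history.push(...(res.data.history || []));
    pageToken = res.data.nextPageToken || undefined;
  } while (pageToken);

  return history;
}
```

Callers that only need to catch up could also stop early and persist the last seen history ID instead of draining every page; that is a design choice, not something this PR decides.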
```diff
     // NOTE this can cause problems if we're way behind
     // NOTE this doesn't include startHistoryId in the results
     startHistoryId,
```
```diff
@@ -6,20 +6,20 @@ import { getGmailClient } from "@/utils/gmail/client";
 import { INBOX_LABEL_ID, getOrCreateInboxZeroLabel } from "@/utils/gmail/label";
 import { sleep } from "@/utils/sleep";
 import { withError } from "@/utils/middleware";
+import { getThreads, modifyThread } from "@/utils/gmail/thread";

 const bulkArchiveBody = z.object({ daysAgo: z.string() });
 export type BulkArchiveBody = z.infer<typeof bulkArchiveBody>;
 export type BulkArchiveResponse = Awaited<ReturnType<typeof bulkArchive>>;

 async function bulkArchive(body: BulkArchiveBody, gmail: gmail_v1.Gmail) {
-  const res = await gmail.users.threads.list({
-    userId: "me",
+  const res = await getThreads(gmail, {
     maxResults: 500,
     q: `older_than:${body.daysAgo}d`,
     labelIds: [INBOX_LABEL_ID],
   });

-  console.log(`Archiving ${res.data.threads?.length} threads`);
+  console.log(`Archiving ${res.threads?.length} threads`);

   const archivedLabel = await getOrCreateInboxZeroLabel({
     gmail,
```

```diff
@@ -29,22 +29,18 @@ async function bulkArchive(body: BulkArchiveBody, gmail: gmail_v1.Gmail) {
   if (!archivedLabel.id)
     throw new Error("Failed to get or create archived label");

-  for (const thread of res.data.threads || []) {
-    await gmail.users.threads.modify({
-      userId: "me",
-      id: thread.id!,
-      requestBody: {
-        addLabelIds: [archivedLabel.id],
-        removeLabelIds: [INBOX_LABEL_ID],
-      },
+  for (const thread of res.threads || []) {
+    await modifyThread(gmail, thread.id!, {
+      addLabelIds: [archivedLabel.id],
+      removeLabelIds: [INBOX_LABEL_ID],
```
Comment on lines +32 to +35

🛠️ Refactor suggestion

**Consider parallel archiving in small batches.**
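A sketch of what that batching could look like, reusing `modifyThread`, `archivedLabel`, and `sleep` from the diff above; the batch size of 25 follows the existing 250-quota-units-per-second comment (threads.modify costs 10 units), and the 1-second pause between batches is an assumption:

```typescript
// Archive in parallel batches of 25, then pause so the next batch
// starts in a fresh quota window (250 units/s, 10 units per modify).
const threads = res.threads || [];
const BATCH_SIZE = 25;

for (let i = 0; i < threads.length; i += BATCH_SIZE) {
  const batch = threads.slice(i, i + BATCH_SIZE);

  await Promise.all(
    batch.map((thread) =>
      modifyThread(gmail, thread.id!, {
        addLabelIds: [archivedLabel.id],
        removeLabelIds: [INBOX_LABEL_ID],
      }),
    ),
  );

  // Skip the pause after the final batch.
  if (i + BATCH_SIZE < threads.length) await sleep(1000);
}
```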
```diff
     });

     // we're allowed to archive 250/10 = 25 threads per second:
     // https://developers.google.com/gmail/api/reference/quota
     await sleep(40); // 1s / 25 = 40ms
   }

-  return { count: res.data.threads?.length || 0 };
+  return { count: res.threads?.length || 0 };
 }

 export const POST = withError(async (request: Request) => {
```
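For orientation, this is roughly the shape the new thread helpers would need in order to satisfy the calls above; it is inferred from usage in the diff, and the actual implementation in `@/utils/gmail/thread` may differ:

```typescript
import type { gmail_v1 } from "@googleapis/gmail";

// Assumed wrapper shapes, reconstructed from call sites in this PR.
export async function getThreads(
  gmail: gmail_v1.Gmail,
  options: { q?: string; labelIds?: string[]; maxResults?: number; pageToken?: string },
) {
  const res = await gmail.users.threads.list({ userId: "me", ...options });
  return res.data; // callers read .threads and .nextPageToken from this
}

export async function modifyThread(
  gmail: gmail_v1.Gmail,
  id: string,
  requestBody: gmail_v1.Schema$ModifyThreadRequest,
) {
  const res = await gmail.users.threads.modify({ userId: "me", id, requestBody });
  return res.data;
}
```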
```diff
@@ -2,7 +2,7 @@ import prisma from "@/utils/prisma";
 import type { gmail_v1 } from "@googleapis/gmail";
 import { createHash } from "node:crypto";
 import groupBy from "lodash/groupBy";
-import { getMessage } from "@/utils/gmail/message";
+import { getMessage, getMessages } from "@/utils/gmail/message";
 import { findMatchingGroupItem } from "@/utils/group/find-matching-group";
 import { parseMessage } from "@/utils/mail";
 import { extractEmailAddress } from "@/utils/email";
```

```diff
@@ -214,15 +214,14 @@ async function fetchGroupMessages(
 ): Promise<{ messages: MessageWithGroupItem[]; nextPageToken?: string }> {
   const q = buildQuery(groupItemType, groupItems, from, to);

-  const response = await gmail.users.messages.list({
-    userId: "me",
+  const response = await getMessages(gmail, {
     maxResults,
     pageToken,
-    q,
+    query: q,
   });

   const messages = await Promise.all(
-    (response.data.messages || []).map(async (message) => {
+    (response.messages || []).map(async (message) => {
```
🛠️ Refactor suggestion

**Limit concurrency in Promise.all loop.**

```diff
-  const messages = await Promise.all(
-    (response.messages || []).map(async (message) => {
-      ...
-    }),
-  );
+  import pLimit from "p-limit";
+  const limit = pLimit(5); // limit concurrency to 5
+  const messages = await Promise.all(
+    (response.messages || []).map((msg) => limit(async () => {
+      ...
+    })),
+  );
```
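Spelled out against the surrounding loop, the suggestion could look roughly like this; `getMessage` and `parseMessage` come from the diff, while the p-limit dependency and the concurrency of 5 are assumptions carried over from the reviewer's snippet:

```typescript
import pLimit from "p-limit";

// Cap concurrent per-message fetches so one large page doesn't fire
// dozens of Gmail requests at once.
const limit = pLimit(5);

const messages = await Promise.all(
  (response.messages || []).map((message) =>
    limit(async () => {
      const m = await getMessage(message.id!, gmail);
      const parsedMessage = parseMessage(m);
      // ...the rest of the per-message work from the original loop
      return parsedMessage;
    }),
  ),
);
```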
```diff
       const m = await getMessage(message.id!, gmail);
       const parsedMessage = parseMessage(m);
       const matchingGroupItem = findMatchingGroupItem(
```

```diff
@@ -236,7 +235,7 @@ async function fetchGroupMessages(
   return {
     // search might include messages that don't match the rule, so we filter those out
     messages: messages.filter((message) => message.matchingGroupItem),
-    nextPageToken: response.data.nextPageToken || undefined,
+    nextPageToken: response.nextPageToken || undefined,
   };
 }
```
```diff
@@ -1,6 +1,6 @@
 import type { gmail_v1 } from "@googleapis/gmail";
 import { parseMessage } from "@/utils/mail";
-import { getMessage } from "@/utils/gmail/message";
+import { getMessage, getMessages } from "@/utils/gmail/message";
 import type {
   MessageWithGroupItem,
   RuleWithGroup,
```

```diff
@@ -66,14 +66,13 @@ async function fetchStaticExampleMessages(
     q += `subject:${rule.subject} `;
   }

-  const response = await gmail.users.messages.list({
-    userId: "me",
+  const response = await getMessages(gmail, {
     maxResults: 50,
-    q,
+    query: q,
```
Comment on lines +69 to +71

💡 Codebase verification

**The review comment is valid - pagination support should be implemented.** The current implementation fetches only a single page of results and does not use `nextPageToken`.

🔗 Analysis chain

Consider handling paginated results. While fetching 50 messages might suffice for now, larger result sets will need follow-up pages via `pageToken`.

Let me search for more context about the pagination handling in the codebase. Let me search for more specific information about the `getMessages` implementation.

🏁 Scripts executed

The following scripts were executed for the analysis:

```bash
#!/bin/bash
# Searching usage for `fetchStaticExampleMessages` to confirm it does not require subsequent pages
rg -A 4 "fetchStaticExampleMessages"
```

Length of output: 901

```bash
#!/bin/bash
# Search for fetchPaginatedMessages implementation and usage
rg -A 10 "fetchPaginatedMessages"

# Search for getMessages implementation to understand pagination support
ast-grep --pattern 'async function getMessages'
```

Length of output: 3520

```bash
#!/bin/bash
# Search for getMessages implementation and usage
rg -A 15 "getMessages = async|function getMessages"

# Search for files that might contain Gmail API related code
fd -e ts gmail
```

Length of output: 3111
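If paging does become necessary here, a minimal sketch written directly against `gmail.users.messages.list` could look like the following; the helper name, the 100-per-page size, and the overall cap are illustrative assumptions:

```typescript
import type { gmail_v1 } from "@googleapis/gmail";

// Hypothetical helper: drain message pages until there is no
// nextPageToken or we reach an overall cap.
async function listAllMessages(
  gmail: gmail_v1.Gmail,
  query: string,
  maxTotal = 500,
): Promise<gmail_v1.Schema$Message[]> {
  const all: gmail_v1.Schema$Message[] = [];
  let pageToken: string | undefined;

  do {
    const res = await gmail.users.messages.list({
      userId: "me",
      q: query,
      maxResults: Math.min(100, maxTotal - all.length),
      pageToken,
    });
    all.push(...(res.data.messages || []));
    pageToken = res.data.nextPageToken || undefined;
  } while (pageToken && all.length < maxTotal);

  return all;
}
```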
```diff
   });

   const messages = await Promise.all(
-    (response.data.messages || []).map(async (message) => {
+    (response.messages || []).map(async (message) => {
       const m = await getMessage(message.id!, gmail);
       const parsedMessage = parseMessage(m);
       return parsedMessage;
```
🛠️ Refactor suggestion

**Handle potential exceptions thrown by `watchUser`.**

If the API call fails due to invalid credentials or permissions, the error may go unhandled until caught in a higher-level block. Consider adding try/catch or passing the error upward in a structured way.
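A minimal sketch of the structured handling the reviewer describes; `watchUser` and its call signature are assumptions taken from the comment rather than the repository's actual code:

```typescript
import type { gmail_v1 } from "@googleapis/gmail";

// Hypothetical wrapper: turn watch failures (expired grants, missing
// scopes) into a structured result instead of an unhandled rejection.
async function safeWatchUser(gmail: gmail_v1.Gmail, email: string) {
  try {
    const watch = await watchUser(gmail); // assumed signature
    return { ok: true as const, watch };
  } catch (error) {
    console.error(`watchUser failed for ${email}`, error);
    return { ok: false as const, error };
  }
}
```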