feat: discord bulk requests
Ajeyakrishna-k committed Dec 4, 2023
1 parent 1a0c7b9 commit 7365d30
Showing 3 changed files with 317 additions and 0 deletions.
109 changes: 109 additions & 0 deletions src/utils/batchDiscordRequests.ts
@@ -0,0 +1,109 @@
import JSONResponse from "./JsonResponse";
import { addDelay, convertSecondsToMillis } from "./timeUtils";

export const DISCORD_HEADERS = {
  RATE_LIMIT_RESET_AFTER: "X-RateLimit-Reset-After",
  RATE_LIMIT_REMAINING: "X-RateLimit-Remaining",
  RETRY_AFTER: "Retry-After",
};

// Maximum number of retry attempts allowed per failed request.
const MAX_RETRY = 1;

interface RequestDetails {
  retries: number;
  request: () => Promise<Response>;
  index: number;
}

interface ResponseDetails {
  response: Response;
  data: RequestDetails;
}

export const batchDiscordRequests = async (
  requests: (() => Promise<Response>)[]
): Promise<Response[]> => {
  try {
    const requestsQueue: RequestDetails[] = requests.map((request, index) => {
      return {
        retries: 0,
        request: request,
        index: index,
      };
    });

    // Responses are written back by original index so the output order matches the input order.
    const responseList: Response[] = new Array(requestsQueue.length);
    let resetAfter = 0;
    let rateLimitRemaining: number | null = null;
    let retryAfter: number | null = null;

    const handleResponse = async (
      response: Response,
      data: RequestDetails
    ): Promise<void> => {
      if (response.ok) {
        // Track how many requests remain in the current rate-limit bucket and when it resets.
        resetAfter = Number.parseFloat(
          response.headers.get(DISCORD_HEADERS.RATE_LIMIT_RESET_AFTER) || "0"
        );
        rateLimitRemaining = Number.parseInt(
          response.headers.get(DISCORD_HEADERS.RATE_LIMIT_REMAINING) || "0"
        );
        responseList[data.index] = response;
      } else {
        retryAfter = Number.parseFloat(
          response.headers.get(DISCORD_HEADERS.RETRY_AFTER) || "0"
        );
        if (data.retries >= MAX_RETRY) {
          responseList[data.index] = response;
        } else {
          // Re-queue the failed request for another attempt.
          data.retries++;
          requestsQueue.push(data);
        }
      }
    };

    const executeRequest = async (
      data: RequestDetails
    ): Promise<ResponseDetails> => {
      let response;
      try {
        response = await data.request();
      } catch (e: unknown) {
        // Network failures are converted into 500 responses so the batch keeps going.
        response = new JSONResponse({ error: e }, { status: 500 });
      }
      return { response, data };
    };

    let promises: Promise<ResponseDetails>[] = [];

    while (requestsQueue.length > 0) {
      const requestData = requestsQueue.pop();
      if (!requestData) continue;
      promises.push(executeRequest(requestData));
      if (rateLimitRemaining) {
        rateLimitRemaining--;
      }
      if (
        !rateLimitRemaining ||
        rateLimitRemaining <= 0 ||
        requestsQueue.length === 0
      ) {
        // Flush the in-flight batch before deciding whether to wait out the rate limit.
        const resultList: ResponseDetails[] = await Promise.all(promises);
        promises = [];
        for (const result of resultList) {
          const { response, data } = result;
          await handleResponse(response, data);
        }
        if (rateLimitRemaining !== null && rateLimitRemaining <= 0 && resetAfter) {
          // The bucket is exhausted; wait until it resets before sending more requests.
          await addDelay(convertSecondsToMillis(resetAfter));
          rateLimitRemaining = null;
        } else if (retryAfter && retryAfter > 0) {
          await addDelay(convertSecondsToMillis(retryAfter));
          retryAfter = null;
        }
      }
    }

    return responseList;
  } catch (e) {
    console.error(e);
    throw e;
  }
};
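For context, a minimal usage sketch (not part of this commit): the function name, guild ID, user IDs, and bot token below are illustrative placeholders, and the endpoint assumes the standard Discord REST route for fetching a guild member.

import { batchDiscordRequests } from "./batchDiscordRequests";

// Hypothetical caller: fetch several guild members while respecting Discord's rate limits.
async function fetchGuildMembers(
  guildId: string,
  userIds: string[],
  botToken: string
): Promise<Response[]> {
  const requests = userIds.map(
    (userId) => () =>
      fetch(`https://discord.com/api/v10/guilds/${guildId}/members/${userId}`, {
        method: "GET",
        headers: { Authorization: `Bot ${botToken}` },
      })
  );
  // Responses come back in the same order as the request factories passed in.
  return await batchDiscordRequests(requests);
}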
7 changes: 7 additions & 0 deletions src/utils/timeUtils.ts
@@ -0,0 +1,7 @@
export const addDelay = async (millisecond: number): Promise<void> => {
  await new Promise<void>((resolve) => setTimeout(resolve, millisecond));
};

export const convertSecondsToMillis = (seconds: number): number => {
  return Math.ceil(seconds * 1000);
};
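A small illustration of how these helpers are meant to be combined; `waitForReset` and the header name are only an example, not part of this commit.

import { addDelay, convertSecondsToMillis } from "./timeUtils";

// Hypothetical helper: pause until the rate-limit window reported by a response resets.
const waitForReset = async (response: Response): Promise<void> => {
  const resetAfterSeconds = Number.parseFloat(
    response.headers.get("X-RateLimit-Reset-After") || "0"
  );
  if (resetAfterSeconds > 0) {
    // Math.ceil in convertSecondsToMillis rounds up, so we never wake early.
    await addDelay(convertSecondsToMillis(resetAfterSeconds));
  }
};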
201 changes: 201 additions & 0 deletions tests/unit/utils/batchDiscordRequests.test.ts
@@ -0,0 +1,201 @@
import {
  batchDiscordRequests,
  DISCORD_HEADERS,
} from "../../../src/utils/batchDiscordRequests";
import JSONResponse from "../../../src/utils/JsonResponse";

describe("Utils | batchDiscordRequests", () => {
  const rateLimitingHeaders = {
    [DISCORD_HEADERS.RATE_LIMIT_REMAINING]: "9",
    [DISCORD_HEADERS.RATE_LIMIT_RESET_AFTER]: "1.1", // seconds
  };

  const rateLimitExceededHeaders = {
    [DISCORD_HEADERS.RETRY_AFTER]: "1.2", // seconds
  };

  let fetchSpy: jest.SpyInstance;
  let setTimeoutSpy: jest.SpyInstance;

  beforeEach(() => {
    fetchSpy = jest.spyOn(global, "fetch");
    setTimeoutSpy = jest.spyOn(global, "setTimeout");
  });

  afterEach(() => {
    jest.resetAllMocks();
    jest.restoreAllMocks();
  });

  test("should execute requests when there are no headers", async () => {
    fetchSpy.mockImplementation(() =>
      Promise.resolve(new JSONResponse({}, {}))
    );
    const singleRequest = () => fetch("/abc", { method: "GET" });
    await batchDiscordRequests([singleRequest]);
    expect(global.fetch).toHaveBeenCalledWith("/abc", { method: "GET" });
    expect(global.fetch).toBeCalledTimes(1);
  });

  test("should execute multiple requests when there are no headers", async () => {
    fetchSpy.mockImplementation(() =>
      Promise.resolve(new JSONResponse({}, {}))
    );
    const singleRequest = () => fetch("/abc", { method: "GET" });
    await batchDiscordRequests(new Array(20).fill(singleRequest));
    expect(global.fetch).toHaveBeenCalledWith("/abc", { method: "GET" });
    expect(global.fetch).toBeCalledTimes(20);
  });

  test("should execute requests when there are headers and input size is 40 with a limit of 3", async () => {
    const maxRateLimit = 3;
    const inputSize = 40;
    let remainingRateLimit = maxRateLimit;
    const headers = { ...rateLimitingHeaders };
    fetchSpy.mockImplementation(
      () =>
        new Promise((resolve) => {
          headers[DISCORD_HEADERS.RATE_LIMIT_REMAINING] =
            remainingRateLimit.toString();
          remainingRateLimit--;
          return resolve(new JSONResponse({}, { headers: headers }));
        })
    );
    setTimeoutSpy.mockImplementation((resolve: any) => {
      remainingRateLimit = maxRateLimit;
      return resolve();
    });
    const singleRequest = () => fetch("/abc", { method: "GET" });
    await batchDiscordRequests(new Array(inputSize).fill(singleRequest));
    expect(global.fetch).toHaveBeenCalledWith("/abc", { method: "GET" });
    expect(global.fetch).toBeCalledTimes(inputSize);
  });

test("should execute requests when there are headers and input size is 6 with a limit of 2", async () => {
const maxRateLimit = 3;
const inputSize = 6;
let remainingRateLimit = maxRateLimit;
const headers = { ...rateLimitingHeaders };
fetchSpy.mockImplementation(
() =>
new Promise((resolve) => {
headers[DISCORD_HEADERS.RATE_LIMIT_REMAINING] =
remainingRateLimit.toString();
remainingRateLimit--;
return resolve(new JSONResponse({}, { headers: headers }));
})
);
setTimeoutSpy.mockImplementation((resolve: any) => {
remainingRateLimit = maxRateLimit;
return resolve();
});
const singleRequest = () => fetch("/abc", { method: "GET" });
await batchDiscordRequests(new Array(inputSize).fill(singleRequest));
expect(global.fetch).toHaveBeenCalledWith("/abc", { method: "GET" });
expect(global.fetch).toBeCalledTimes(inputSize);
});

test("should retry fetch call when the API fails", async () => {
const headers = { ...rateLimitExceededHeaders };
fetchSpy.mockImplementation(
() =>
new Promise((resolve) => {
return resolve(
new JSONResponse({}, { headers: headers, status: 500 })
);
})
);
setTimeoutSpy.mockImplementation((resolve: any) => {
return resolve();
});
const singleRequest = () => fetch("/abc", { method: "GET" });
await batchDiscordRequests([singleRequest]);
expect(global.fetch).toHaveBeenCalledWith("/abc", { method: "GET" });
expect(global.fetch).toBeCalledTimes(2);
});

test("should retry only failed fetch calls", async () => {
const maxRateLimit = 3;
const inputSize = 6;
let remainingRateLimit = maxRateLimit;
let retries = 5;
const headers = { ...rateLimitingHeaders };
fetchSpy.mockImplementation(
() =>
new Promise((resolve) => {
const status = retries > 0 ? 500 : 200;
retries--;
headers[DISCORD_HEADERS.RATE_LIMIT_REMAINING] =
remainingRateLimit.toString();
remainingRateLimit--;
return resolve(
new JSONResponse({}, { headers: headers, status: status })
);
})
);
setTimeoutSpy.mockImplementation((resolve: any) => {
remainingRateLimit = maxRateLimit;
return resolve();
});
const singleRequest = () => fetch("/abc", { method: "GET" });
await batchDiscordRequests(new Array(inputSize).fill(singleRequest));
expect(global.fetch).toHaveBeenCalledWith("/abc", { method: "GET" });
expect(global.fetch).toBeCalledTimes(inputSize + 3);
});
test("should retry only failed fetch calls", async () => {
const maxRateLimit = 3;
const inputSize = 6;
let remainingRateLimit = maxRateLimit;
let retries = 5;
const headers = { ...rateLimitingHeaders };
fetchSpy.mockImplementation(
() =>
new Promise((resolve) => {
const status = retries > 0 ? 500 : 200;
retries--;
headers[DISCORD_HEADERS.RATE_LIMIT_REMAINING] =
remainingRateLimit.toString();
remainingRateLimit--;
return resolve(
new JSONResponse({}, { headers: headers, status: status })
);
})
);
setTimeoutSpy.mockImplementation((resolve: any) => {
remainingRateLimit = maxRateLimit;
return resolve();
});
const singleRequest = () => fetch("/abc", { method: "GET" });
await batchDiscordRequests(new Array(inputSize).fill(singleRequest));
expect(global.fetch).toHaveBeenCalledWith("/abc", { method: "GET" });
expect(global.fetch).toBeCalledTimes(inputSize + 3);
});

test("should retry even for the rate limited exceeded headers", async () => {
const inputSize = 4;
const headers = { ...rateLimitExceededHeaders };
fetchSpy.mockImplementation(
() =>
new Promise((resolve) => {
return resolve(
new JSONResponse({}, { headers: headers, status: 500 })
);
})
);
setTimeoutSpy.mockImplementation((resolve: any) => {
return resolve();
});
const singleRequest = () => fetch("/abc", { method: "GET" });
await batchDiscordRequests(new Array(inputSize).fill(singleRequest));
expect(global.fetch).toHaveBeenCalledWith("/abc", { method: "GET" });
expect(global.fetch).toBeCalledTimes(inputSize * 2);
});
test("should handle network errors and continue processing", async () => {
const inputSize = 3;
fetchSpy.mockImplementation(() => Promise.reject("Network error"));
const singleRequest = () => fetch("/abc", { method: "GET" });
await batchDiscordRequests(new Array(inputSize).fill(singleRequest));
expect(global.fetch).toHaveBeenCalledWith("/abc", { method: "GET" });
expect(global.fetch).toBeCalledTimes(inputSize * 2);
});
});
