diff --git a/README.md b/README.md index c746e6a..6c7c2c4 100644 --- a/README.md +++ b/README.md @@ -182,7 +182,7 @@ const limiter = new Bottleneck({ ### wrap() -Takes a function that returns a promise. Returns a function identical to the original, but is rate limited. +Takes a function that returns a promise. Returns a function identical to the original, but rate limited. ```js const wrapped = limiter.wrap(fn) ``` @@ -360,6 +360,8 @@ limiterB.chain(limiterC); // Requests added to limiterC must follow the C rate limits. ``` +To unchain, call `limiter.chain(null);`. + ## Clustering @@ -423,6 +425,8 @@ This helper method calls the `.end(flush)` method on the Redis clients used by a limiter.disconnect(true) ``` +The `flush` argument is optional and defaults to `true`. + ###### `.clients()` If you need direct access to the redis clients, use `.clients()`: @@ -549,7 +553,7 @@ The return value of `.key(str)` is a limiter. If it doesn't already exist, it is __stopAutoCleanup()__ -Calling `stopAutoCleanup()` on a group will turn off its garbage collection, so limiters for keys that have not been used in over **5 minutes** will NOT be deleted anymore. It can be reenabled by calling `startAutoCleanup()`. The `5 minutes` figure can be modified by calling `updateTimeout()`. +Calling `stopAutoCleanup()` on a group will turn off its garbage collection, so limiters for keys that have not been used in over **5 minutes** will NOT be deleted anymore. It can be reenabled by calling `startAutoCleanup()`. The `5 minutes` figure can be modified by calling `updateSettings()`. __startAutoCleanup()__ @@ -627,7 +631,9 @@ Suggestions and bug reports are also welcome. To work on the Bottleneck code, simply clone the repo, and run `./scripts/build.sh && npm test` to ensure that everything is set up correctly. -Make your changes to the files localted in `src/` only, then run `./scripts/build.sh && npm test` to compile and test them. +Make your changes to the files located in `src/` only, then run `./scripts/build.sh && npm test` to build and test them. + +To speed up compilation, run `./scripts/build.sh compile`. It only recompiles the `src/` files and skips the `bottleneck.d.ts` tests and the browser bundle generation. The tests must also pass in Clustering mode. You'll need a Redis server running on `127.0.0.1:6379`, then run `./scripts/build.sh && DATASTORE=redis npm test`. diff --git a/bottleneck.d.ts b/bottleneck.d.ts new file mode 100644 index 0000000..293b0ef --- /dev/null +++ b/bottleneck.d.ts @@ -0,0 +1,245 @@ +declare module "bottleneck" { + namespace Bottleneck { + type ConstructorOptions = { + readonly maxConcurrent?: number; + readonly minTime?: number; + readonly highWater?: number; + readonly strategy?: Bottleneck.Strategy; + readonly penalty?: number; + readonly reservoir?: number; + readonly datastore?: string; + readonly id?: string; + readonly rejectOnDrop?: boolean; + readonly clientOptions?: any; + readonly clearDatastore?: boolean; + [propName: string]: any; + }; + type JobOptions = { + readonly priority?: number; + readonly weight?: number; + readonly expiration?: number; + readonly id?: string; + }; + type GroupOptions = { + readonly timeout?: number; + }; + type Callback<T = any> = (err: any, result: T) => void; + interface ClientsList { client?: any; subscriber?: any } + interface GroupLimiterPair { key: string; limiter: Bottleneck } + interface Strategy {} + + class Group { + constructor(options?: Bottleneck.ConstructorOptions); + + /** + * Returns the limiter for the specified key.
+ * @param str - The limiter key. + */ + key(str: string): Bottleneck; + + /** + * Disables limiter garbage collection. + */ + stopAutoCleanup(): void; + + /** + * Enables limiter garbage collection. + */ + startAutoCleanup(): void; + + /** + * Updates the group settings. + * @param options - The new settings. + */ + updateSettings(options: Bottleneck.GroupOptions): void; + + /** + * Deletes the limiter for the given key. + * @param str - The key. + */ + deleteKey(str: string): void; + + /** + * Returns all the key-limiter pairs. + */ + limiters(): Bottleneck.GroupLimiterPair[]; + + /** + * Returns all the keys in the Cluster. + */ + keys(): string[]; + } + } + + class Bottleneck { + public static readonly strategy: { + /** + * When submitting a new request, if the queue length reaches highWater, drop the oldest request with the lowest priority. This is useful when requests that have been waiting for too long are not important anymore. If all the queued up requests are more important than the one being added, it won't be added. + */ + readonly LEAK: Bottleneck.Strategy; + /** + * Same as LEAK, except that it will only drop requests that are less important than the one being added. If all the queued up requests are as important or more than the new one, it won't be added. + */ + readonly OVERFLOW_PRIORITY: Bottleneck.Strategy; + /** + * When submitting a new request, if the queue length reaches highWater, do not add the new request. This strategy totally ignores priority levels. + */ + readonly OVERFLOW: Bottleneck.Strategy; + /** + * When submitting a new request, if the queue length reaches highWater, the limiter falls into "blocked mode". All queued requests are dropped and no new requests will be accepted until the limiter unblocks. It will unblock after penalty milliseconds have passed without receiving a new request. penalty is equal to 15 * minTime (or 5000 if minTime is 0) by default and can be changed by calling changePenalty(). This strategy is ideal when brute-force attacks are to be expected. This strategy totally ignores priority levels. + */ + readonly BLOCK: Bottleneck.Strategy; + }; + + constructor(options?: Bottleneck.ConstructorOptions); + + /** + * Returns a promise which will be resolved once the limiter is ready to accept jobs + * or rejected if it fails to start up. + */ + ready(): Promise<any>; + + /** + * Returns a datastore-specific object of redis clients. + */ + clients(): Bottleneck.ClientsList; + + /** + * Disconnects all redis clients. + * @param flush - Write transient data before closing. + */ + disconnect(flush?: boolean): Bottleneck; + + /** + * Returns the number of requests queued. + * @param priority - Returns the number of requests queued with the specified priority. + */ + queued(priority?: number): number; + + /** + * Returns the number of requests running. + */ + running(): Promise<number>; + + /** + * If a request was added right now, would it be run immediately? + * @param weight - The weight of the request. + */ + check(weight?: number): Promise<boolean>; + + /** + * Register an event listener. + * @param name - The event name. + * @param fn - The callback function. + */ + on(name: string, fn: Function): Bottleneck; + on(name: "error", fn: (error: any) => void): Bottleneck; + on(name: "empty", fn: () => void): Bottleneck; + on(name: "idle", fn: () => void): Bottleneck; + on(name: "dropped", fn: (dropped: any) => void): Bottleneck; + on(name: "debug", fn: (message: string, data: any) => void): Bottleneck; + + /** + * Register an event listener for one event only.
+ * @param name - The event name. + * @param fn - The callback function. + */ + once(name: string, fn: Function): Bottleneck; + once(name: "error", fn: (error: any) => void): Bottleneck; + once(name: "empty", fn: () => void): Bottleneck; + once(name: "idle", fn: () => void): Bottleneck; + once(name: "dropped", fn: (dropped: any) => void): Bottleneck; + once(name: "debug", fn: (message: string, data: any) => void): Bottleneck; + + /** + * Removes all registered event listeners. + * @param name - The optional event name to remove listeners from. + */ + removeAllListeners(name?: string): Bottleneck; + + /** + * Changes the settings for future requests. + * @param options - The new settings. + */ + updateSettings(options?: Bottleneck.ConstructorOptions): Bottleneck; + + /** + * Adds to the reservoir count. + */ + incrementReservoir(incrementBy: number): Bottleneck; + + /** + * Returns the current reservoir count, if any. + */ + currentReservoir(): Promise; + + /** + * Chain this limiter to another. + * @param limiter - The limiter that requests to this limiter must also follow. + */ + chain(limiter?: Bottleneck): Bottleneck; + + wrap(fn: () => PromiseLike): () => Promise; + wrap(fn: (arg1: A1) => PromiseLike): (A1) => Promise; + wrap(fn: (arg1: A1, arg2: A2) => PromiseLike): (A1, A2) => Promise; + wrap(fn: (arg1: A1, arg2: A2, arg3: A3) => PromiseLike): (A1, A2, A3) => Promise; + wrap(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4) => PromiseLike): (A1, A2, A3, A4) => Promise; + wrap(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5) => PromiseLike): (A1, A2, A3, A4, A5) => Promise; + wrap(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6) => PromiseLike): (A1, A2, A3, A4, A5, A6) => Promise; + wrap(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7) => PromiseLike): (A1, A2, A3, A4, A5, A6, A7) => Promise; + wrap(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8) => PromiseLike): (A1, A2, A3, A4, A5, A6, A7, A8) => Promise; + wrap(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9) => PromiseLike): (A1, A2, A3, A4, A5, A6, A7, A8, A9) => Promise; + wrap(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9, arg10: A10) => PromiseLike): (A1, A2, A3, A4, A5, A6, A7, A8, A9, A10) => Promise; + + submit(fn: (callback: Bottleneck.Callback) => void, callback: Bottleneck.Callback): void; + submit(fn: (arg1: A1, callback: Bottleneck.Callback) => void, arg1: A1, callback: Bottleneck.Callback): void; + submit(fn: (arg1: A1, arg2: A2, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, callback: Bottleneck.Callback): void; + submit(fn: (arg1: A1, arg2: A2, arg3: A3, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, callback: Bottleneck.Callback): void; + submit(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, arg4: A4, callback: Bottleneck.Callback): void; + submit(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, callback: Bottleneck.Callback): void; + submit(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, callback: Bottleneck.Callback): void; + submit(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, callback: 
Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, callback: Bottleneck.Callback): void; + submit(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, callback: Bottleneck.Callback): void; + submit(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9, callback: Bottleneck.Callback): void; + submit(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9, arg10: A10, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9, arg10: A10, callback: Bottleneck.Callback): void; + + submit(options: Bottleneck.JobOptions, fn: (callback: Bottleneck.Callback) => void, callback: Bottleneck.Callback): void; + submit(options: Bottleneck.JobOptions, fn: (arg1: A1, callback: Bottleneck.Callback) => void, arg1: A1, callback: Bottleneck.Callback): void; + submit(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, callback: Bottleneck.Callback): void; + submit(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, callback: Bottleneck.Callback): void; + submit(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, arg4: A4, callback: Bottleneck.Callback): void; + submit(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, callback: Bottleneck.Callback): void; + submit(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, callback: Bottleneck.Callback): void; + submit(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, callback: Bottleneck.Callback): void; + submit(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, callback: Bottleneck.Callback): void; + submit(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9, callback: Bottleneck.Callback): void; + submit(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9, arg10: A10, callback: Bottleneck.Callback) => void, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9, arg10: A10, callback: Bottleneck.Callback): void; + + schedule(fn: () => PromiseLike): Promise; + schedule(fn: (arg1: A1) => PromiseLike, arg1: A1): Promise; + schedule(fn: 
(arg1: A1, arg2: A2) => PromiseLike, arg1: A1, arg2: A2): Promise; + schedule(fn: (arg1: A1, arg2: A2, arg3: A3) => PromiseLike, arg1: A1, arg2: A2, arg3: A3): Promise; + schedule(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4) => PromiseLike, arg1: A1, arg2: A2, arg3: A3, arg4: A4): Promise; + schedule(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5) => PromiseLike, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5): Promise; + schedule(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6) => PromiseLike, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6): Promise; + schedule(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7) => PromiseLike, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7): Promise; + schedule(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8) => PromiseLike, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8): Promise; + schedule(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9) => PromiseLike, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9): Promise; + schedule(fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9, arg10: A10) => PromiseLike, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9, arg10: A10): Promise; + + schedule(options: Bottleneck.JobOptions, fn: () => PromiseLike): Promise; + schedule(options: Bottleneck.JobOptions, fn: (arg1: A1) => PromiseLike, arg1: A1): Promise; + schedule(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2) => PromiseLike, arg1: A1, arg2: A2): Promise; + schedule(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3) => PromiseLike, arg1: A1, arg2: A2, arg3: A3): Promise; + schedule(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4) => PromiseLike, arg1: A1, arg2: A2, arg3: A3, arg4: A4): Promise; + schedule(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5) => PromiseLike, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5): Promise; + schedule(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6) => PromiseLike, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6): Promise; + schedule(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7) => PromiseLike, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7): Promise; + schedule(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8) => PromiseLike, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8): Promise; + schedule(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9) => PromiseLike, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9): Promise; + schedule(options: Bottleneck.JobOptions, fn: (arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9, arg10: A10) => PromiseLike, arg1: A1, arg2: A2, arg3: A3, arg4: A4, arg5: A5, arg6: A6, arg7: A7, arg8: A8, arg9: A9, arg10: A10): Promise; + } + + export default Bottleneck; +} + diff --git a/bottleneck.d.ts.ejs b/bottleneck.d.ts.ejs new file mode 100644 index 0000000..2ab6097 --- /dev/null +++ b/bottleneck.d.ts.ejs @@ 
-0,0 +1,204 @@ +declare module "bottleneck" { + namespace Bottleneck { + type ConstructorOptions = { + readonly maxConcurrent?: number; + readonly minTime?: number; + readonly highWater?: number; + readonly strategy?: Bottleneck.Strategy; + readonly penalty?: number; + readonly reservoir?: number; + readonly datastore?: string; + readonly id?: string; + readonly rejectOnDrop?: boolean; + readonly clientOptions?: any; + readonly clearDatastore?: boolean; + [propName: string]: any; + }; + type JobOptions = { + readonly priority?: number; + readonly weight?: number; + readonly expiration?: number; + readonly id?: string; + }; + type GroupOptions = { + readonly timeout?: number; + }; + type Callback = (err: any, result: T) => void; + interface ClientsList { client?: any; subscriber?: any } + interface GroupLimiterPair { key: string; limiter: Bottleneck } + interface Strategy {} + + class Group { + constructor(options?: Bottleneck.ConstructorOptions); + + /** + * Returns the limiter for the specified key. + * @param str - The limiter key. + */ + key(str: string): Bottleneck; + + /** + * Disables limiter garbage collection. + */ + stopAutoCleanup(): void; + + /** + * Enables limiter garbage collection. + */ + startAutoCleanup(): void; + + /** + * Updates the group settings. + * @param options - The new settings. + */ + updateSettings(options: Bottleneck.GroupOptions): void; + + /** + * Deletes the limiter for the given key + * @param str - The key + */ + deleteKey(str: string): void; + + /** + * Returns all the key-limiter pairs. + */ + limiters(): Bottleneck.GroupLimiterPair[]; + + /** + * Returns all the keys in the Cluster + */ + keys(): string[]; + } + } + + class Bottleneck { + public static readonly strategy: { + /** + * When submitting a new request, if the queue length reaches highWater, drop the oldest request with the lowest priority. This is useful when requests that have been waiting for too long are not important anymore. If all the queued up requests are more important than the one being added, it won't be added. + */ + readonly LEAK: Bottleneck.Strategy; + /** + * Same as LEAK, except that it will only drop requests that are less important than the one being added. If all the queued up requests are as important or more than the new one, it won't be added. + */ + readonly OVERFLOW_PRIORITY: Bottleneck.Strategy; + /** + * When submitting a new request, if the queue length reaches highWater, do not add the new request. This strategy totally ignores priority levels. + */ + readonly OVERFLOW: Bottleneck.Strategy; + /** + * When submitting a new request, if the queue length reaches highWater, the limiter falls into "blocked mode". All queued requests are dropped and no new requests will be accepted until the limiter unblocks. It will unblock after penalty milliseconds have passed without receiving a new request. penalty is equal to 15 * minTime (or 5000 if minTime is 0) by default and can be changed by calling changePenalty(). This strategy is ideal when bruteforce attacks are to be expected. This strategy totally ignores priority levels. + */ + readonly BLOCK: Bottleneck.Strategy; + }; + + constructor(options?: Bottleneck.ConstructorOptions); + + /** + * Returns a promise which will be resolved once the limiter is ready to accept jobs + * or rejected if it fails to start up. + */ + ready(): Promise; + + /** + * Returns a datastore-specific object of redis clients. + */ + clients(): Bottleneck.ClientsList; + + /** + * Disconnects all redis clients. 
+ * @param flush - Write transient data before closing. + */ + disconnect(flush?: boolean): Bottleneck; + + /** + * Returns the number of requests queued. + * @param priority - Returns the number of requests queued with the specified priority. + */ + queued(priority?: number): number; + + /** + * Returns the number of requests running. + */ + running(): Promise; + + /** + * If a request was added right now, would it be run immediately? + * @param weight - The weight of the request + */ + check(weight?: number): Promise; + + /** + * Register an event listener. + * @param name - The event name. + * @param fn - The callback function. + */ + on(name: string, fn: Function): Bottleneck; + on(name: "error", fn: (error: any) => void): Bottleneck; + on(name: "empty", fn: () => void): Bottleneck; + on(name: "idle", fn: () => void): Bottleneck; + on(name: "dropped", fn: (dropped: any) => void): Bottleneck; + on(name: "debug", fn: (message: string, data: any) => void): Bottleneck; + + /** + * Register an event listener for one event only. + * @param name - The event name. + * @param fn - The callback function. + */ + once(name: string, fn: Function): Bottleneck; + once(name: "error", fn: (error: any) => void): Bottleneck; + once(name: "empty", fn: () => void): Bottleneck; + once(name: "idle", fn: () => void): Bottleneck; + once(name: "dropped", fn: (dropped: any) => void): Bottleneck; + once(name: "debug", fn: (message: string, data: any) => void): Bottleneck; + + /** + * Removes all registered event listeners. + * @param name - The optional event name to remove listeners from. + */ + removeAllListeners(name?: string): Bottleneck; + + /** + * Changes the settings for future requests. + * @param options - The new settings. + */ + updateSettings(options?: Bottleneck.ConstructorOptions): Bottleneck; + + /** + * Adds to the reservoir count. + */ + incrementReservoir(incrementBy: number): Bottleneck; + + /** + * Returns the current reservoir count, if any. + */ + currentReservoir(): Promise; + + /** + * Chain this limiter to another. + * @param limiter - The limiter that requests to this limiter must also follow. 
+ */ + chain(limiter?: Bottleneck): Bottleneck; + + <%_ for (var count of [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) { _%> + wrap<R<%_ for (var idx = 1; idx <= count; idx++) { _%>, A<%= idx %><%_ } _%>>(fn: (<%= Array.apply(null, Array(count)).map((e, i) => i+1).map(i => `arg${i}: A${i}`).join(", ") %>) => PromiseLike<R>): (<%_ for (var idx = 1; idx <= count; idx++) { _%><% if (idx > 1) { %>, <% } %>A<%= idx %><% } _%>) => Promise<R>; + <%_ } _%> + + <%_ for (var count of [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) { _%> + submit<R<%_ for (var idx = 1; idx <= count; idx++) { _%>, A<%= idx %><%_ } _%>>(fn: (<%_ for (var idx = 1; idx <= count; idx++) { _%>arg<%= idx %>: A<%= idx %>, <% } _%>callback: Bottleneck.Callback<R>) => void<%_ for (var idx = 1; idx <= count; idx++) { _%>, arg<%= idx %>: A<%= idx %><% } _%>, callback: Bottleneck.Callback<R>): void; + <%_ } _%> + + <%_ for (var count of [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) { _%> + submit<R<%_ for (var idx = 1; idx <= count; idx++) { _%>, A<%= idx %><%_ } _%>>(options: Bottleneck.JobOptions, fn: (<%_ for (var idx = 1; idx <= count; idx++) { _%>arg<%= idx %>: A<%= idx %>, <% } _%>callback: Bottleneck.Callback<R>) => void<%_ for (var idx = 1; idx <= count; idx++) { _%>, arg<%= idx %>: A<%= idx %><% } _%>, callback: Bottleneck.Callback<R>): void; + <%_ } _%> + + <%_ for (var count of [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) { _%> + schedule<R<%_ for (var idx = 1; idx <= count; idx++) { _%>, A<%= idx %><%_ } _%>>(fn: (<%= Array.apply(null, Array(count)).map((e, i) => i+1).map(i => `arg${i}: A${i}`).join(", ") %>) => PromiseLike<R><%_ for (var idx = 1; idx <= count; idx++) { _%>, arg<%= idx %>: A<%= idx %><% } _%>): Promise<R>; + <%_ } _%> + + <%_ for (var count of [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) { _%> + schedule<R<%_ for (var idx = 1; idx <= count; idx++) { _%>, A<%= idx %><%_ } _%>>(options: Bottleneck.JobOptions, fn: (<%= Array.apply(null, Array(count)).map((e, i) => i+1).map(i => `arg${i}: A${i}`).join(", ") %>) => PromiseLike<R><%_ for (var idx = 1; idx <= count; idx++) { _%>, arg<%= idx %>: A<%= idx %><% } _%>): Promise<R>; + <%_ } _%> + } + + export default Bottleneck; +} diff --git a/bottleneck.js b/bottleneck.js index 4df0d86..45ee598 100644 --- a/bottleneck.js +++ b/bottleneck.js @@ -71,7 +71,7 @@ return this._store.clients; } - async disconnect(flush) { + async disconnect(flush = true) { return (await this._store.disconnect(flush)); } @@ -903,7 +903,7 @@ args.unshift((options.clearDatastore ? 
1 : 0)); return this.runScript("init", args); }).then((results) => { - return this.client; + return this.clients; }); } @@ -1196,11 +1196,14 @@ module.exports={ "url": "https://github.com/SGrondin/bottleneck/issues" }, "devDependencies": { + "@types/es6-promise": "0.0.33", "assert": "1.4.x", "browserify": "*", "coffeescript": "2.0.x", + "ejs-cli": "^2.0.0", "mocha": "4.x", "redis": "^2.8.0", + "typescript": "^2.6.2", "uglify-es": "3.x" }, "dependencies": {} diff --git a/bottleneck.min.js b/bottleneck.min.js index d50bcdd..da986d6 100644 --- a/bottleneck.min.js +++ b/bottleneck.min.js @@ -1 +1 @@ -(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o=ref;i=1<=ref?++j:--j){results.push(new DLList)}return results}chain(_limiter){this._limiter=_limiter;return this}_sanitizePriority(priority){var sProperty;sProperty=~~priority!==priority?DEFAULT_PRIORITY:priority;if(sProperty<0){return 0}else if(sProperty>NUM_PRIORITIES-1){return NUM_PRIORITIES-1}else{return sProperty}}_find(arr,fn){var ref;return(ref=function(){var i,j,len,x;for(i=j=0,len=arr.length;j0})}_randomIndex(){return Math.random().toString(36).slice(2)}async check(weight=1){return await this._store.__check__(weight)}_run(next,wait,index){var completed,done;this._trigger("debug",[`Scheduling ${next.options.id}`,{args:next.args,options:next.options}]);done=false;completed=(async(...args)=>{var e,ref,running;if(!done){try{done=true;clearTimeout(this._executing[index].expiration);delete this._executing[index];this._trigger("debug",[`Completed ${next.options.id}`,{args:next.args,options:next.options}]);({running:running}=await this._store.__free__(index,next.options.weight));this._trigger("debug",[`Freed ${next.options.id}`,{args:next.args,options:next.options}]);this._drainAll().catch(e=>{return this._trigger("error",[e])});if(running===0&&this.queued()===0){this._trigger("idle",[])}return(ref=next.cb)!=null?ref.apply({},args):void 0}catch(error){e=error;return this._trigger("error",[e])}}});return this._executing[index]={timeout:setTimeout(()=>{this._trigger("debug",[`Executing ${next.options.id}`,{args:next.args,options:next.options}]);if(this._limiter!=null){return this._limiter.submit.apply(this._limiter,Array.prototype.concat(next.options,next.task,next.args,completed))}else{return next.task.apply({},next.args.concat(completed))}},wait),expiration:next.options.expiration!=null?setTimeout(()=>{return completed(new Bottleneck.prototype.BottleneckError(`This job timed out after ${next.options.expiration} ms.`))},next.options.expiration):void 0,job:next}}_drainOne(freed){return this._registerLock.schedule(()=>{var args,index,options,queue,queued;if((queued=this.queued())===0){return this.Promise.resolve(false)}queue=this._getFirst(this._queues);({options:options,args:args}=queue.first());if(freed!=null&&options.weight>freed){return this.Promise.resolve(false)}this._trigger("debug",[`Draining ${options.id}`,{args:args,options:options}]);index=this._randomIndex();return this._store.__register__(index,options.weight,options.expiration).then(({success:success,wait:wait})=>{var next;this._trigger("debug",[`Drained 
${options.id}`,{success:success,args:args,options:options}]);if(success){next=queue.shift();if(this.queued()===0&&this._submitLock._queue.length===0){this._trigger("empty",[])}this._run(next,wait,index)}return this.Promise.resolve(success)})})}_drainAll(freed){return this._drainOne(freed).then(success=>{if(success){return this._drainAll()}else{return this.Promise.resolve(success)}}).catch(e=>{return this._trigger("error",[e])})}submit(...args){var cb,j,job,k,options,ref,ref1,task;if(typeof args[0]==="function"){ref=args,task=ref[0],args=3<=ref.length?slice.call(ref,1,j=ref.length-1):(j=1,[]),cb=ref[j++];options=this.jobDefaults}else{ref1=args,options=ref1[0],task=ref1[1],args=4<=ref1.length?slice.call(ref1,2,k=ref1.length-1):(k=2,[]),cb=ref1[k++];options=parser.load(options,this.jobDefaults)}job={options:options,task:task,args:args,cb:cb};options.priority=this._sanitizePriority(options.priority);this._trigger("debug",[`Queueing ${options.id}`,{args:args,options:options}]);return this._submitLock.schedule(async()=>{var blocked,e,reachedHWM,shifted,strategy;try{({reachedHWM:reachedHWM,blocked:blocked,strategy:strategy}=await this._store.__submit__(this.queued(),options.weight));this._trigger("debug",[`Queued ${options.id}`,{args:args,options:options,reachedHWM:reachedHWM,blocked:blocked}])}catch(error){e=error;this._trigger("debug",[`Could not queue ${options.id}`,{args:args,options:options,error:e}]);job.cb(e);return false}if(blocked){this._queues=this._makeQueues();this._trigger("dropped",[job]);return true}else if(reachedHWM){shifted=strategy===Bottleneck.prototype.strategy.LEAK?this._getFirst(this._queues.slice(options.priority).reverse()).shift():strategy===Bottleneck.prototype.strategy.OVERFLOW_PRIORITY?this._getFirst(this._queues.slice(options.priority+1).reverse()).shift():strategy===Bottleneck.prototype.strategy.OVERFLOW?job:void 0;if(shifted!=null){this._trigger("dropped",[shifted])}if(shifted==null||strategy===Bottleneck.prototype.strategy.OVERFLOW){return reachedHWM}}this._queues[options.priority].push(job);await this._drainAll();return reachedHWM})}schedule(...args){var options,task,wrapped;if(typeof args[0]==="function"){[task,...args]=args;options=this.jobDefaults}else{[options,task,...args]=args;options=parser.load(options,this.jobDefaults)}wrapped=function(...args){var cb,j,ref;ref=args,args=2<=ref.length?slice.call(ref,0,j=ref.length-1):(j=0,[]),cb=ref[j++];return task.apply({},args).then(function(...args){return cb.apply({},Array.prototype.concat(null,args))}).catch(function(...args){return cb.apply({},args)})};return new this.Promise((resolve,reject)=>{return this.submit.apply({},Array.prototype.concat(options,wrapped,args,function(...args){return(args[0]!=null?reject:(args.shift(),resolve)).apply({},args)})).catch(e=>{return this._trigger("error",[e])})})}wrap(fn){return(...args)=>{return this.schedule.apply({},Array.prototype.concat(fn,args))}}async updateSettings(options={}){await this._store.__updateSettings__(parser.overwrite(options,this.storeDefaults));parser.overwrite(options,this.instanceDefaults,this);this._drainAll().catch(e=>{return this._trigger("error",[e])});return this}async currentReservoir(){return await this._store.__currentReservoir__()}async incrementReservoir(incr=0){await this._store.__incrementReservoir__(incr);this._drainAll().catch(e=>{return this._trigger("error",[e])});return this}on(name,cb){return this._addListener(name,"many",cb)}once(name,cb){return this._addListener(name,"once",cb)}removeAllListeners(name=null){if(name!=null){delete 
this._events[name]}else{this._events={}}return this}}Bottleneck.default=Bottleneck;Bottleneck.version=Bottleneck.prototype.version=packagejson.version;Bottleneck.strategy=Bottleneck.prototype.strategy={LEAK:1,OVERFLOW:2,OVERFLOW_PRIORITY:4,BLOCK:3};Bottleneck.BottleneckError=Bottleneck.prototype.BottleneckError=require("./BottleneckError");Bottleneck.Group=Bottleneck.prototype.Group=require("./Group");Bottleneck.prototype.jobDefaults={priority:DEFAULT_PRIORITY,weight:1,expiration:null,id:""};Bottleneck.prototype.storeDefaults={maxConcurrent:null,minTime:0,highWater:null,strategy:Bottleneck.prototype.strategy.LEAK,penalty:null,reservoir:null};Bottleneck.prototype.storeInstanceDefaults={clientOptions:{},clearDatastore:false,Promise:Promise};Bottleneck.prototype.instanceDefaults={datastore:"local",id:"",rejectOnDrop:true,Promise:Promise};return Bottleneck}();module.exports=Bottleneck}).call(this)},{"../package.json":11,"./BottleneckError":2,"./DLList":3,"./Group":4,"./Local":5,"./RedisStorage":6,"./Sync":7,"./parser":10}],2:[function(require,module,exports){(function(){var BottleneckError;BottleneckError=class BottleneckError extends Error{};module.exports=BottleneckError}).call(this)},{}],3:[function(require,module,exports){(function(){var DLList;DLList=class DLList{constructor(){this._first=null;this._last=null;this.length=0}push(value){var node;this.length++;node={value:value,next:null};if(this._last!=null){this._last.next=node;this._last=node}else{this._first=this._last=node}return void 0}shift(){var ref1,value;if(this._first==null){return void 0}else{this.length--}value=this._first.value;this._first=(ref1=this._first.next)!=null?ref1:this._last=null;return value}first(){if(this._first!=null){return this._first.value}}getArray(){var node,ref,results;node=this._first;results=[];while(node!=null){results.push((ref=node,node=node.next,ref.value))}return results}};module.exports=DLList}).call(this)},{}],4:[function(require,module,exports){(function(){var Group,parser;parser=require("./parser");Group=function(){class Group{constructor(limiterOptions={},groupOptions={}){this.key=this.key.bind(this);this.deleteKey=this.deleteKey.bind(this);this.limiters=this.limiters.bind(this);this.keys=this.keys.bind(this);this.startAutoCleanup=this.startAutoCleanup.bind(this);this.stopAutoCleanup=this.stopAutoCleanup.bind(this);this.updateSettings=this.updateSettings.bind(this);this.limiterOptions=limiterOptions;parser.load(groupOptions,this.defaults,this);this.instances={};this.Bottleneck=require("./Bottleneck");this.startAutoCleanup()}key(key=""){var ref;return(ref=this.instances[key])!=null?ref:this.instances[key]=new this.Bottleneck(this.limiterOptions)}deleteKey(key=""){return delete this.instances[key]}limiters(){var k,ref,results,v;ref=this.instances;results=[];for(k in ref){v=ref[k];results.push({key:k,limiter:v})}return results}keys(){return Object.keys(this.instances)}startAutoCleanup(){var base;this.stopAutoCleanup();return typeof(base=this.interval=setInterval(()=>{var k,ref,results,time,v;time=Date.now();ref=this.instances;results=[];for(k in ref){v=ref[k];if(v._nextRequest+this.timeout=0)}async __incrementReservoir__(incr){await this.yieldLoop();return this.reservoir+=incr}async __currentReservoir__(){await this.yieldLoop();return this.reservoir}isBlocked(now){return this._unblockTime>=now}check(weight,now){return this.conditionsCheck(weight)&&this._nextRequest-now<=0}async __check__(weight){var now;await this.yieldLoop();now=Date.now();return this.check(weight,now)}async 
__register__(index,weight,expiration){var now,wait;await this.yieldLoop();now=Date.now();if(this.conditionsCheck(weight)){this._running+=weight;this._executing[index]={timeout:expiration!=null?setTimeout(()=>{if(!this._executing[index].freed){this._executing[index].freed=true;return this._running-=weight}},expiration):void 0,freed:false};if(this.reservoir!=null){this.reservoir-=weight}wait=Math.max(this._nextRequest-now,0);this._nextRequest=now+wait+this.minTime;return{success:true,wait:wait}}else{return{success:false}}}strategyIsBlock(){return this.strategy===3}async __submit__(queueLength,weight){var blocked,now,reachedHWM;await this.yieldLoop();if(this.maxConcurrent!=null&&weight>this.maxConcurrent){throw new BottleneckError(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${this.maxConcurrent}`)}now=Date.now();reachedHWM=this.highWater!=null&&queueLength===this.highWater&&!this.check(weight,now);blocked=this.strategyIsBlock()&&(reachedHWM||this.isBlocked(now));if(blocked){this._unblockTime=now+this.computePenalty();this._nextRequest=this._unblockTime+this.minTime}return{reachedHWM:reachedHWM,blocked:blocked,strategy:this.strategy}}async __free__(index,weight){await this.yieldLoop();clearTimeout(this._executing[index].timeout);if(!this._executing[index].freed){this._executing[index].freed=true;this._running-=weight}return{running:this._running}}};module.exports=Local}).call(this)},{"./BottleneckError":2,"./DLList":3,"./parser":10}],6:[function(require,module,exports){(function(){var BottleneckError,DLList,RedisStorage,libraries,lua,parser,scripts;parser=require("./parser");DLList=require("./DLList");BottleneckError=require("./BottleneckError");lua=require("./lua.json");libraries={refresh_running:lua["refresh_running.lua"],conditions_check:lua["conditions_check.lua"]};scripts={init:{keys:["b_settings","b_running","b_executing"],libs:[],code:lua["init.lua"]},update_settings:{keys:["b_settings"],libs:[],code:lua["update_settings.lua"]},running:{keys:["b_settings","b_running","b_executing"],libs:["refresh_running"],code:lua["running.lua"]},check:{keys:["b_settings","b_running","b_executing"],libs:["refresh_running","conditions_check"],code:lua["check.lua"]},submit:{keys:["b_settings","b_running","b_executing"],libs:["refresh_running","conditions_check"],code:lua["submit.lua"]},register:{keys:["b_settings","b_running","b_executing"],libs:["refresh_running","conditions_check"],code:lua["register.lua"]},free:{keys:["b_settings","b_running","b_executing"],libs:["refresh_running"],code:lua["free.lua"]},current_reservoir:{keys:["b_settings"],libs:[],code:lua["current_reservoir.lua"]},increment_reservoir:{keys:["b_settings"],libs:[],code:lua["increment_reservoir.lua"]}};RedisStorage=class RedisStorage{constructor(instance,initSettings,options){var redis;this.loadAll=this.loadAll.bind(this);this.instance=instance;redis=require("redis");parser.load(options,options,this);this.client=redis.createClient(this.clientOptions);this.subClient=redis.createClient(this.clientOptions);this.shas={};this.clients={client:this.client,subscriber:this.subClient};this.ready=new this.Promise((resolve,reject)=>{var count,done,errorListener;errorListener=function(e){return reject(e)};count=0;done=(()=>{count++;if(count===2){[this.client,this.subClient].forEach(client=>{client.removeListener("error",errorListener);return client.on("error",e=>{return this.instance._trigger("error",[e])})});return 
resolve()}});this.client.on("error",errorListener);this.client.on("ready",function(){return done()});this.subClient.on("error",errorListener);return this.subClient.on("ready",()=>{this.subClient.on("subscribe",function(){return done()});return this.subClient.subscribe("bottleneck")})}).then(this.loadAll).then(()=>{var args;this.subClient.on("message",(channel,message)=>{var info,type;[type,info]=message.split(":");if(type==="freed"){return this.instance._drainAll(~~info)}});initSettings.nextRequest=Date.now();initSettings.running=0;initSettings.unblockTime=0;initSettings.version=this.instance.version;args=this.prepareObject(initSettings);args.unshift(options.clearDatastore?1:0);return this.runScript("init",args)}).then(results=>{return this.client})}disconnect(flush){this.client.end(flush);this.subClient.end(flush);return this}loadScript(name){return new this.Promise((resolve,reject)=>{var payload;payload=scripts[name].libs.map(function(lib){return libraries[lib]}).join("\n")+scripts[name].code;return this.client.multi([["script","load",payload]]).exec((err,replies)=>{if(err!=null){return reject(err)}this.shas[name]=replies[0];return resolve(replies[0])})})}loadAll(){var k,v;return this.Promise.all(function(){var results1;results1=[];for(k in scripts){v=scripts[k];results1.push(this.loadScript(k))}return results1}.call(this))}prepareArray(arr){return arr.map(function(x){if(x!=null){return x.toString()}else{return""}})}prepareObject(obj){var arr,k,v;arr=[];for(k in obj){v=obj[k];arr.push(k,v!=null?v.toString():"")}return arr}runScript(name,args){return new this.Promise((resolve,reject)=>{var arr,script;script=scripts[name];arr=[this.shas[name],script.keys.length].concat(script.keys,args,function(err,replies){if(err!=null){return reject(err)}return resolve(replies)});return this.client.evalsha.bind(this.client).apply({},arr)})}convertBool(b){return!!b}async __updateSettings__(options){return await this.runScript("update_settings",this.prepareObject(options))}async __running__(){return await this.runScript("running",[Date.now()])}async __incrementReservoir__(incr){return await this.runScript("increment_reservoir",[incr])}async __currentReservoir__(){return await this.runScript("current_reservoir",[])}async __check__(weight){return this.convertBool(await this.runScript("check",this.prepareArray([weight,Date.now()])))}async __register__(index,weight,expiration){var success,wait;[success,wait]=await this.runScript("register",this.prepareArray([index,weight,expiration,Date.now()]));return{success:this.convertBool(success),wait:wait}}async __submit__(queueLength,weight){var blocked,e,maxConcurrent,overweight,reachedHWM,strategy;try{[reachedHWM,blocked,strategy]=await this.runScript("submit",this.prepareArray([queueLength,weight,Date.now()]));return{reachedHWM:this.convertBool(reachedHWM),blocked:this.convertBool(blocked),strategy:strategy}}catch(error){e=error;if(e.message.indexOf("OVERWEIGHT")===0){[overweight,weight,maxConcurrent]=e.message.split(":");throw new BottleneckError(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${maxConcurrent}`)}else{throw e}}}async __free__(index,weight){var result;result=await this.runScript("free",this.prepareArray([index,Date.now()]));return{running:result}}};module.exports=RedisStorage}).call(this)},{"./BottleneckError":2,"./DLList":3,"./lua.json":9,"./parser":10,redis:undefined}],7:[function(require,module,exports){(function(){var DLList,Sync,slice=[].slice;DLList=require("./DLList");Sync=class 
Sync{constructor(name){this.submit=this.submit.bind(this);this.schedule=this.schedule.bind(this);this.wrap=this.wrap.bind(this);this.name=name;this._running=0;this._queue=new DLList}_tryToRun(){var next;if(this._running<1&&this._queue.length>0){this._running++;next=this._queue.shift();return next.task.apply({},next.args.concat((...args)=>{var ref;this._running--;this._tryToRun();return(ref=next.cb)!=null?ref.apply({},args):void 0}))}}submit(task,...args){var cb,i,ref;ref=args,args=2<=ref.length?slice.call(ref,0,i=ref.length-1):(i=0,[]),cb=ref[i++];this._queue.push({task:task,args:args,cb:cb});return this._tryToRun()}schedule(task,...args){var wrapped;wrapped=function(...args){var cb,i,ref;ref=args,args=2<=ref.length?slice.call(ref,0,i=ref.length-1):(i=0,[]),cb=ref[i++];return task.apply({},args).then(function(...args){return cb.apply({},Array.prototype.concat(null,args))}).catch(function(...args){return cb.apply({},args)})};return new Promise((resolve,reject)=>{return this.submit.apply({},Array.prototype.concat(wrapped,args,function(...args){return(args[0]!=null?reject:(args.shift(),resolve)).apply({},args)}))})}wrap(fn){return(...args)=>{return this.schedule.apply({},Array.prototype.concat(fn,args))}}};module.exports=Sync}).call(this)},{"./DLList":3}],8:[function(require,module,exports){(function(){module.exports=require("./Bottleneck")}).call(this)},{"./Bottleneck":1}],9:[function(require,module,exports){module.exports={"check.lua":"local settings_key = KEYS[1]\nlocal running_key = KEYS[2]\nlocal executing_key = KEYS[3]\n\nlocal weight = tonumber(ARGV[1])\nlocal now = tonumber(ARGV[2])\n\nlocal running = tonumber(refresh_running(executing_key, running_key, settings_key, now))\nlocal settings = redis.call('hmget', settings_key,\n 'maxConcurrent',\n 'reservoir',\n 'nextRequest'\n)\nlocal maxConcurrent = tonumber(settings[1])\nlocal reservoir = tonumber(settings[2])\nlocal nextRequest = tonumber(settings[3])\n\nlocal conditionsCheck = conditions_check(weight, maxConcurrent, running, reservoir)\n\nlocal result = conditionsCheck and nextRequest - now <= 0\n\nreturn result\n","conditions_check.lua":"local conditions_check = function (weight, maxConcurrent, running, reservoir)\n return (\n (maxConcurrent == nil or running + weight <= maxConcurrent) and\n (reservoir == nil or reservoir - weight >= 0)\n )\nend\n","current_reservoir.lua":"local settings_key = KEYS[1]\n\nreturn tonumber(redis.call('hget', settings_key, 'reservoir'))\n","free.lua":"local settings_key = KEYS[1]\nlocal running_key = KEYS[2]\nlocal executing_key = KEYS[3]\n\nlocal index = ARGV[1]\nlocal now = ARGV[2]\n\nredis.call('zadd', executing_key, 0, index)\n\nreturn refresh_running(executing_key, running_key, settings_key, now)\n","increment_reservoir.lua":"local settings_key = KEYS[1]\nlocal incr = ARGV[1]\n\nreturn redis.call('hincrby', settings_key, 'reservoir', incr)\n","init.lua":"local settings_key = KEYS[1]\nlocal running_key = KEYS[2]\nlocal executing_key = KEYS[3]\n\nlocal clear = tonumber(ARGV[1])\n\nif clear == 1 then\n redis.call('del', settings_key, running_key, executing_key)\nend\n\nif redis.call('exists', settings_key) == 0 then\n local args = {'hmset', settings_key}\n\n for i = 2, #ARGV do\n table.insert(args, ARGV[i])\n end\n\n redis.call(unpack(args))\nend\n\nreturn {}\n","refresh_running.lua":"local refresh_running = function (executing_key, running_key, settings_key, now)\n\n local expired = redis.call('zrangebyscore', executing_key, '-inf', '('..now)\n\n if #expired == 0 then\n return redis.call('hget', 
settings_key, 'running')\n else\n redis.call('zremrangebyscore', executing_key, '-inf', '('..now)\n\n local args = {'hmget', running_key}\n for i = 1, #expired do\n table.insert(args, expired[i])\n end\n\n local weights = redis.call(unpack(args))\n\n args[1] = 'hdel'\n local deleted = redis.call(unpack(args))\n\n local total = 0\n for i = 1, #weights do\n total = total + (tonumber(weights[i]) or 0)\n end\n local incr = -total\n if total == 0 then\n incr = 0\n else\n redis.call('publish', 'bottleneck', 'freed:'..total)\n end\n\n return redis.call('hincrby', settings_key, 'running', incr)\n end\n\nend\n","register.lua":"local settings_key = KEYS[1]\nlocal running_key = KEYS[2]\nlocal executing_key = KEYS[3]\n\nlocal index = ARGV[1]\nlocal weight = tonumber(ARGV[2])\nlocal expiration = tonumber(ARGV[3])\nlocal now = tonumber(ARGV[4])\n\nlocal running = tonumber(refresh_running(executing_key, running_key, settings_key, now))\nlocal settings = redis.call('hmget', settings_key,\n 'maxConcurrent',\n 'reservoir',\n 'nextRequest',\n 'minTime'\n)\nlocal maxConcurrent = tonumber(settings[1])\nlocal reservoir = tonumber(settings[2])\nlocal nextRequest = tonumber(settings[3])\nlocal minTime = tonumber(settings[4])\n\nif conditions_check(weight, maxConcurrent, running, reservoir) then\n\n if expiration ~= nil then\n redis.call('zadd', executing_key, now + expiration, index)\n end\n redis.call('hset', running_key, index, weight)\n redis.call('hincrby', settings_key, 'running', weight)\n\n local wait = math.max(nextRequest - now, 0)\n\n if reservoir == nil then\n redis.call('hset', settings_key,\n 'nextRequest', now + wait + minTime\n )\n else\n redis.call('hmset', settings_key,\n 'reservoir', reservoir - weight,\n 'nextRequest', now + wait + minTime\n )\n end\n\n return {true, wait}\n\nelse\n return {false}\nend\n","running.lua":"local settings_key = KEYS[1]\nlocal running_key = KEYS[2]\nlocal executing_key = KEYS[3]\nlocal now = ARGV[1]\n\nreturn tonumber(refresh_running(executing_key, running_key, settings_key, now))\n","submit.lua":"local settings_key = KEYS[1]\nlocal running_key = KEYS[2]\nlocal executing_key = KEYS[3]\n\nlocal queueLength = tonumber(ARGV[1])\nlocal weight = tonumber(ARGV[2])\nlocal now = tonumber(ARGV[3])\n\nlocal running = tonumber(refresh_running(executing_key, running_key, settings_key, now))\nlocal settings = redis.call('hmget', settings_key,\n 'maxConcurrent',\n 'highWater',\n 'reservoir',\n 'nextRequest',\n 'strategy',\n 'unblockTime',\n 'penalty',\n 'minTime'\n)\nlocal maxConcurrent = tonumber(settings[1])\nlocal highWater = tonumber(settings[2])\nlocal reservoir = tonumber(settings[3])\nlocal nextRequest = tonumber(settings[4])\nlocal strategy = tonumber(settings[5])\nlocal unblockTime = tonumber(settings[6])\nlocal penalty = tonumber(settings[7])\nlocal minTime = tonumber(settings[8])\n\nif maxConcurrent ~= nil and weight > maxConcurrent then\n return redis.error_reply('OVERWEIGHT:'..weight..':'..maxConcurrent)\nend\n\nlocal reachedHWM = (highWater ~= nil and queueLength == highWater\n and not (\n conditions_check(weight, maxConcurrent, running, reservoir)\n and nextRequest - now <= 0\n )\n)\n\nlocal blocked = strategy == 3 and (reachedHWM or unblockTime >= now)\n\nif blocked then\n local computedPenalty = penalty\n if computedPenalty == nil then\n if minTime == 0 then\n computedPenalty = 5000\n else\n computedPenalty = 15 * minTime\n end\n end\n\n redis.call('hmset', settings_key,\n 'unblockTime', now + computedPenalty,\n 'nextRequest', unblockTime + minTime\n 
)\nend\n\nreturn {reachedHWM, blocked, strategy}\n","update_settings.lua":"local settings_key = KEYS[1]\n\nlocal args = {'hmset', settings_key}\n\nfor i = 1, #ARGV do\n table.insert(args, ARGV[i])\nend\n\nredis.call(unpack(args))\n\nreturn {}\n"}},{}],10:[function(require,module,exports){(function(){exports.load=function(received,defaults,onto={}){var k,ref,v;for(k in defaults){v=defaults[k];onto[k]=(ref=received[k])!=null?ref:v}return onto};exports.overwrite=function(received,defaults,onto={}){var k,v;for(k in received){v=received[k];if(defaults[k]!==void 0){onto[k]=v}}return onto}}).call(this)},{}],11:[function(require,module,exports){module.exports={name:"bottleneck",version:"2.0.0-beta.3",description:"Distributed task scheduler and rate limiter",main:"lib/index.js",typings:"bottleneck.d.ts",scripts:{test:"./node_modules/mocha/bin/mocha test",build:"./scripts/build.sh",compile:"./scripts/build.sh compile"},repository:{type:"git",url:"https://github.com/SGrondin/bottleneck"},keywords:["async rate limiter","rate limiter","rate limiting","async","rate","limiting","limiter","throttle","throttling","load","ddos"],author:{name:"Simon Grondin"},license:"MIT",bugs:{url:"https://github.com/SGrondin/bottleneck/issues"},devDependencies:{assert:"1.4.x",browserify:"*",coffeescript:"2.0.x",mocha:"4.x",redis:"^2.8.0","uglify-es":"3.x"},dependencies:{}}},{}]},{},[8]); \ No newline at end of file +(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o=ref;i=1<=ref?++j:--j){results.push(new DLList)}return results}chain(_limiter){this._limiter=_limiter;return this}_sanitizePriority(priority){var sProperty;sProperty=~~priority!==priority?DEFAULT_PRIORITY:priority;if(sProperty<0){return 0}else if(sProperty>NUM_PRIORITIES-1){return NUM_PRIORITIES-1}else{return sProperty}}_find(arr,fn){var ref;return(ref=function(){var i,j,len,x;for(i=j=0,len=arr.length;j0})}_randomIndex(){return Math.random().toString(36).slice(2)}async check(weight=1){return await this._store.__check__(weight)}_run(next,wait,index){var completed,done;this._trigger("debug",[`Scheduling ${next.options.id}`,{args:next.args,options:next.options}]);done=false;completed=(async(...args)=>{var e,ref,running;if(!done){try{done=true;clearTimeout(this._executing[index].expiration);delete this._executing[index];this._trigger("debug",[`Completed ${next.options.id}`,{args:next.args,options:next.options}]);({running:running}=await this._store.__free__(index,next.options.weight));this._trigger("debug",[`Freed ${next.options.id}`,{args:next.args,options:next.options}]);this._drainAll().catch(e=>{return this._trigger("error",[e])});if(running===0&&this.queued()===0){this._trigger("idle",[])}return(ref=next.cb)!=null?ref.apply({},args):void 0}catch(error){e=error;return this._trigger("error",[e])}}});return this._executing[index]={timeout:setTimeout(()=>{this._trigger("debug",[`Executing ${next.options.id}`,{args:next.args,options:next.options}]);if(this._limiter!=null){return this._limiter.submit.apply(this._limiter,Array.prototype.concat(next.options,next.task,next.args,completed))}else{return 
next.task.apply({},next.args.concat(completed))}},wait),expiration:next.options.expiration!=null?setTimeout(()=>{return completed(new Bottleneck.prototype.BottleneckError(`This job timed out after ${next.options.expiration} ms.`))},next.options.expiration):void 0,job:next}}_drainOne(freed){return this._registerLock.schedule(()=>{var args,index,options,queue,queued;if((queued=this.queued())===0){return this.Promise.resolve(false)}queue=this._getFirst(this._queues);({options:options,args:args}=queue.first());if(freed!=null&&options.weight>freed){return this.Promise.resolve(false)}this._trigger("debug",[`Draining ${options.id}`,{args:args,options:options}]);index=this._randomIndex();return this._store.__register__(index,options.weight,options.expiration).then(({success:success,wait:wait})=>{var next;this._trigger("debug",[`Drained ${options.id}`,{success:success,args:args,options:options}]);if(success){next=queue.shift();if(this.queued()===0&&this._submitLock._queue.length===0){this._trigger("empty",[])}this._run(next,wait,index)}return this.Promise.resolve(success)})})}_drainAll(freed){return this._drainOne(freed).then(success=>{if(success){return this._drainAll()}else{return this.Promise.resolve(success)}}).catch(e=>{return this._trigger("error",[e])})}submit(...args){var cb,j,job,k,options,ref,ref1,task;if(typeof args[0]==="function"){ref=args,task=ref[0],args=3<=ref.length?slice.call(ref,1,j=ref.length-1):(j=1,[]),cb=ref[j++];options=this.jobDefaults}else{ref1=args,options=ref1[0],task=ref1[1],args=4<=ref1.length?slice.call(ref1,2,k=ref1.length-1):(k=2,[]),cb=ref1[k++];options=parser.load(options,this.jobDefaults)}job={options:options,task:task,args:args,cb:cb};options.priority=this._sanitizePriority(options.priority);this._trigger("debug",[`Queueing ${options.id}`,{args:args,options:options}]);return this._submitLock.schedule(async()=>{var blocked,e,reachedHWM,shifted,strategy;try{({reachedHWM:reachedHWM,blocked:blocked,strategy:strategy}=await this._store.__submit__(this.queued(),options.weight));this._trigger("debug",[`Queued ${options.id}`,{args:args,options:options,reachedHWM:reachedHWM,blocked:blocked}])}catch(error){e=error;this._trigger("debug",[`Could not queue ${options.id}`,{args:args,options:options,error:e}]);job.cb(e);return false}if(blocked){this._queues=this._makeQueues();this._trigger("dropped",[job]);return true}else if(reachedHWM){shifted=strategy===Bottleneck.prototype.strategy.LEAK?this._getFirst(this._queues.slice(options.priority).reverse()).shift():strategy===Bottleneck.prototype.strategy.OVERFLOW_PRIORITY?this._getFirst(this._queues.slice(options.priority+1).reverse()).shift():strategy===Bottleneck.prototype.strategy.OVERFLOW?job:void 0;if(shifted!=null){this._trigger("dropped",[shifted])}if(shifted==null||strategy===Bottleneck.prototype.strategy.OVERFLOW){return reachedHWM}}this._queues[options.priority].push(job);await this._drainAll();return reachedHWM})}schedule(...args){var options,task,wrapped;if(typeof args[0]==="function"){[task,...args]=args;options=this.jobDefaults}else{[options,task,...args]=args;options=parser.load(options,this.jobDefaults)}wrapped=function(...args){var cb,j,ref;ref=args,args=2<=ref.length?slice.call(ref,0,j=ref.length-1):(j=0,[]),cb=ref[j++];return task.apply({},args).then(function(...args){return cb.apply({},Array.prototype.concat(null,args))}).catch(function(...args){return cb.apply({},args)})};return new this.Promise((resolve,reject)=>{return 
this.submit.apply({},Array.prototype.concat(options,wrapped,args,function(...args){return(args[0]!=null?reject:(args.shift(),resolve)).apply({},args)})).catch(e=>{return this._trigger("error",[e])})})}wrap(fn){return(...args)=>{return this.schedule.apply({},Array.prototype.concat(fn,args))}}async updateSettings(options={}){await this._store.__updateSettings__(parser.overwrite(options,this.storeDefaults));parser.overwrite(options,this.instanceDefaults,this);this._drainAll().catch(e=>{return this._trigger("error",[e])});return this}async currentReservoir(){return await this._store.__currentReservoir__()}async incrementReservoir(incr=0){await this._store.__incrementReservoir__(incr);this._drainAll().catch(e=>{return this._trigger("error",[e])});return this}on(name,cb){return this._addListener(name,"many",cb)}once(name,cb){return this._addListener(name,"once",cb)}removeAllListeners(name=null){if(name!=null){delete this._events[name]}else{this._events={}}return this}}Bottleneck.default=Bottleneck;Bottleneck.version=Bottleneck.prototype.version=packagejson.version;Bottleneck.strategy=Bottleneck.prototype.strategy={LEAK:1,OVERFLOW:2,OVERFLOW_PRIORITY:4,BLOCK:3};Bottleneck.BottleneckError=Bottleneck.prototype.BottleneckError=require("./BottleneckError");Bottleneck.Group=Bottleneck.prototype.Group=require("./Group");Bottleneck.prototype.jobDefaults={priority:DEFAULT_PRIORITY,weight:1,expiration:null,id:""};Bottleneck.prototype.storeDefaults={maxConcurrent:null,minTime:0,highWater:null,strategy:Bottleneck.prototype.strategy.LEAK,penalty:null,reservoir:null};Bottleneck.prototype.storeInstanceDefaults={clientOptions:{},clearDatastore:false,Promise:Promise};Bottleneck.prototype.instanceDefaults={datastore:"local",id:"",rejectOnDrop:true,Promise:Promise};return Bottleneck}();module.exports=Bottleneck}).call(this)},{"../package.json":11,"./BottleneckError":2,"./DLList":3,"./Group":4,"./Local":5,"./RedisStorage":6,"./Sync":7,"./parser":10}],2:[function(require,module,exports){(function(){var BottleneckError;BottleneckError=class BottleneckError extends Error{};module.exports=BottleneckError}).call(this)},{}],3:[function(require,module,exports){(function(){var DLList;DLList=class DLList{constructor(){this._first=null;this._last=null;this.length=0}push(value){var node;this.length++;node={value:value,next:null};if(this._last!=null){this._last.next=node;this._last=node}else{this._first=this._last=node}return void 0}shift(){var ref1,value;if(this._first==null){return void 0}else{this.length--}value=this._first.value;this._first=(ref1=this._first.next)!=null?ref1:this._last=null;return value}first(){if(this._first!=null){return this._first.value}}getArray(){var node,ref,results;node=this._first;results=[];while(node!=null){results.push((ref=node,node=node.next,ref.value))}return results}};module.exports=DLList}).call(this)},{}],4:[function(require,module,exports){(function(){var Group,parser;parser=require("./parser");Group=function(){class Group{constructor(limiterOptions={},groupOptions={}){this.key=this.key.bind(this);this.deleteKey=this.deleteKey.bind(this);this.limiters=this.limiters.bind(this);this.keys=this.keys.bind(this);this.startAutoCleanup=this.startAutoCleanup.bind(this);this.stopAutoCleanup=this.stopAutoCleanup.bind(this);this.updateSettings=this.updateSettings.bind(this);this.limiterOptions=limiterOptions;parser.load(groupOptions,this.defaults,this);this.instances={};this.Bottleneck=require("./Bottleneck");this.startAutoCleanup()}key(key=""){var 
ref;return(ref=this.instances[key])!=null?ref:this.instances[key]=new this.Bottleneck(this.limiterOptions)}deleteKey(key=""){return delete this.instances[key]}limiters(){var k,ref,results,v;ref=this.instances;results=[];for(k in ref){v=ref[k];results.push({key:k,limiter:v})}return results}keys(){return Object.keys(this.instances)}startAutoCleanup(){var base;this.stopAutoCleanup();return typeof(base=this.interval=setInterval(()=>{var k,ref,results,time,v;time=Date.now();ref=this.instances;results=[];for(k in ref){v=ref[k];if(v._nextRequest+this.timeout=0)}async __incrementReservoir__(incr){await this.yieldLoop();return this.reservoir+=incr}async __currentReservoir__(){await this.yieldLoop();return this.reservoir}isBlocked(now){return this._unblockTime>=now}check(weight,now){return this.conditionsCheck(weight)&&this._nextRequest-now<=0}async __check__(weight){var now;await this.yieldLoop();now=Date.now();return this.check(weight,now)}async __register__(index,weight,expiration){var now,wait;await this.yieldLoop();now=Date.now();if(this.conditionsCheck(weight)){this._running+=weight;this._executing[index]={timeout:expiration!=null?setTimeout(()=>{if(!this._executing[index].freed){this._executing[index].freed=true;return this._running-=weight}},expiration):void 0,freed:false};if(this.reservoir!=null){this.reservoir-=weight}wait=Math.max(this._nextRequest-now,0);this._nextRequest=now+wait+this.minTime;return{success:true,wait:wait}}else{return{success:false}}}strategyIsBlock(){return this.strategy===3}async __submit__(queueLength,weight){var blocked,now,reachedHWM;await this.yieldLoop();if(this.maxConcurrent!=null&&weight>this.maxConcurrent){throw new BottleneckError(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${this.maxConcurrent}`)}now=Date.now();reachedHWM=this.highWater!=null&&queueLength===this.highWater&&!this.check(weight,now);blocked=this.strategyIsBlock()&&(reachedHWM||this.isBlocked(now));if(blocked){this._unblockTime=now+this.computePenalty();this._nextRequest=this._unblockTime+this.minTime}return{reachedHWM:reachedHWM,blocked:blocked,strategy:this.strategy}}async __free__(index,weight){await this.yieldLoop();clearTimeout(this._executing[index].timeout);if(!this._executing[index].freed){this._executing[index].freed=true;this._running-=weight}return{running:this._running}}};module.exports=Local}).call(this)},{"./BottleneckError":2,"./DLList":3,"./parser":10}],6:[function(require,module,exports){(function(){var 
BottleneckError,DLList,RedisStorage,libraries,lua,parser,scripts;parser=require("./parser");DLList=require("./DLList");BottleneckError=require("./BottleneckError");lua=require("./lua.json");libraries={refresh_running:lua["refresh_running.lua"],conditions_check:lua["conditions_check.lua"]};scripts={init:{keys:["b_settings","b_running","b_executing"],libs:[],code:lua["init.lua"]},update_settings:{keys:["b_settings"],libs:[],code:lua["update_settings.lua"]},running:{keys:["b_settings","b_running","b_executing"],libs:["refresh_running"],code:lua["running.lua"]},check:{keys:["b_settings","b_running","b_executing"],libs:["refresh_running","conditions_check"],code:lua["check.lua"]},submit:{keys:["b_settings","b_running","b_executing"],libs:["refresh_running","conditions_check"],code:lua["submit.lua"]},register:{keys:["b_settings","b_running","b_executing"],libs:["refresh_running","conditions_check"],code:lua["register.lua"]},free:{keys:["b_settings","b_running","b_executing"],libs:["refresh_running"],code:lua["free.lua"]},current_reservoir:{keys:["b_settings"],libs:[],code:lua["current_reservoir.lua"]},increment_reservoir:{keys:["b_settings"],libs:[],code:lua["increment_reservoir.lua"]}};RedisStorage=class RedisStorage{constructor(instance,initSettings,options){var redis;this.loadAll=this.loadAll.bind(this);this.instance=instance;redis=require("redis");parser.load(options,options,this);this.client=redis.createClient(this.clientOptions);this.subClient=redis.createClient(this.clientOptions);this.shas={};this.clients={client:this.client,subscriber:this.subClient};this.ready=new this.Promise((resolve,reject)=>{var count,done,errorListener;errorListener=function(e){return reject(e)};count=0;done=(()=>{count++;if(count===2){[this.client,this.subClient].forEach(client=>{client.removeListener("error",errorListener);return client.on("error",e=>{return this.instance._trigger("error",[e])})});return resolve()}});this.client.on("error",errorListener);this.client.on("ready",function(){return done()});this.subClient.on("error",errorListener);return this.subClient.on("ready",()=>{this.subClient.on("subscribe",function(){return done()});return this.subClient.subscribe("bottleneck")})}).then(this.loadAll).then(()=>{var args;this.subClient.on("message",(channel,message)=>{var info,type;[type,info]=message.split(":");if(type==="freed"){return this.instance._drainAll(~~info)}});initSettings.nextRequest=Date.now();initSettings.running=0;initSettings.unblockTime=0;initSettings.version=this.instance.version;args=this.prepareObject(initSettings);args.unshift(options.clearDatastore?1:0);return this.runScript("init",args)}).then(results=>{return this.clients})}disconnect(flush){this.client.end(flush);this.subClient.end(flush);return this}loadScript(name){return new this.Promise((resolve,reject)=>{var payload;payload=scripts[name].libs.map(function(lib){return libraries[lib]}).join("\n")+scripts[name].code;return this.client.multi([["script","load",payload]]).exec((err,replies)=>{if(err!=null){return reject(err)}this.shas[name]=replies[0];return resolve(replies[0])})})}loadAll(){var k,v;return this.Promise.all(function(){var results1;results1=[];for(k in scripts){v=scripts[k];results1.push(this.loadScript(k))}return results1}.call(this))}prepareArray(arr){return arr.map(function(x){if(x!=null){return x.toString()}else{return""}})}prepareObject(obj){var arr,k,v;arr=[];for(k in obj){v=obj[k];arr.push(k,v!=null?v.toString():"")}return arr}runScript(name,args){return new this.Promise((resolve,reject)=>{var 
arr,script;script=scripts[name];arr=[this.shas[name],script.keys.length].concat(script.keys,args,function(err,replies){if(err!=null){return reject(err)}return resolve(replies)});return this.client.evalsha.bind(this.client).apply({},arr)})}convertBool(b){return!!b}async __updateSettings__(options){return await this.runScript("update_settings",this.prepareObject(options))}async __running__(){return await this.runScript("running",[Date.now()])}async __incrementReservoir__(incr){return await this.runScript("increment_reservoir",[incr])}async __currentReservoir__(){return await this.runScript("current_reservoir",[])}async __check__(weight){return this.convertBool(await this.runScript("check",this.prepareArray([weight,Date.now()])))}async __register__(index,weight,expiration){var success,wait;[success,wait]=await this.runScript("register",this.prepareArray([index,weight,expiration,Date.now()]));return{success:this.convertBool(success),wait:wait}}async __submit__(queueLength,weight){var blocked,e,maxConcurrent,overweight,reachedHWM,strategy;try{[reachedHWM,blocked,strategy]=await this.runScript("submit",this.prepareArray([queueLength,weight,Date.now()]));return{reachedHWM:this.convertBool(reachedHWM),blocked:this.convertBool(blocked),strategy:strategy}}catch(error){e=error;if(e.message.indexOf("OVERWEIGHT")===0){[overweight,weight,maxConcurrent]=e.message.split(":");throw new BottleneckError(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${maxConcurrent}`)}else{throw e}}}async __free__(index,weight){var result;result=await this.runScript("free",this.prepareArray([index,Date.now()]));return{running:result}}};module.exports=RedisStorage}).call(this)},{"./BottleneckError":2,"./DLList":3,"./lua.json":9,"./parser":10,redis:undefined}],7:[function(require,module,exports){(function(){var DLList,Sync,slice=[].slice;DLList=require("./DLList");Sync=class Sync{constructor(name){this.submit=this.submit.bind(this);this.schedule=this.schedule.bind(this);this.wrap=this.wrap.bind(this);this.name=name;this._running=0;this._queue=new DLList}_tryToRun(){var next;if(this._running<1&&this._queue.length>0){this._running++;next=this._queue.shift();return next.task.apply({},next.args.concat((...args)=>{var ref;this._running--;this._tryToRun();return(ref=next.cb)!=null?ref.apply({},args):void 0}))}}submit(task,...args){var cb,i,ref;ref=args,args=2<=ref.length?slice.call(ref,0,i=ref.length-1):(i=0,[]),cb=ref[i++];this._queue.push({task:task,args:args,cb:cb});return this._tryToRun()}schedule(task,...args){var wrapped;wrapped=function(...args){var cb,i,ref;ref=args,args=2<=ref.length?slice.call(ref,0,i=ref.length-1):(i=0,[]),cb=ref[i++];return task.apply({},args).then(function(...args){return cb.apply({},Array.prototype.concat(null,args))}).catch(function(...args){return cb.apply({},args)})};return new Promise((resolve,reject)=>{return this.submit.apply({},Array.prototype.concat(wrapped,args,function(...args){return(args[0]!=null?reject:(args.shift(),resolve)).apply({},args)}))})}wrap(fn){return(...args)=>{return this.schedule.apply({},Array.prototype.concat(fn,args))}}};module.exports=Sync}).call(this)},{"./DLList":3}],8:[function(require,module,exports){(function(){module.exports=require("./Bottleneck")}).call(this)},{"./Bottleneck":1}],9:[function(require,module,exports){module.exports={"check.lua":"local settings_key = KEYS[1]\nlocal running_key = KEYS[2]\nlocal executing_key = KEYS[3]\n\nlocal weight = tonumber(ARGV[1])\nlocal now = tonumber(ARGV[2])\n\nlocal 
running = tonumber(refresh_running(executing_key, running_key, settings_key, now))\nlocal settings = redis.call('hmget', settings_key,\n 'maxConcurrent',\n 'reservoir',\n 'nextRequest'\n)\nlocal maxConcurrent = tonumber(settings[1])\nlocal reservoir = tonumber(settings[2])\nlocal nextRequest = tonumber(settings[3])\n\nlocal conditionsCheck = conditions_check(weight, maxConcurrent, running, reservoir)\n\nlocal result = conditionsCheck and nextRequest - now <= 0\n\nreturn result\n","conditions_check.lua":"local conditions_check = function (weight, maxConcurrent, running, reservoir)\n return (\n (maxConcurrent == nil or running + weight <= maxConcurrent) and\n (reservoir == nil or reservoir - weight >= 0)\n )\nend\n","current_reservoir.lua":"local settings_key = KEYS[1]\n\nreturn tonumber(redis.call('hget', settings_key, 'reservoir'))\n","free.lua":"local settings_key = KEYS[1]\nlocal running_key = KEYS[2]\nlocal executing_key = KEYS[3]\n\nlocal index = ARGV[1]\nlocal now = ARGV[2]\n\nredis.call('zadd', executing_key, 0, index)\n\nreturn refresh_running(executing_key, running_key, settings_key, now)\n","increment_reservoir.lua":"local settings_key = KEYS[1]\nlocal incr = ARGV[1]\n\nreturn redis.call('hincrby', settings_key, 'reservoir', incr)\n","init.lua":"local settings_key = KEYS[1]\nlocal running_key = KEYS[2]\nlocal executing_key = KEYS[3]\n\nlocal clear = tonumber(ARGV[1])\n\nif clear == 1 then\n redis.call('del', settings_key, running_key, executing_key)\nend\n\nif redis.call('exists', settings_key) == 0 then\n local args = {'hmset', settings_key}\n\n for i = 2, #ARGV do\n table.insert(args, ARGV[i])\n end\n\n redis.call(unpack(args))\nend\n\nreturn {}\n","refresh_running.lua":"local refresh_running = function (executing_key, running_key, settings_key, now)\n\n local expired = redis.call('zrangebyscore', executing_key, '-inf', '('..now)\n\n if #expired == 0 then\n return redis.call('hget', settings_key, 'running')\n else\n redis.call('zremrangebyscore', executing_key, '-inf', '('..now)\n\n local args = {'hmget', running_key}\n for i = 1, #expired do\n table.insert(args, expired[i])\n end\n\n local weights = redis.call(unpack(args))\n\n args[1] = 'hdel'\n local deleted = redis.call(unpack(args))\n\n local total = 0\n for i = 1, #weights do\n total = total + (tonumber(weights[i]) or 0)\n end\n local incr = -total\n if total == 0 then\n incr = 0\n else\n redis.call('publish', 'bottleneck', 'freed:'..total)\n end\n\n return redis.call('hincrby', settings_key, 'running', incr)\n end\n\nend\n","register.lua":"local settings_key = KEYS[1]\nlocal running_key = KEYS[2]\nlocal executing_key = KEYS[3]\n\nlocal index = ARGV[1]\nlocal weight = tonumber(ARGV[2])\nlocal expiration = tonumber(ARGV[3])\nlocal now = tonumber(ARGV[4])\n\nlocal running = tonumber(refresh_running(executing_key, running_key, settings_key, now))\nlocal settings = redis.call('hmget', settings_key,\n 'maxConcurrent',\n 'reservoir',\n 'nextRequest',\n 'minTime'\n)\nlocal maxConcurrent = tonumber(settings[1])\nlocal reservoir = tonumber(settings[2])\nlocal nextRequest = tonumber(settings[3])\nlocal minTime = tonumber(settings[4])\n\nif conditions_check(weight, maxConcurrent, running, reservoir) then\n\n if expiration ~= nil then\n redis.call('zadd', executing_key, now + expiration, index)\n end\n redis.call('hset', running_key, index, weight)\n redis.call('hincrby', settings_key, 'running', weight)\n\n local wait = math.max(nextRequest - now, 0)\n\n if reservoir == nil then\n redis.call('hset', settings_key,\n 'nextRequest', now 
+ wait + minTime\n )\n else\n redis.call('hmset', settings_key,\n 'reservoir', reservoir - weight,\n 'nextRequest', now + wait + minTime\n )\n end\n\n return {true, wait}\n\nelse\n return {false}\nend\n","running.lua":"local settings_key = KEYS[1]\nlocal running_key = KEYS[2]\nlocal executing_key = KEYS[3]\nlocal now = ARGV[1]\n\nreturn tonumber(refresh_running(executing_key, running_key, settings_key, now))\n","submit.lua":"local settings_key = KEYS[1]\nlocal running_key = KEYS[2]\nlocal executing_key = KEYS[3]\n\nlocal queueLength = tonumber(ARGV[1])\nlocal weight = tonumber(ARGV[2])\nlocal now = tonumber(ARGV[3])\n\nlocal running = tonumber(refresh_running(executing_key, running_key, settings_key, now))\nlocal settings = redis.call('hmget', settings_key,\n 'maxConcurrent',\n 'highWater',\n 'reservoir',\n 'nextRequest',\n 'strategy',\n 'unblockTime',\n 'penalty',\n 'minTime'\n)\nlocal maxConcurrent = tonumber(settings[1])\nlocal highWater = tonumber(settings[2])\nlocal reservoir = tonumber(settings[3])\nlocal nextRequest = tonumber(settings[4])\nlocal strategy = tonumber(settings[5])\nlocal unblockTime = tonumber(settings[6])\nlocal penalty = tonumber(settings[7])\nlocal minTime = tonumber(settings[8])\n\nif maxConcurrent ~= nil and weight > maxConcurrent then\n return redis.error_reply('OVERWEIGHT:'..weight..':'..maxConcurrent)\nend\n\nlocal reachedHWM = (highWater ~= nil and queueLength == highWater\n and not (\n conditions_check(weight, maxConcurrent, running, reservoir)\n and nextRequest - now <= 0\n )\n)\n\nlocal blocked = strategy == 3 and (reachedHWM or unblockTime >= now)\n\nif blocked then\n local computedPenalty = penalty\n if computedPenalty == nil then\n if minTime == 0 then\n computedPenalty = 5000\n else\n computedPenalty = 15 * minTime\n end\n end\n\n redis.call('hmset', settings_key,\n 'unblockTime', now + computedPenalty,\n 'nextRequest', unblockTime + minTime\n )\nend\n\nreturn {reachedHWM, blocked, strategy}\n","update_settings.lua":"local settings_key = KEYS[1]\n\nlocal args = {'hmset', settings_key}\n\nfor i = 1, #ARGV do\n table.insert(args, ARGV[i])\nend\n\nredis.call(unpack(args))\n\nreturn {}\n"}},{}],10:[function(require,module,exports){(function(){exports.load=function(received,defaults,onto={}){var k,ref,v;for(k in defaults){v=defaults[k];onto[k]=(ref=received[k])!=null?ref:v}return onto};exports.overwrite=function(received,defaults,onto={}){var k,v;for(k in received){v=received[k];if(defaults[k]!==void 0){onto[k]=v}}return onto}}).call(this)},{}],11:[function(require,module,exports){module.exports={name:"bottleneck",version:"2.0.0-beta.3",description:"Distributed task scheduler and rate limiter",main:"lib/index.js",typings:"bottleneck.d.ts",scripts:{test:"./node_modules/mocha/bin/mocha test",build:"./scripts/build.sh",compile:"./scripts/build.sh compile"},repository:{type:"git",url:"https://github.com/SGrondin/bottleneck"},keywords:["async rate limiter","rate limiter","rate limiting","async","rate","limiting","limiter","throttle","throttling","load","ddos"],author:{name:"Simon Grondin"},license:"MIT",bugs:{url:"https://github.com/SGrondin/bottleneck/issues"},devDependencies:{"@types/es6-promise":"0.0.33",assert:"1.4.x",browserify:"*",coffeescript:"2.0.x","ejs-cli":"^2.0.0",mocha:"4.x",redis:"^2.8.0",typescript:"^2.6.2","uglify-es":"3.x"},dependencies:{}}},{}]},{},[8]); \ No newline at end of file diff --git a/lib/Bottleneck.js b/lib/Bottleneck.js index 225f268..5101f32 100644 --- a/lib/Bottleneck.js +++ b/lib/Bottleneck.js @@ -70,7 +70,7 @@ return 
this._store.clients; } - async disconnect(flush) { + async disconnect(flush = true) { return (await this._store.disconnect(flush)); } diff --git a/lib/RedisStorage.js b/lib/RedisStorage.js index 1054af5..1e4a24f 100644 --- a/lib/RedisStorage.js +++ b/lib/RedisStorage.js @@ -123,7 +123,7 @@ args.unshift((options.clearDatastore ? 1 : 0)); return this.runScript("init", args); }).then((results) => { - return this.client; + return this.clients; }); } diff --git a/package-lock.json b/package-lock.json index 06bb541..6858a05 100644 --- a/package-lock.json +++ b/package-lock.json @@ -4,6 +4,12 @@ "lockfileVersion": 1, "requires": true, "dependencies": { + "@types/es6-promise": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/@types/es6-promise/-/es6-promise-0.0.33.tgz", + "integrity": "sha512-HKJFVLCGrWQ/1unEw8JdaTxu6n3EUxmwTxJ6D0O1x0gD8joCsgoTWxEgevb7fp2XIogNjof3KEd+3bJoGne/nw==", + "dev": true + }, "JSONStream": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.1.tgz", @@ -67,6 +73,12 @@ "acorn": "4.0.13" } }, + "async": { + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", + "integrity": "sha1-trvgsGdLnXGXCMo43owjfLUmw9E=", + "dev": true + }, "balanced-match": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", @@ -306,6 +318,12 @@ "integrity": "sha512-/bCMyzu7KSJPF2gRNYWpbEmfPkNL8AzXs78ktxhPTpSXlKetZRl7kIYGqU055UqUVnkKRJK4eUkUhRHQpvdilA==", "dev": true }, + "colors": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", + "integrity": "sha1-JCP+ZnisDF2uiFLl0OW+CMmXq8w=", + "dev": true + }, "combine-source-map": { "version": "0.7.2", "resolved": "https://registry.npmjs.org/combine-source-map/-/combine-source-map-0.7.2.tgz", @@ -536,6 +554,47 @@ "readable-stream": "2.3.3" } }, + "ejs": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-2.4.2.tgz", + "integrity": "sha1-cFfrSBKVj7cxhBzZyjUzQ+/ll7E=", + "dev": true + }, + "ejs-cli": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ejs-cli/-/ejs-cli-2.0.0.tgz", + "integrity": "sha1-7k3To1UQIQfvCymkRcob7p+Gi/I=", + "dev": true, + "requires": { + "async": "0.2.10", + "colors": "0.6.2", + "ejs": "2.4.2", + "glob": "3.2.11", + "optimist": "0.5.2" + }, + "dependencies": { + "glob": { + "version": "3.2.11", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz", + "integrity": "sha1-Spc/Y1uRkPcV0QmH1cAP0oFevj0=", + "dev": true, + "requires": { + "inherits": "2.0.3", + "minimatch": "0.3.0" + } + }, + "minimatch": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz", + "integrity": "sha1-J12O2qxPG7MyZHIInnlJyDlGmd0=", + "dev": true, + "requires": { + "lru-cache": "2.7.3", + "sigmund": "1.0.1" + } + } + } + }, "elliptic": { "version": "6.4.0", "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.4.0.tgz", @@ -788,6 +847,12 @@ "integrity": "sha1-LcvSwofLwKVcxCMovQxzYVDVPj8=", "dev": true }, + "lru-cache": { + "version": "2.7.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "integrity": "sha1-bUUk6LlV+V1PW1iFHOId1y+06VI=", + "dev": true + }, "md5.js": { "version": "1.3.4", "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.4.tgz", @@ -920,6 +985,15 @@ "wrappy": "1.0.2" } }, + "optimist": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.5.2.tgz", + "integrity": 
"sha1-hcjBRUszFeSniUfoV7HfAzRQv7w=", + "dev": true, + "requires": { + "wordwrap": "0.0.3" + } + }, "os-browserify": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.3.0.tgz", @@ -1157,6 +1231,12 @@ "jsonify": "0.0.0" } }, + "sigmund": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz", + "integrity": "sha1-P/IfGYytIXX587eBhT/ZTQ0ZtZA=", + "dev": true + }, "source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", @@ -1285,6 +1365,12 @@ "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=", "dev": true }, + "typescript": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-2.6.2.tgz", + "integrity": "sha1-PFtv1/beCRQmkCfwPAlGdY92c6Q=", + "dev": true + }, "uglify-es": { "version": "3.1.8", "resolved": "https://registry.npmjs.org/uglify-es/-/uglify-es-3.1.8.tgz", @@ -1359,6 +1445,12 @@ "indexof": "0.0.1" } }, + "wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "dev": true + }, "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", diff --git a/package.json b/package.json index 91617b0..0faa84d 100644 --- a/package.json +++ b/package.json @@ -34,11 +34,14 @@ "url": "https://github.com/SGrondin/bottleneck/issues" }, "devDependencies": { + "@types/es6-promise": "0.0.33", "assert": "1.4.x", "browserify": "*", "coffeescript": "2.0.x", + "ejs-cli": "^2.0.0", "mocha": "4.x", "redis": "^2.8.0", + "typescript": "^2.6.2", "uglify-es": "3.x" }, "dependencies": {} diff --git a/scripts/build.sh b/scripts/build.sh index dfe2a71..620f7b9 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -8,6 +8,8 @@ if [ ! -d node_modules ]; then npm install fi +node_modules/ejs-cli/bin/ejs-cli bottleneck.d.ts.ejs > bottleneck.d.ts + node_modules/coffeescript/bin/coffee -c src/*.coffee rm -rf lib/* @@ -18,6 +20,7 @@ if [[ $1 = 'compile' ]]; then echo 'Compiling bottleneck...' else echo 'Building bottleneck...' + node_modules/typescript/bin/tsc --noEmit --strictNullChecks test.ts node_modules/browserify/bin/cmd.js -u redis lib/index.js > bottleneck.js node_modules/uglify-es/bin/uglifyjs bottleneck.js -o bottleneck.min.js fi diff --git a/src/Bottleneck.coffee b/src/Bottleneck.coffee index 2ede082..df9d74b 100644 --- a/src/Bottleneck.coffee +++ b/src/Bottleneck.coffee @@ -49,7 +49,7 @@ class Bottleneck else throw new Bottleneck::BottleneckError "Invalid datastore type: #{@datastore}" ready: => @_store.ready clients: => @_store.clients - disconnect: (flush) => await @_store.disconnect flush + disconnect: (flush=true) => await @_store.disconnect flush _addListener: (name, status, cb) -> @_events[name] ?= [] @_events[name].push {cb, status} diff --git a/src/RedisStorage.coffee b/src/RedisStorage.coffee index 8275c0c..5df2d1c 100644 --- a/src/RedisStorage.coffee +++ b/src/RedisStorage.coffee @@ -84,7 +84,7 @@ class RedisStorage args.unshift (if options.clearDatastore then 1 else 0) @runScript "init", args .then (results) => - @client + @clients disconnect: (flush) -> @client.end flush diff --git a/test.ts b/test.ts new file mode 100644 index 0000000..931063d --- /dev/null +++ b/test.ts @@ -0,0 +1,112 @@ +/// + +import Bottleneck from "bottleneck"; +import * as assert from "assert"; + +/* +This file is run by scripts/build.sh. +It is used to validate the typings in bottleneck.d.ts. 
+The command is: tsc --noEmit --strictNullChecks test.ts
+This file cannot be run directly.
+In order to do that, you must comment out the first line,
+and change "bottleneck" to "." on the third line.
+*/
+
+function withCb(foo: number, bar: () => void, cb: (err: any, result: string) => void) {
+  let s: string = `cb ${foo}`;
+  cb(null, s);
+}
+
+console.log(Bottleneck);
+
+let limiter = new Bottleneck({
+  maxConcurrent: 5,
+  minTime: 1000,
+  highWater: 20,
+  strategy: Bottleneck.strategy.LEAK
+});
+
+limiter.ready().then(() => { console.log('Ready') });
+limiter.clients().client;
+limiter.disconnect();
+
+limiter.currentReservoir().then(function (x) {
+  if (x != null) {
+    let i: number = x;
+  }
+});
+
+limiter.running().then(function (x) {
+  let i: number = x;
+});
+
+limiter.submit(withCb, 1, () => {}, (err, result) => {
+  let s: string = result;
+  console.log(s);
+  assert(s == "cb 1");
+});
+
+function withPromise(foo: number, bar: () => void): PromiseLike<string> {
+  let s: string = `promise ${foo}`;
+  return Promise.resolve(s);
+}
+
+let foo: Promise<string> = limiter.schedule(withPromise, 1, () => {});
+foo.then(function (result: string) {
+  let s: string = result;
+  console.log(s);
+  assert(s == "promise 1");
+});
+
+let group = new Bottleneck.Group({
+  maxConcurrent: 5,
+  minTime: 1000,
+  highWater: 10,
+  strategy: Bottleneck.strategy.LEAK
+});
+
+group.key("foo").submit(withCb, 2, () => {}, (err, result) => {
+  let s: string = `${result} foo`;
+  console.log(s);
+  assert(s == "cb 2 foo");
+});
+
+group.key("bar").submit({ priority: 4 }, withCb, 3, () => {}, (err, result) => {
+  let s: string = `${result} bar`;
+  console.log(s);
+  assert(s == "cb 3 bar");
+});
+
+let f1: Promise<string> = group.key("pizza").schedule(withPromise, 2, () => {});
+f1.then(function (result: string) {
+  let s: string = result;
+  console.log(s);
+  assert(s == "promise 2");
+});
+
+let f2: Promise<string> = group.key("pie").schedule({ priority: 4 }, withPromise, 3, () => {});
+f2.then(function (result: string) {
+  let s: string = result;
+  console.log(s);
+  assert(s == "promise 3");
+});
+
+let wrapped: ((x: number, y: number) => Promise<string>) = limiter.wrap((a, b) => {
+  let s: string = `Total: ${a + b}`;
+  return Promise.resolve(s);
+});
+
+wrapped(1, 2).then((x) => {
+  let s: string = x;
+  console.log(s);
+  assert(s == "Total: 3");
+});
+
+
+group.deleteKey("pizza");
+group.updateSettings({ timeout: 5 });
+
+let keys: string[] = group.keys();
+assert(keys.length == 3);
+
+let queued: number = limiter.chain(group.key("pizza")).queued();
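
The test file above is only compiled (`tsc --noEmit`), so its assertions never execute. The sketch below is illustrative only and not part of this patch: it shows the same typed surface exercised from a consumer project, assuming the package is installed as `bottleneck` with the `typings` entry from package.json. Option values and key names are arbitrary.

```ts
// Illustrative consumer-side sketch; not part of the repository or this patch.
import Bottleneck from "bottleneck";

const limiter = new Bottleneck({ maxConcurrent: 2, minTime: 250 });

// schedule() takes a promise-returning function; the resolved type flows through.
const greet = (name: string): Promise<string> => Promise.resolve(`hello ${name}`);
limiter.schedule(greet, "world").then((s: string) => console.log(s));

// wrap() returns a function with the same call signature, but rate limited.
const add = limiter.wrap((a: number, b: number) => Promise.resolve(a + b));
add(1, 2).then((total: number) => console.log(total)); // logs 3

// A Group keeps an independent limiter per key, created on first use.
const group = new Bottleneck.Group({ minTime: 1000 });
group.key("users").schedule(() => Promise.resolve("ok")).then((r: string) => console.log(r));
```

With the default local datastore this needs no Redis server; the Group lazily creates one limiter per key, as exercised by the typings test above.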