Make the documentation less worker-threads centric. (#183)
[poolifier.git] / src / pools / thread / dynamic.ts
import type { JSONValue } from '../../utility-types'
import { isKillBehavior, KillBehaviors } from '../../worker/worker-options'
import type { PoolOptions } from '../abstract-pool'
import type { ThreadWorkerWithMessageChannel } from './fixed'
import { FixedThreadPool } from './fixed'

/**
 * A thread pool with a dynamic number of threads, but a guaranteed minimum number of threads.
 *
 * This thread pool creates new threads when the others are busy, up to the maximum number of threads.
 * When the maximum number of threads is reached, a `FullPool` event is emitted. If you want to listen to this event, use the pool's `emitter`.
 *
 * @template Data Type of data sent to the worker.
 * @template Response Type of response of execution.
 *
 * @author [Alessandro Pio Ardizio](https://github.com/pioardi)
 * @since 0.0.1
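 *
 * @example
 * A minimal usage sketch (the worker file name and pool sizes are illustrative):
 *
 *   const pool = new DynamicThreadPool(2, 8, './yourWorker.js')
 *   pool.emitter.on('FullPool', () => console.log('Pool is full, tasks are distributed round robin'))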
 */
export class DynamicThreadPool<
  Data extends JSONValue = JSONValue,
  Response extends JSONValue = JSONValue
> extends FixedThreadPool<Data, Response> {
  /**
   * Constructs a new poolifier dynamic thread pool.
   *
   * @param min Minimum number of threads which are always active.
   * @param max Maximum number of threads that can be created by this pool.
   * @param filePath Path to an implementation of a `ThreadWorker` file, which can be relative or absolute.
   * @param opts Options for this dynamic thread pool. Default: `{ maxTasks: 1000 }`
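   *
   * @example
   * A minimal sketch of a matching worker file (file name and handler are illustrative):
   *
   *   // yourWorker.js
   *   const { ThreadWorker } = require('poolifier')
   *   module.exports = new ThreadWorker(data => ({ ok: true }), { maxInactiveTime: 60000 })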
   */
  public constructor (
    min: number,
    public readonly max: number,
    filePath: string,
    opts: PoolOptions<ThreadWorkerWithMessageChannel> = { maxTasks: 1000 }
  ) {
    super(min, filePath, opts)
  }

  /**
   * Choose a thread for the next task.
   *
   * It will first check for and return an idle thread.
   * If all threads are busy, it will try to create a new one, up to the `max` thread count.
   * If the max thread count is reached, the emitter will emit a `FullPool` event and it will fall back to a round robin algorithm to distribute the load.
   *
   * @returns Thread worker.
   */
  protected chooseWorker (): ThreadWorkerWithMessageChannel {
    for (const [worker, numberOfTasks] of this.tasks) {
      if (numberOfTasks === 0) {
        // A worker is free, use it
        return worker
      }
    }

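    // No idle worker found: if the pool is already at its maximum size, notify listeners and defer to the parent's round robin selection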
    if (this.workers.length === this.max) {
      this.emitter.emit('FullPool')
      return super.chooseWorker()
    }

    // All workers are busy, create a new worker
    const workerCreated = this.createAndSetupWorker()
    this.registerWorkerMessageListener<Data>(workerCreated, message => {
      const tasksInProgress = this.tasks.get(workerCreated)
      if (
        isKillBehavior(KillBehaviors.HARD, message.kill) ||
        tasksInProgress === 0
      ) {
        // Kill message received from the worker: no new tasks have been submitted to it for a while (> maxInactiveTime)
        void this.destroyWorker(workerCreated)
      }
    })
    return workerCreated
  }
}