const { expect } = require('expect')
-const { DynamicThreadPool } = require('../../../lib/index')
+const { DynamicThreadPool, PoolEvents } = require('../../../lib')
const { WorkerFunctions } = require('../../test-types')
const TestUtils = require('../../test-utils')
let result = await pool.execute({
function: WorkerFunctions.fibonacci
})
- expect(result).toBe(false)
+ expect(result).toBe(121393)
result = await pool.execute({
function: WorkerFunctions.factorial
})
- expect(result).toBe(false)
+ expect(result).toBe(9.33262154439441e157)
})
it('Verify that new workers are created when required, max size is not exceeded and that after a while new workers will die', async () => {
let poolBusy = 0
- pool.emitter.on('busy', () => ++poolBusy)
+ pool.emitter.on(PoolEvents.busy, () => ++poolBusy)
for (let i = 0; i < max * 2; i++) {
pool.execute()
}
- expect(pool.workers.size).toBeLessThanOrEqual(max)
- expect(pool.workers.size).toBeGreaterThan(min)
+ expect(pool.workerNodes.length).toBeLessThanOrEqual(max)
+ expect(pool.workerNodes.length).toBeGreaterThan(min)
// The `busy` event is emitted when the number of tasks submitted at once reaches the dynamic pool maximum size (max).
// So it fires max + 1 times in total for a loop submitting up to max * 2 tasks to the dynamic pool.
expect(poolBusy).toBe(max + 1)
- const numberOfExitEvents = await TestUtils.waitExits(pool, max - min)
+ const numberOfExitEvents = await TestUtils.waitWorkerExits(pool, max - min)
expect(numberOfExitEvents).toBe(max - min)
})
it('Verify scale thread up and down is working', async () => {
- expect(pool.workers.size).toBe(min)
- for (let i = 0; i < max * 10; i++) {
+ expect(pool.workerNodes.length).toBe(min)
+ for (let i = 0; i < max * 2; i++) {
pool.execute()
}
- expect(pool.workers.size).toBe(max)
- await TestUtils.waitExits(pool, max - min)
- expect(pool.workers.size).toBe(min)
- for (let i = 0; i < max * 10; i++) {
+ expect(pool.workerNodes.length).toBe(max)
+ await TestUtils.waitWorkerExits(pool, max - min)
+ expect(pool.workerNodes.length).toBe(min)
+ for (let i = 0; i < max * 2; i++) {
pool.execute()
}
- expect(pool.workers.size).toBe(max)
- await TestUtils.waitExits(pool, max - min)
- expect(pool.workers.size).toBe(min)
+ expect(pool.workerNodes.length).toBe(max)
+ await TestUtils.waitWorkerExits(pool, max - min)
+ expect(pool.workerNodes.length).toBe(min)
})
it('Shutdown test', async () => {
- const exitPromise = TestUtils.waitExits(pool, min)
+ const exitPromise = TestUtils.waitWorkerExits(pool, min)
await pool.destroy()
const numberOfExitEvents = await exitPromise
expect(numberOfExitEvents).toBe(min)
it('Validation of inputs test', () => {
expect(() => new DynamicThreadPool(min)).toThrowError(
- new Error('Please specify a file with a worker implementation')
+ 'Please specify a file with a worker implementation'
)
})
await pool1.destroy()
})
- it('Verify scale thread up and down is working when long running task is used:hard', async () => {
+ it('Verify scale thread up and down is working when long executing task is used:hard', async () => {
const longRunningPool = new DynamicThreadPool(
min,
max,
'./tests/worker-files/thread/longRunningWorkerHardBehavior.js',
{
errorHandler: e => console.error(e),
- onlineHandler: () => console.log('long running worker is online'),
- exitHandler: () => console.log('long running worker exited')
+ onlineHandler: () => console.log('long executing worker is online'),
+ exitHandler: () => console.log('long executing worker exited')
}
)
- expect(longRunningPool.workers.size).toBe(min)
- for (let i = 0; i < max * 10; i++) {
+ expect(longRunningPool.workerNodes.length).toBe(min)
+ for (let i = 0; i < max * 2; i++) {
longRunningPool.execute()
}
- expect(longRunningPool.workers.size).toBe(max)
- await TestUtils.waitExits(longRunningPool, max - min)
- expect(longRunningPool.workers.size).toBe(min)
+ expect(longRunningPool.workerNodes.length).toBe(max)
+ await TestUtils.waitWorkerExits(longRunningPool, max - min)
+ expect(longRunningPool.workerNodes.length).toBe(min)
+ expect(
+ longRunningPool.workerChoiceStrategyContext.workerChoiceStrategies.get(
+ longRunningPool.workerChoiceStrategyContext.workerChoiceStrategy
+ ).nextWorkerNodeId
+ ).toBeLessThan(longRunningPool.workerNodes.length)
// We need to clean up the resources after our test
await longRunningPool.destroy()
})
- it('Verify scale thread up and down is working when long running task is used:soft', async () => {
+ it('Verify scale thread up and down is working when long executing task is used:soft', async () => {
const longRunningPool = new DynamicThreadPool(
min,
max,
'./tests/worker-files/thread/longRunningWorkerSoftBehavior.js',
{
errorHandler: e => console.error(e),
- onlineHandler: () => console.log('long running worker is online'),
- exitHandler: () => console.log('long running worker exited')
+ onlineHandler: () => console.log('long executing worker is online'),
+ exitHandler: () => console.log('long executing worker exited')
}
)
- expect(longRunningPool.workers.size).toBe(min)
- for (let i = 0; i < max * 10; i++) {
+ expect(longRunningPool.workerNodes.length).toBe(min)
+ for (let i = 0; i < max * 2; i++) {
longRunningPool.execute()
}
- expect(longRunningPool.workers.size).toBe(max)
+ expect(longRunningPool.workerNodes.length).toBe(max)
await TestUtils.sleep(1500)
- // Here we expect the workers to be at the max size since that the task is still running
- expect(longRunningPool.workers.size).toBe(max)
+ // Here we expect the workerNodes to be at the max size since the task is still executing
+ expect(longRunningPool.workerNodes.length).toBe(max)
// We need to clean up the resources after our test
await longRunningPool.destroy()
})