docs: add links to examples
author    Jérôme Benoit <jerome.benoit@sap.com>
          Thu, 10 Aug 2023 20:07:05 +0000 (22:07 +0200)
committer Jérôme Benoit <jerome.benoit@sap.com>
          Thu, 10 Aug 2023 20:07:05 +0000 (22:07 +0200)
Signed-off-by: Jérôme Benoit <jerome.benoit@sap.com>
README.md
examples/typescript/http-client/package.json
examples/typescript/http-client/src/main.ts
examples/typescript/http-client/src/pool.ts

index 151c658e53ff4238e11b317c5d1278ede033056a..77de1272a31d7c82777a6a334d9d8c71911cd0c5 100644
--- a/README.md
+++ b/README.md
@@ -141,7 +141,11 @@ pool
 
 You can do the same with the classes _ClusterWorker_, _FixedClusterPool_ and _DynamicClusterPool_.
 
-**See [examples](./examples/) folder for more details (in particular if you want to use a pool with [multiple task functions](./examples/multiFunctionExample.js))**.
+**See the [examples](./examples/) folder for more details**:
+
+- [JavaScript](./examples/)
+- [TypeScript](./examples/typescript/)
+  - [HTTP client pool](./examples/typescript/http-client/)
 
 Remember that workers can only send and receive structured-cloneable data.
 
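The HTTP client pool example linked above also illustrates the structured-cloneable constraint: a task function should hand back plain data such as the response body, never a live handle like a `Response` object. A minimal usage sketch, assuming the pool exported by this commit's example and a `text` field on `WorkerResponse` (the field name is an assumption, since `types.ts` is not part of this diff):

```ts
// Minimal sketch: execute a named task function on the HTTP client pool and
// read back plain, structured-cloneable data (the response body text).
import { httpClientPool } from './pool.js'

const response = await httpClientPool.execute(
  { input: 'http://localhost:8080/' },
  'fetch'
)
// 'text' is an assumed shape of WorkerResponse in this sketch.
console.info(response.text)
```
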
index 275308804edce6c67f44ba6c66d5e8842ce37cc0..9d09531a72c2a13d72943c41c09346732ed6d453 100644
--- a/examples/typescript/http-client/package.json
+++ b/examples/typescript/http-client/package.json
@@ -1,8 +1,8 @@
 {
   "$schema": "https://json.schemastore.org/package",
-  "name": "http-client-node-fetch",
+  "name": "http-client-pool",
   "version": "1.0.0",
-  "description": "multithreaded node-fetch",
+  "description": "HTTP client pool",
   "main": "dist/main.js",
   "type": "module",
   "volta": {
index 067aa2d23668d8068efa55dbce1fdf570743193a..1f4532c9a0147d7a30e443724c6851bbb52bd412 100644
--- a/examples/typescript/http-client/src/main.ts
+++ b/examples/typescript/http-client/src/main.ts
@@ -1,20 +1,20 @@
 import { availableParallelism } from 'poolifier'
-import { fetchPool } from './pool.js'
+import { httpClientPool } from './pool.js'
 import { type WorkerResponse } from './types.js'
 
 const parallelism = availableParallelism()
 const requestUrl = 'http://localhost:8080/'
 
 for (const workerFunction of ['node_fetch', 'fetch', 'axios']) {
-  const fetchPoolPromises = new Set<Promise<WorkerResponse>>()
+  const httpClientPoolPromises = new Set<Promise<WorkerResponse>>()
   for (let i = 0; i < availableParallelism(); i++) {
-    fetchPoolPromises.add(
-      fetchPool.execute({ input: requestUrl }, workerFunction)
+    httpClientPoolPromises.add(
+      httpClientPool.execute({ input: requestUrl }, workerFunction)
     )
   }
   try {
     const now = performance.now()
-    const responses = await Promise.all(fetchPoolPromises)
+    const responses = await Promise.all(httpClientPoolPromises)
     const elapsedTime = performance.now() - now
     console.info(
       `Received in ${elapsedTime.toFixed(2)}ms an array with ${
index 77ceacba2b7210a1cdd1bd16302e5e9394e97ca6..139f3fb963e4fdfd454efc9de79fede213deacbb 100644
--- a/examples/typescript/http-client/src/pool.ts
+++ b/examples/typescript/http-client/src/pool.ts
@@ -8,11 +8,14 @@ const workerFile = join(
   `worker${extname(fileURLToPath(import.meta.url))}`
 )
 
-export const fetchPool = new FixedThreadPool<WorkerData, WorkerResponse>(
+export const httpClientPool = new FixedThreadPool<WorkerData, WorkerResponse>(
   availableParallelism(),
   workerFile,
   {
     enableTasksQueue: true,
+    tasksQueueOptions: {
+      concurrency: 8
+    },
     errorHandler: (e: Error) => {
       console.error(e)
     }
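
With `enableTasksQueue: true`, the added `tasksQueueOptions.concurrency: 8` lets each pooled worker execute up to 8 queued tasks concurrently. The worker file resolved above is expected to register the three task functions that `main.ts` selects by name. A minimal sketch of such a worker, assuming `WorkerData` exposes an `input` URL string and `WorkerResponse` a `text` field (both assumptions, since `types.ts` and `worker.ts` are not part of this commit):

```ts
// Hedged sketch of the worker referenced by pool.ts; not part of this diff.
import { ThreadWorker } from 'poolifier'
import nodeFetch from 'node-fetch'
import axios from 'axios'
import { type WorkerData, type WorkerResponse } from './types.js'

// Registering an object of named task functions is what lets main.ts pick
// 'node_fetch', 'fetch' or 'axios' as the second argument to execute().
export default new ThreadWorker<WorkerData, WorkerResponse>({
  node_fetch: async (workerData?: WorkerData) => {
    const response = await nodeFetch((workerData as WorkerData).input)
    // Return plain, structured-cloneable data, not the Response object itself.
    return { text: await response.text() }
  },
  fetch: async (workerData?: WorkerData) => {
    const response = await fetch((workerData as WorkerData).input)
    return { text: await response.text() }
  },
  axios: async (workerData?: WorkerData) => {
    const response = await axios.get((workerData as WorkerData).input)
    return { text: response.data }
  }
})
```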