A SQLite-backed job queue for better-sqlite3 and bun:sqlite, processing 15k jobs/s.
Create a queue:
import { bun, defineQueue } from "plainjob";
import Database from "bun:sqlite";

const connection = bun(new Database("data.db", { strict: true }));
const queue = defineQueue({ connection });
Make sure strict mode is enabled!
npm install plainjob better-sqlite3
Create a queue:
import { better, defineQueue } from "plainjob";
import Database from "better-sqlite3";

const connection = better(new Database("data.db"));
const queue = defineQueue({ connection });
import { bun, defineQueue, defineWorker } from "plainjob";
import Database from "bun:sqlite";

const connection = bun(new Database("data.db", { strict: true }));
const queue = defineQueue({ connection });

// Define a worker
const worker = defineWorker(
  "print",
  async (job) => {
    console.log(`Processing job ${job.id}: ${job.data}`);
  },
  { queue }
);

// Add a job
queue.add("print", "Hello, plainjob!");

// Start the worker
worker.start();
- SQLite-backed: Reliable persistence using bun:sqlite or better-sqlite3
- High performance: Process up to 15,000 jobs per second
- Cron-scheduled jobs: Easily schedule recurring tasks
- Delayed jobs: Run jobs after a specified delay
- Automatic job cleanup: Remove old completed and failed jobs
- Job timeout handling: Re-queue jobs if a worker dies
- Custom logging: Integrate with your preferred logging solution
- Lightweight: No external dependencies beyond better-sqlite3 and a cron-parser
import { bun, defineQueue } from "plainjob";
import Database from "bun:sqlite";

const connection = bun(new Database("data.db", { strict: true }));
const queue = defineQueue({
  connection,
  timeout: 30 * 60 * 1000, // 30 minutes
  removeDoneJobsOlderThan: 7 * 24 * 60 * 60 * 1000, // 7 days
  removeFailedJobsOlderThan: 30 * 24 * 60 * 60 * 1000, // 30 days
});
// Enqueue a one-time job
queue.add("send-email", { to: "user@example.com", subject: "Hello" });

// Run a job after 1 second
queue.add("send-email", { to: "user@example.com", subject: "Hello" }, { delay: 1000 });

// Schedule a recurring job
queue.schedule("daily-report", { cron: "0 0 * * *" });
import { defineWorker } from "plainjob";

const worker = defineWorker(
  "send-email",
  async (job) => {
    const { to, subject } = JSON.parse(job.data);
    await sendEmail(to, subject);
  },
  {
    queue,
    onCompleted: (job) => console.log(`Job ${job.id} completed`),
    onFailed: (job, error) => console.error(`Job ${job.id} failed: ${error}`),
  }
);

worker.start();
// Count pending jobs
const pendingCount = queue.countJobs({ status: JobStatus.Pending });

// Get job types
const types = queue.getJobTypes();

// Get scheduled jobs
const scheduledJobs = queue.getScheduledJobs();
To ensure all jobs are processed before shutting down:
import { processAll } from "plainjob";

process.on("SIGTERM", async () => {
  console.log("Shutting down...");
  await worker.stop(); // <-- finishes processing jobs
  queue.close();
  process.exit(0);
});
For high-throughput scenarios, you can spawn multiple worker processes. Here's an example based on bench-worker.ts
:
import { fork } from "node:child_process";
import os from "node:os";

const numCPUs = os.cpus().length;
const dbUrl = "queue.db";

for (let i = 0; i < numCPUs; i++) {
  const worker = fork("./worker.ts", [dbUrl]);
  worker.on("exit", (code) => {
    console.log(`Worker ${i} exited with code ${code}`);
  });
}
In worker.ts
:
import Database from "better-sqlite3";
import { better, defineQueue, defineWorker, processAll } from "plainjob";

const dbUrl = process.argv[2];
const connection = better(new Database(dbUrl));
const queue = defineQueue({ connection });

const worker = defineWorker(
  "bench",
  async (job) => {
    // Process job
  },
  { queue }
);

void worker.start().catch((error) => {
  console.error(error);
  process.exit(1);
});
This setup allows you to leverage multiple CPU cores for processing jobs in parallel.
For more detailed information on the API and advanced usage, please refer to the source code and tests.