arguments, fixes, dockerfile, ci
This commit is contained in:
parent d6365855b6
commit b728f19570
2 .dockerignore Normal file
@@ -0,0 +1,2 @@
+logs/
+node_modules/
79 .gitea/workflows/build_publish.yaml Normal file
@@ -0,0 +1,79 @@
+name: Build and publish farmacia-bot images
+
+on:
+  workflow_dispatch:
+  push:
+    branches:
+      - 'main'
+    tags:
+      - 'v*'
+
+jobs:
+  build_images:
+    name: Build and push client's image to Gitea's registry
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - name: Check out the repo
+        uses: actions/checkout@v4
+
+      - name: Log in to Gitea's docker registry
+        uses: docker/login-action@v3.1.0
+        with:
+          registry: ${{ vars.hostname_gitea }}
+          username: mozempk
+          password: ${{ secrets.REGISTRY_GITEA_TOKEN }}
+
+      - name: Extract client metadata (tags, labels) for Docker
+        id: meta_client
+        uses: docker/metadata-action@v5.5.1
+        with:
+          # force the 'latest' tag; otherwise it is only created when a release (tag) is pushed
+          tags: |
+            type=raw,enable=true,priority=200,prefix=,suffix=,value=latest
+          images: ${{ vars.hostname_gitea }}/${{ gitea.repository }}/client
+
+      - name: Build and push client Docker image
+        id: push_client
+        uses: docker/build-push-action@v5.3.0
+        with:
+          context: .
+          file: ./Dockerfile
+          push: true
+          tags: ${{ steps.meta_client.outputs.tags }}
+          labels: ${{ steps.meta_client.outputs.labels }}
+
+      - name: Extract server metadata (tags, labels) for Docker
+        id: meta_server
+        uses: docker/metadata-action@v5.5.1
+        with:
+          # force the 'latest' tag; otherwise it is only created when a release (tag) is pushed
+          tags: |
+            type=raw,enable=true,priority=200,prefix=,suffix=,value=latest
+          images: ${{ vars.hostname_gitea }}/${{ gitea.repository }}/server
+
+      - name: Build and push server Docker image
+        id: push_server
+        uses: docker/build-push-action@v5.3.0
+        with:
+          context: .
+          file: ./Dockerfile
+          push: true
+          tags: ${{ steps.meta_server.outputs.tags }}
+          labels: ${{ steps.meta_server.outputs.labels }}
+  deploy_ovh:
+    runs-on: ubuntu-latest
+    needs: build_images
+    steps:
+      - name: Deploy to OVH
+        id: ovh_deploy
+        uses: appleboy/ssh-action@v1.0.3
+        with:
+          host: ${{ secrets.DEPLOY_HOST }}
+          username: ${{ secrets.DEPLOY_USERNAME }}
+          key: ${{ secrets.DEPLOY_KEY }}
+          script: |
+            cd ~/stacks/bot-farmacia
+            docker-compose down && docker-compose pull && docker-compose up -d
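The deploy step assumes a compose stack already present at ~/stacks/bot-farmacia on the OVH host; the workflow never creates it. A minimal sketch of what that docker-compose.yml could look like, assuming one service per pushed image (hostname, owner/repo, and service names are placeholders, not part of this commit):

    # Hypothetical ~/stacks/bot-farmacia/docker-compose.yml (not in this commit)
    version: "3.8"
    services:
      client:
        image: <hostname_gitea>/<owner>/<repo>/client:latest  # placeholder registry path
        restart: unless-stopped
        environment:
          - THREAD_COUNT=0  # 0 makes main.js fall back to cores - 1
        volumes:
          - ./logs/client:/home/appuser/app/logs
      server:
        image: <hostname_gitea>/<owner>/<repo>/server:latest  # placeholder registry path
        restart: unless-stopped
        volumes:
          - ./logs/server:/home/appuser/app/logs

With a file like this in place, the `docker-compose down && docker-compose pull && docker-compose up -d` line recreates both containers from the freshly pushed :latest images.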
45 Dockerfile Normal file
@@ -0,0 +1,45 @@
+# Build stage
+FROM node:23-alpine AS builder
+
+# Set working directory for the build stage
+WORKDIR /build
+
+# Copy package files
+COPY package*.json ./
+
+# Install dependencies
+RUN npm install
+
+# Copy application files
+COPY . .
+
+# Runtime stage
+FROM node:23-alpine AS runtime
+
+# Create a non-root user
+RUN addgroup -S appgroup && adduser -S appuser -G appgroup
+
+# Set working directory for the application
+WORKDIR /home/appuser/app
+
+# Copy only the necessary files from the builder stage
+COPY --from=builder --chown=appuser:appgroup /build/package*.json ./
+COPY --from=builder --chown=appuser:appgroup /build/main.js ./
+COPY --from=builder --chown=appuser:appgroup /build/util.js ./
+COPY --from=builder --chown=appuser:appgroup /build/worker.js ./
+COPY --from=builder --chown=appuser:appgroup /build/node_modules ./node_modules
+
+# Create logs directory with correct permissions
+RUN mkdir -p logs && chown -R appuser:appgroup logs
+
+# Expose logs directory as a volume
+VOLUME ["/home/appuser/app/logs"]
+
+# Use an environment variable for thread count (default to 0 to use auto)
+ENV THREAD_COUNT=0
+
+# Switch to non-root user
+USER appuser
+
+# Run the application - using JSON array format
+CMD ["sh", "-c", "node main.js ${THREAD_COUNT}"]
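Because the CMD forwards THREAD_COUNT as the positional argument to main.js, the worker count can be changed at run time without rebuilding. A usage sketch (the image name is a placeholder):

    # Default: THREAD_COUNT=0 fails main.js's "> 0" guard, so cores - 1 applies
    docker run --rm <image>
    # Pin the worker count to 4 by overriding the env var
    docker run --rm -e THREAD_COUNT=4 <image>
    # Keep log files outside the container via the declared volume
    docker run --rm -e THREAD_COUNT=4 -v "$PWD/logs:/home/appuser/app/logs" <image>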
21 main.js
@@ -1,6 +1,19 @@
-const {workerFactory, onMessage, onError, onExit} = require('./util');
-const THREAD_COUNT = 4; // Number of threads you want to run
+const {workerFactory, onMessage, onError, onExit, logger} = require('./util');
+const os = require('os');
+
+let THREAD_COUNT = Math.max(1, os.cpus().length - 1);
+// Get thread count from first positional argument or use default (all cores - 1)
+if (process.argv.length > 2) {
+    const requestedThreads = parseInt(process.argv[2], 10);
+    if (!isNaN(requestedThreads) && requestedThreads > 0) {
+        THREAD_COUNT = requestedThreads;
+    }
+}
+
+const startDateTime = new Date().toISOString();
+console.log(`main\t🕒 Started at ${startDateTime}`);
+console.log(`main\t🧵 Using ${THREAD_COUNT} worker threads`);
+logger().info(`Application started with ${THREAD_COUNT} worker threads`);
 
 const data = {
     url: 'https://care.drmax.eu/it/pharmacy/698Edb',
@@ -12,7 +25,6 @@ const data = {
     run: true
 };
 
-
 const workers = [...Array(THREAD_COUNT).keys()].map((id) => workerFactory(
     { id, ...data },
     onMessage,
@@ -20,14 +32,17 @@ const workers = [...Array(THREAD_COUNT).keys()].map((id) => workerFactory(
     onExit
 ));
 console.log(`main\t✅ Initialized ${workers.length} workers.`);
+logger().info(`Initialized ${workers.length} workers`);
 
 // Handle Ctrl+C
 process.on('SIGINT', async () => {
     console.log('\nmain\t🛑 Terminating workers...');
+    logger().info('Received SIGINT signal. Terminating workers...');
 
     // Gracefully terminate all workers
     await Promise.all(workers.map(worker => worker.terminate()));
 
     console.log('main\t✅ All workers terminated. Exiting.');
+    logger().info('All workers terminated. Exiting.');
     process.exit(0);
 });
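For reference, the net effect of the new argument handling above, as shell invocations (this follows directly from the parseInt guard; nothing here is part of the commit):

    node main.js        # no argument: default of cores - 1 workers
    node main.js 8      # positive integer: 8 workers
    node main.js 0      # fails the "> 0" check: default applies
    node main.js four   # parseInt yields NaN: default applies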
package.json
@@ -1,10 +1,14 @@
 {
   "name": "multithreaded-http-app",
   "version": "1.0.0",
   "description": "A multithreaded Node.js app that performs repeated HTTP POST requests using worker threads.",
   "main": "main.js",
   "scripts": {
-    "start": "node main.js"
+    "start": "node main.js",
+    "start:single": "node main.js 1",
+    "start:quad": "node main.js 4",
+    "start:octa": "node main.js 8"
   },
   "dependencies": {
     "axios": "^1.6.0"
45 util.js
@@ -1,8 +1,28 @@
 const { Worker } = require('worker_threads');
+const fs = require('fs');
+const path = require('path');
 
 let totalSuccess = 0;
 let totalError = 0;
 const startTime = Date.now();
 
+const logsDir = path.join(__dirname, 'logs');
+if (!fs.existsSync(logsDir)) {
+    fs.mkdirSync(logsDir);
+}
+
+const logFilePath = path.join(logsDir, `app-${new Date().toISOString().replace(/:/g, '-')}.log`);
+const logger = () => {
+    const timestamp = new Date().toISOString();
+    return {
+        info: (message) => fs.appendFileSync(logFilePath, `[${timestamp}] INFO:\t${message}\n`),
+        error: (message) => fs.appendFileSync(logFilePath, `[${timestamp}] ERROR:\t${message}\n`),
+        warn: (message) => fs.appendFileSync(logFilePath, `[${timestamp}] WARN:\t${message}\n`)
+    }
+};
+
 module.exports = {
+    logger,
     workerFactory: (
         workerData,
         onMessage = ({ id, payload }) => console.log("onMessage", id, payload),
@@ -17,21 +37,34 @@ module.exports = {
         switch (payload.status) {
             case 'starting':
                 console.log(`${payload.id}\t⌛ Starting with url: ${payload.url}`);
+                logger().info(`Worker ${payload.id} starting with url: ${payload.url}`);
                 return;
             case 'success':
                 totalSuccess++;
                 break;
             case 'recoverable-error':
                 totalError++;
                 //console.warn(`\r${id}\t🤷 ${payload.error}.`);
+                // Log recoverable errors to file
+                logger().warn(`Worker ${id}: ${payload.error}`);
                 break;
         }
         const totalRequests = totalError + totalSuccess;
         const timeDelta = Date.now() - startTime;
         const rps = totalRequests / (timeDelta / 1000);
-        const successRate = (totalSuccess/totalRequests)*100
-        process.stdout.write(`\r${id}\t📄 Requests: ${totalSuccess}/${totalRequests}. 📊 Success rate: ${successRate.toFixed(2)}%. ⚡ Req/s: ${rps.toFixed(2)}. ⏲️ Req/h: ${(rps * 3600).toFixed(2)}`);
+        const successRate = (totalSuccess / totalRequests) * 100;
+        process.stdout.write(`\r${id}\t🕒 Started: ${new Date(startTime).toISOString()} 📄 Requests: ${totalSuccess}/${totalRequests}. 📊 Success rate: ${successRate.toFixed(2)}%. ⚡ Req/s: ${rps.toFixed(2)}. ⏲️ Req/h: ${(rps * 3600).toFixed(2)}`);
     },
-    onError: ({ id, error }) => console.error(`\r${id}\t❌ ${error}. Total Successful Requests: ${totalSuccess}`),
-    onExit: ({ id, status }) => status === 0 ? console.log(`${id}\t✅ Gracefully terminated.`) : console.error(`${id}\t❌ Exit status: ${status}`)
-}
+    onError: ({ id, error }) => {
+        console.error(`\r${id}\t❌ ${error}. Total Successful Requests: ${totalSuccess}`);
+        logger().error(`Worker ${id} critical error: ${error}`);
+    },
+    onExit: ({ id, status }) => {
+        if (status !== 0) {
+            console.error(`${id}\t❌ Exit status: ${status}`);
+            logger().error(`Worker ${id} exited with status: ${status}`);
+            return;
+        }
+        console.log(`${id}\t✅ Gracefully terminated.`);
+        logger().info(`Worker ${id} gracefully terminated`);
+    }
+};
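Note that logger is exported as a factory: each logger() call stamps a fresh ISO timestamp and appends synchronously to one per-run file under logs/ (named from the time util.js was first required). A minimal consumption sketch, matching how main.js uses it:

    const { logger } = require('./util');
    logger().info('started');   // appends "[<ISO time>] INFO:\tstarted" to logs/app-<start time>.log
    logger().warn('retrying');  // new call, new timestamp, same per-run file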