fix hashing of big files

Signed-off-by: ale <ale@manalejandro.com>
This commit is contained in:
ale
2025-12-17 12:32:13 +01:00
parent 4d9545d0ec
commit e3525c1673

@@ -20,10 +20,10 @@
  */
 import Redis from 'ioredis';
-import { createReadStream, existsSync, readFileSync, writeFileSync, unlinkSync } from 'fs';
+import { createReadStream, existsSync, readFileSync, writeFileSync, unlinkSync, openSync, readSync, closeSync } from 'fs';
 import { resolve, basename } from 'path';
 import { createInterface } from 'readline';
-import crypto from 'crypto';
+import * as crypto from 'crypto';
 const REDIS_HOST = process.env.REDIS_HOST || 'localhost';
 const REDIS_PORT = parseInt(process.env.REDIS_PORT || '6379', 10);
@@ -163,10 +163,28 @@ Examples:
 }
 function computeFileHash(filePath: string): string {
-  const fileBuffer = readFileSync(filePath);
-  const hashSum = crypto.createHash('sha256');
-  hashSum.update(fileBuffer);
-  return hashSum.digest('hex');
+  // Hash the file in fixed-size chunks so large files are never loaded into memory at once
+  const hash = crypto.createHash('sha256');
+  const chunkSize = 64 * 1024; // 64KB per read
+  const readBuffer = Buffer.alloc(chunkSize);
+  const fd = openSync(filePath, 'r');
+  try {
+    let bytesRead;
+    do {
+      // readSync with position null advances the file offset automatically
+      bytesRead = readSync(fd, readBuffer, 0, chunkSize, null);
+      if (bytesRead > 0) {
+        hash.update(readBuffer.subarray(0, bytesRead));
+      }
+    } while (bytesRead > 0);
+  } finally {
+    // Always release the file descriptor, even if a read fails
+    closeSync(fd);
+  }
+  return hash.digest('hex');
 }
 function getStateFilePath(filePath: string, customPath: string | null): string {
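
Since the updated import still keeps createReadStream, the same digest could also be computed without blocking the event loop. A minimal sketch of that alternative, assuming a hypothetical computeFileHashAsync helper that is not part of this commit:

// Sketch only: non-blocking SHA-256 of a large file; computeFileHashAsync is an illustrative name.
import { createReadStream } from 'fs';
import * as crypto from 'crypto';

function computeFileHashAsync(filePath: string): Promise<string> {
  return new Promise((resolve, reject) => {
    const hash = crypto.createHash('sha256');
    // Read the file as a stream in 64KB chunks, feeding each chunk to the hash
    const input = createReadStream(filePath, { highWaterMark: 64 * 1024 });
    input.on('data', (chunk) => hash.update(chunk));
    input.on('error', reject);
    input.on('end', () => resolve(hash.digest('hex')));
  });
}

Compared with the synchronous readSync loop in the commit, this variant keeps other work (for example the Redis connection) responsive while a large file is being hashed.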