// alepm/src/cache/cache-manager.js
const os = require('os');
const path = require('path');
const fs = require('fs-extra');
const crypto = require('crypto');
const zlib = require('zlib');
const { promisify } = require('util');

const gzip = promisify(zlib.gzip);
const gunzip = promisify(zlib.gunzip);
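
/**
 * CacheManager: a content-addressed package cache for alepm. Payloads are
 * gzip-compressed, stored under a filename derived from the SHA-256 of the
 * compressed bytes, and tracked in a JSON metadata index in the cache dir.
 */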
class CacheManager {
  constructor() {
    this.cacheDir = path.join(os.homedir(), '.alepm', 'cache');
    this.metadataFile = path.join(this.cacheDir, 'metadata.json');
    // Note: init() is async but intentionally not awaited here, so callers
    // should not assume the cache directory exists immediately after
    // construction.
    this.init();
  }
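
  /** Create the cache directory and write an empty metadata index on first run. */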
  async init() {
    await fs.ensureDir(this.cacheDir);
    if (!fs.existsSync(this.metadataFile)) {
      await this.saveMetadata({
        version: '1.0.0',
        entries: {},
        totalSize: 0,
        lastCleanup: Date.now()
      });
    }
  }
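
  /**
   * Fetch a cached payload. Returns the decompressed Buffer, or null if the
   * entry is absent, its file is missing, or its integrity check fails.
   */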
  async get(packageName, version) {
    const key = this.generateKey(packageName, version);
    const metadata = await this.loadMetadata();
    if (!metadata.entries[key]) {
      return null;
    }
    const entry = metadata.entries[key];
    const filePath = path.join(this.cacheDir, entry.file);
    if (!fs.existsSync(filePath)) {
      // Remove stale entry and release its accounted size
      metadata.totalSize -= entry.size;
      delete metadata.entries[key];
      await this.saveMetadata(metadata);
      return null;
    }
    // Verify integrity
    const fileHash = await this.calculateFileHash(filePath);
    if (fileHash !== entry.hash) {
      // Corrupted entry: remove the file and release its accounted size
      await fs.remove(filePath);
      metadata.totalSize -= entry.size;
      delete metadata.entries[key];
      await this.saveMetadata(metadata);
      return null;
    }
    // Update access time
    entry.lastAccess = Date.now();
    await this.saveMetadata(metadata);
    // Read and decompress
    const compressedData = await fs.readFile(filePath);
    const data = await gunzip(compressedData);
    return data;
  }
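
  /** Check whether packageName@version is cached and its file still exists. */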
  async has(packageName, version) {
    const key = this.generateKey(packageName, version);
    const metadata = await this.loadMetadata();
    if (!metadata.entries[key]) {
      return false;
    }
    const entry = metadata.entries[key];
    const filePath = path.join(this.cacheDir, entry.file);
    // Check if the file is still on disk
    if (!fs.existsSync(filePath)) {
      // Remove stale entry and release its accounted size
      metadata.totalSize -= entry.size;
      delete metadata.entries[key];
      await this.saveMetadata(metadata);
      return false;
    }
    return true;
  }
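
  /**
   * Gzip the payload, write it under a filename derived from its content
   * hash, and record the entry in the metadata index. Replaces any previous
   * entry for the same key.
   */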
  async store(packageName, version, data) {
    const key = this.generateKey(packageName, version);
    const metadata = await this.loadMetadata();
    // Compress data for storage efficiency
    const compressedData = await gzip(data);
    const hash = crypto.createHash('sha256').update(compressedData).digest('hex');
    const fileName = `${hash.substring(0, 16)}.bin`;
    const filePath = path.join(this.cacheDir, fileName);
    // Store compressed data
    await fs.writeFile(filePath, compressedData);
    // Update metadata
    const entry = {
      packageName,
      version,
      file: fileName,
      hash,
      size: compressedData.length,
      originalSize: data.length,
      timestamp: Date.now(),
      lastAccess: Date.now()
    };
    // Remove the old entry if one exists. Release its size even when its
    // file is already gone (it was counted when the entry was stored), and
    // never delete the file we just wrote when the content is identical.
    if (metadata.entries[key]) {
      const oldEntry = metadata.entries[key];
      const oldFilePath = path.join(this.cacheDir, oldEntry.file);
      if (oldFilePath !== filePath && fs.existsSync(oldFilePath)) {
        await fs.remove(oldFilePath);
      }
      metadata.totalSize -= oldEntry.size;
    }
    metadata.entries[key] = entry;
    metadata.totalSize += entry.size;
    await this.saveMetadata(metadata);
    // Check if cleanup is needed
    await this.maybeCleanup();
    return entry;
  }
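
  /** Evict one packageName@version from the cache. Returns true if it existed. */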
  async remove(packageName, version) {
    const key = this.generateKey(packageName, version);
    const metadata = await this.loadMetadata();
    if (!metadata.entries[key]) {
      return false;
    }
    const entry = metadata.entries[key];
    const filePath = path.join(this.cacheDir, entry.file);
    // Files are content-addressed, so only delete the file when no other
    // entry still references it
    const shared = Object.entries(metadata.entries)
      .some(([otherKey, other]) => otherKey !== key && other.file === entry.file);
    if (!shared && fs.existsSync(filePath)) {
      await fs.remove(filePath);
    }
    metadata.totalSize -= entry.size;
    delete metadata.entries[key];
    await this.saveMetadata(metadata);
    return true;
  }
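
  /** Delete every cached file and reset the metadata index. Returns bytes freed. */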
  async clean() {
    const metadata = await this.loadMetadata();
    let cleanedSize = 0;
    for (const entry of Object.values(metadata.entries)) {
      const filePath = path.join(this.cacheDir, entry.file);
      if (fs.existsSync(filePath)) {
        await fs.remove(filePath);
        cleanedSize += entry.size;
      }
    }
    // Reset metadata
    const newMetadata = {
      version: metadata.version,
      entries: {},
      totalSize: 0,
      lastCleanup: Date.now()
    };
    await this.saveMetadata(newMetadata);
    return cleanedSize;
  }
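
  /**
   * Re-hash every cached file, drop entries whose files are missing or
   * corrupted, and report counts for each category.
   */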
  async verify() {
    const metadata = await this.loadMetadata();
    const total = Object.keys(metadata.entries).length;
    const corrupted = [];
    const missing = [];
    for (const [key, entry] of Object.entries(metadata.entries)) {
      const filePath = path.join(this.cacheDir, entry.file);
      if (!fs.existsSync(filePath)) {
        missing.push(key);
        continue;
      }
      const fileHash = await this.calculateFileHash(filePath);
      if (fileHash !== entry.hash) {
        corrupted.push(key);
      }
    }
    // Delete corrupted files from disk, then drop both missing and
    // corrupted entries from the index
    for (const key of corrupted) {
      await fs.remove(path.join(this.cacheDir, metadata.entries[key].file));
    }
    for (const key of [...missing, ...corrupted]) {
      const entry = metadata.entries[key];
      metadata.totalSize -= entry.size;
      delete metadata.entries[key];
    }
    if (missing.length > 0 || corrupted.length > 0) {
      await this.saveMetadata(metadata);
    }
    return {
      total,
      corrupted: corrupted.length,
      missing: missing.length,
      valid: total - corrupted.length - missing.length
    };
  }
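
  /** Summarize entry count, sizes, compression ratio, and entry age range. */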
  async getStats() {
    const metadata = await this.loadMetadata();
    const entries = Object.values(metadata.entries);
    const totalOriginalSize = entries.reduce((sum, entry) => sum + entry.originalSize, 0);
    return {
      totalEntries: entries.length,
      totalSize: metadata.totalSize,
      totalOriginalSize,
      compressionRatio: totalOriginalSize > 0
        ? metadata.totalSize / totalOriginalSize
        : 0,
      oldestEntry: entries.length > 0
        ? Math.min(...entries.map(e => e.timestamp))
        : null,
      newestEntry: entries.length > 0
        ? Math.max(...entries.map(e => e.timestamp))
        : null,
      lastCleanup: metadata.lastCleanup
    };
  }
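
  /**
   * Opportunistic cleanup: runs at most weekly unless the cache already
   * exceeds its 1 GB budget. Expires entries not accessed for 30 days, then
   * evicts least recently used entries until the cache fits the budget.
   */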
  async maybeCleanup() {
    const metadata = await this.loadMetadata();
    const maxCacheSize = 1024 * 1024 * 1024; // 1 GB
    const maxAge = 30 * 24 * 60 * 60 * 1000; // 30 days
    const timeSinceLastCleanup = Date.now() - metadata.lastCleanup;
    const weekInMs = 7 * 24 * 60 * 60 * 1000;
    // Only run cleanup weekly, or sooner if the cache is too large
    if (timeSinceLastCleanup < weekInMs && metadata.totalSize < maxCacheSize) {
      return;
    }
    const now = Date.now();
    const entries = Object.entries(metadata.entries);
    let removedSize = 0;
    // Remove entries that have not been accessed within maxAge
    for (const [key, entry] of entries) {
      if (now - entry.lastAccess > maxAge) {
        const filePath = path.join(this.cacheDir, entry.file);
        if (fs.existsSync(filePath)) {
          await fs.remove(filePath);
        }
        removedSize += entry.size;
        delete metadata.entries[key];
      }
    }
    // If still over the limit, evict least recently used entries
    if (metadata.totalSize - removedSize > maxCacheSize) {
      const sortedEntries = Object.entries(metadata.entries)
        .sort(([, a], [, b]) => a.lastAccess - b.lastAccess);
      for (const [key, entry] of sortedEntries) {
        if (metadata.totalSize - removedSize <= maxCacheSize) break;
        const filePath = path.join(this.cacheDir, entry.file);
        if (fs.existsSync(filePath)) {
          await fs.remove(filePath);
        }
        removedSize += entry.size;
        delete metadata.entries[key];
      }
    }
    metadata.totalSize -= removedSize;
    metadata.lastCleanup = now;
    await this.saveMetadata(metadata);
  }
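
  /** Derive a stable cache key from "packageName@version" via SHA-1. */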
  generateKey(packageName, version) {
    return crypto.createHash('sha1')
      .update(`${packageName}@${version}`)
      .digest('hex');
  }

  /** SHA-256 of a file's on-disk (compressed) contents. */
  async calculateFileHash(filePath) {
    const data = await fs.readFile(filePath);
    return crypto.createHash('sha256').update(data).digest('hex');
  }
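
  /** Read the metadata index from disk, falling back to an empty index. */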
  async loadMetadata() {
    try {
      return await fs.readJson(this.metadataFile);
    } catch (error) {
      // Return default metadata if the file is missing or corrupted
      return {
        version: '1.0.0',
        entries: {},
        totalSize: 0,
        lastCleanup: Date.now()
      };
    }
  }

  async saveMetadata(metadata) {
    await fs.writeJson(this.metadataFile, metadata, { spaces: 2 });
  }
  // Binary storage optimization methods

  async packPackageData(packageData) {
    // Pack package data into a compact binary format:
    // [8-byte ASCII magic "ALEPM001"][4-byte big-endian length][JSON payload]
    const buffer = Buffer.from(JSON.stringify(packageData));
    const header = Buffer.from('ALEPM001', 'ascii'); // Version 1 format
    const length = Buffer.alloc(4);
    length.writeUInt32BE(buffer.length, 0);
    return Buffer.concat([header, length, buffer]);
  }

  async unpackPackageData(binaryData) {
    // Verify the magic header
    const header = binaryData.slice(0, 8).toString('ascii');
    if (header !== 'ALEPM001') {
      throw new Error('Invalid package data format');
    }
    // Read the payload length (4 bytes after the 8-byte header)
    const length = binaryData.readUInt32BE(8);
    // Extract and parse the JSON payload
    const packageBuffer = binaryData.slice(12, 12 + length);
    return JSON.parse(packageBuffer.toString());
  }
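
  /**
   * Drop index entries whose content hash duplicates an earlier entry.
   * Because file names are derived from the content hash, duplicates already
   * share a single file on disk; only the redundant index entry (and its
   * double-counted size) is removed.
   */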
  async deduplicate() {
    const metadata = await this.loadMetadata();
    const hashMap = new Map();
    let savedSpace = 0;
    // Find duplicate entries by hash
    for (const [key, entry] of Object.entries(metadata.entries)) {
      if (hashMap.has(entry.hash)) {
        // Duplicate found. Do NOT delete the file: the surviving entry
        // points at the same content-addressed file on disk, so only the
        // redundant index entry and its double-counted size are dropped.
        savedSpace += entry.size;
        delete metadata.entries[key];
        metadata.totalSize -= entry.size;
      } else {
        hashMap.set(entry.hash, key);
      }
    }
    if (savedSpace > 0) {
      await this.saveMetadata(metadata);
    }
    return savedSpace;
  }
}
module.exports = CacheManager;
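
// Example usage (a minimal sketch; the package name and payload below are
// illustrative, and an async context is assumed):
//
//   const CacheManager = require('./cache-manager');
//   const cache = new CacheManager();
//   await cache.store('left-pad', '1.3.0', Buffer.from('tarball bytes'));
//   if (await cache.has('left-pad', '1.3.0')) {
//     const data = await cache.get('left-pad', '1.3.0'); // decompressed Buffer
//   }
//   console.log(await cache.getStats());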