@@ -1,76 +0,0 @@
|
||||
import { Client } from '@elastic/elasticsearch';
|
||||
|
||||
// Elasticsearch endpoint; overridable via environment for non-local deployments.
const ELASTICSEARCH_NODE = process.env.ELASTICSEARCH_NODE || 'http://localhost:9200';

// Name of the index that stores plaintext/hash documents (see INDEX_MAPPING below).
const INDEX_NAME = 'hasher';
|
||||
|
||||
// Shared Elasticsearch client for the whole application.
export const esClient = new Client({
  node: ELASTICSEARCH_NODE,
  requestTimeout: 30000, // per-request timeout in ms
  maxRetries: 3,
});
|
||||
|
||||
// Settings and mappings applied when initializeIndex() creates the index.
export const INDEX_MAPPING = {
  settings: {
    // NOTE(review): 10 primary shards is a lot for a single index — confirm sizing.
    number_of_shards: 10,
    number_of_replicas: 1,
    analysis: {
      analyzer: {
        // keyword tokenizer + lowercase filter: indexes the whole value
        // as one case-folded token.
        lowercase_analyzer: {
          type: 'custom' as const,
          tokenizer: 'keyword',
          filter: ['lowercase']
        }
      }
    }
  },
  mappings: {
    properties: {
      // Original value: analyzed case-insensitively, plus an exact
      // 'plaintext.keyword' sub-field for term-level queries.
      plaintext: {
        type: 'text' as const,
        analyzer: 'lowercase_analyzer',
        fields: {
          keyword: {
            type: 'keyword' as const
          }
        }
      },
      // One exact-match (keyword) field per digest algorithm.
      md5: {
        type: 'keyword' as const
      },
      sha1: {
        type: 'keyword' as const
      },
      sha256: {
        type: 'keyword' as const
      },
      sha512: {
        type: 'keyword' as const
      },
      created_at: {
        type: 'date' as const
      }
    }
  }
};
|
||||
|
||||
export async function initializeIndex(): Promise<void> {
|
||||
try {
|
||||
const indexExists = await esClient.indices.exists({ index: INDEX_NAME });
|
||||
|
||||
if (!indexExists) {
|
||||
await esClient.indices.create({
|
||||
index: INDEX_NAME,
|
||||
settings: INDEX_MAPPING.settings,
|
||||
mappings: INDEX_MAPPING.mappings
|
||||
});
|
||||
console.log(`Index '${INDEX_NAME}' created successfully with 10 shards`);
|
||||
} else {
|
||||
console.log(`Index '${INDEX_NAME}' already exists`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error initializing Elasticsearch index:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export { INDEX_NAME };
|
||||
181
lib/redis.ts
Archivo normal
181
lib/redis.ts
Archivo normal
@@ -0,0 +1,181 @@
|
||||
import Redis from 'ioredis';
|
||||
|
||||
// Redis connection parameters, all overridable via environment variables.
const REDIS_HOST = process.env.REDIS_HOST || 'localhost';
const REDIS_PORT = parseInt(process.env.REDIS_PORT || '6379', 10);
const REDIS_PASSWORD = process.env.REDIS_PASSWORD || undefined;
const REDIS_DB = parseInt(process.env.REDIS_DB || '0', 10);

// Logical datastore name (kept for parity with the Elasticsearch backend).
export const INDEX_NAME = 'hasher';
|
||||
|
||||
// Shared Redis client. NOTE(review): this is a single multiplexed
// connection, not a connection pool, despite the original comment.
export const redisClient = new Redis({
  host: REDIS_HOST,
  port: REDIS_PORT,
  password: REDIS_PASSWORD,
  db: REDIS_DB,
  // Linear backoff between reconnect attempts, capped at 2 seconds.
  retryStrategy: (times) => {
    const delay = Math.min(times * 50, 2000);
    return delay;
  },
  maxRetriesPerRequest: 3,
  enableReadyCheck: true,
  lazyConnect: false, // connect eagerly at module load
});
|
||||
|
||||
// Connection-level event logging; errors are logged, not rethrown,
// so transient disconnects don't crash the process.
redisClient.on('error', (err) => {
  console.error('Redis Client Error:', err);
});

redisClient.on('connect', () => {
  console.log('Redis connected successfully');
});
|
||||
|
||||
/**
|
||||
* Redis Keys Structure:
|
||||
*
|
||||
* 1. Hash documents: hash:plaintext:{plaintext} = JSON string
|
||||
* - Stores all hash data for a plaintext
|
||||
*
|
||||
* 2. Hash indexes: hash:index:{algorithm}:{hash} = plaintext
|
||||
* - Allows reverse lookup from hash to plaintext
|
||||
* - One key per algorithm (md5, sha1, sha256, sha512)
|
||||
*
|
||||
* 3. Statistics: hash:stats = Hash {count, size}
|
||||
* - count: total number of unique plaintexts
|
||||
* - size: approximate total size in bytes
|
||||
*/
|
||||
|
||||
/**
 * A plaintext together with its digests, stored in Redis as a
 * JSON string under `hash:plaintext:{plaintext}`.
 */
export interface HashDocument {
  plaintext: string;
  md5: string;
  sha1: string;
  sha256: string;
  sha512: string;
  created_at: string; // presumably an ISO-8601 timestamp — set by callers; confirm
}
|
||||
|
||||
/**
|
||||
* Store a hash document in Redis
|
||||
*/
|
||||
export async function storeHashDocument(doc: HashDocument): Promise<void> {
|
||||
const pipeline = redisClient.pipeline();
|
||||
|
||||
// Store main document
|
||||
const key = `hash:plaintext:${doc.plaintext}`;
|
||||
pipeline.set(key, JSON.stringify(doc));
|
||||
|
||||
// Create indexes for each hash type
|
||||
pipeline.set(`hash:index:md5:${doc.md5}`, doc.plaintext);
|
||||
pipeline.set(`hash:index:sha1:${doc.sha1}`, doc.plaintext);
|
||||
pipeline.set(`hash:index:sha256:${doc.sha256}`, doc.plaintext);
|
||||
pipeline.set(`hash:index:sha512:${doc.sha512}`, doc.plaintext);
|
||||
|
||||
// Update statistics
|
||||
pipeline.hincrby('hash:stats', 'count', 1);
|
||||
pipeline.hincrby('hash:stats', 'size', JSON.stringify(doc).length);
|
||||
|
||||
await pipeline.exec();
|
||||
}
|
||||
|
||||
/**
|
||||
* Find a hash document by plaintext
|
||||
*/
|
||||
export async function findByPlaintext(plaintext: string): Promise<HashDocument | null> {
|
||||
const key = `hash:plaintext:${plaintext}`;
|
||||
const data = await redisClient.get(key);
|
||||
|
||||
if (!data) return null;
|
||||
|
||||
return JSON.parse(data) as HashDocument;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find a hash document by any hash value
|
||||
*/
|
||||
export async function findByHash(algorithm: string, hash: string): Promise<HashDocument | null> {
|
||||
const indexKey = `hash:index:${algorithm}:${hash}`;
|
||||
const plaintext = await redisClient.get(indexKey);
|
||||
|
||||
if (!plaintext) return null;
|
||||
|
||||
return findByPlaintext(plaintext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a plaintext or any of its hashes exist
|
||||
*/
|
||||
export async function checkExistence(plaintext: string, hashes?: {
|
||||
md5?: string;
|
||||
sha1?: string;
|
||||
sha256?: string;
|
||||
sha512?: string;
|
||||
}): Promise<boolean> {
|
||||
// Check if plaintext exists
|
||||
const plaintextKey = `hash:plaintext:${plaintext}`;
|
||||
const exists = await redisClient.exists(plaintextKey);
|
||||
|
||||
if (exists) return true;
|
||||
|
||||
// Check if any hash exists
|
||||
if (hashes) {
|
||||
const pipeline = redisClient.pipeline();
|
||||
if (hashes.md5) pipeline.exists(`hash:index:md5:${hashes.md5}`);
|
||||
if (hashes.sha1) pipeline.exists(`hash:index:sha1:${hashes.sha1}`);
|
||||
if (hashes.sha256) pipeline.exists(`hash:index:sha256:${hashes.sha256}`);
|
||||
if (hashes.sha512) pipeline.exists(`hash:index:sha512:${hashes.sha512}`);
|
||||
|
||||
const results = await pipeline.exec();
|
||||
if (results && results.some(([_err, result]) => result === 1)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get database statistics
|
||||
*/
|
||||
export async function getStats(): Promise<{ count: number; size: number }> {
|
||||
const stats = await redisClient.hgetall('hash:stats');
|
||||
return {
|
||||
count: parseInt(stats.count || '0', 10),
|
||||
size: parseInt(stats.size || '0', 10),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get Redis server info
|
||||
*/
|
||||
export async function getRedisInfo(): Promise<{
|
||||
version: string;
|
||||
memory: string;
|
||||
dbSize: number;
|
||||
}> {
|
||||
const info = await redisClient.info('server');
|
||||
const memory = await redisClient.info('memory');
|
||||
const dbSize = await redisClient.dbsize();
|
||||
|
||||
const versionMatch = info.match(/redis_version:([^\r\n]+)/);
|
||||
const memoryMatch = memory.match(/used_memory_human:([^\r\n]+)/);
|
||||
|
||||
return {
|
||||
version: versionMatch ? versionMatch[1] : 'unknown',
|
||||
memory: memoryMatch ? memoryMatch[1] : 'unknown',
|
||||
dbSize,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize Redis connection (just verify it's working)
|
||||
*/
|
||||
export async function initializeRedis(): Promise<void> {
|
||||
try {
|
||||
await redisClient.ping();
|
||||
console.log('Redis connection verified');
|
||||
} catch (error) {
|
||||
console.error('Error connecting to Redis:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
Referencia en una nueva incidencia
Block a user