// NOTE(review): removed duplicated file-viewer metadata ("237 líneas / 6.3 KiB /
// TypeScript") that was pasted into the top of this source file by mistake.
import { NextRequest, NextResponse } from 'next/server';

import { esClient, INDEX_NAME, initializeIndex } from '@/lib/elasticsearch';

import { generateHashes, detectHashType } from '@/lib/hash';
// Shape of a document stored in the Elasticsearch hash index: one plaintext
// value plus its digest under each supported hash algorithm.
interface HashDocument {
  // Original (pre-hash) string value.
  plaintext: string;
  // Hex digests of `plaintext` under each algorithm.
  md5: string;
  sha1: string;
  sha256: string;
  sha512: string;
  // ISO-8601 timestamp set when the document is first indexed
  // (see POST handler: `created_at: new Date().toISOString()`).
  created_at?: string;
}
// Maximum allowed query length
|
|
const MAX_QUERY_LENGTH = 1000;
|
|
|
|
// Characters that could be used in NoSQL/Elasticsearch injection attacks
|
|
const DANGEROUS_PATTERNS = [
|
|
/[{}\[\]]/g, // JSON structure characters
|
|
/\$[a-zA-Z]/g, // MongoDB-style operators
|
|
/\\u[0-9a-fA-F]{4}/g, // Unicode escapes
|
|
/<script/gi, // XSS attempts
|
|
/javascript:/gi, // XSS attempts
|
|
];
|
|
|
|
/**
|
|
* Sanitize input to prevent NoSQL injection attacks
|
|
* For hash lookups, we only need alphanumeric characters and $
|
|
* For plaintext, we allow more characters but sanitize dangerous patterns
|
|
*/
|
|
function sanitizeInput(input: string): string {
|
|
// Trim and take first word only
|
|
let sanitized = input.trim().split(/\s+/)[0] || '';
|
|
|
|
// Limit length
|
|
if (sanitized.length > MAX_QUERY_LENGTH) {
|
|
sanitized = sanitized.substring(0, MAX_QUERY_LENGTH);
|
|
}
|
|
|
|
// Remove null bytes
|
|
sanitized = sanitized.replace(/\0/g, '');
|
|
|
|
// Check for dangerous patterns
|
|
for (const pattern of DANGEROUS_PATTERNS) {
|
|
sanitized = sanitized.replace(pattern, '');
|
|
}
|
|
|
|
return sanitized;
|
|
}
|
|
|
|
/**
|
|
* Validate that the input is safe for use in Elasticsearch queries
|
|
*/
|
|
function isValidInput(input: string): boolean {
|
|
// Check for empty input
|
|
if (!input || input.length === 0) {
|
|
return false;
|
|
}
|
|
|
|
// Check for excessively long input
|
|
if (input.length > MAX_QUERY_LENGTH) {
|
|
return false;
|
|
}
|
|
|
|
// Check for control characters (except normal whitespace)
|
|
if (/[\x00-\x08\x0B\x0C\x0E-\x1F\x7F]/.test(input)) {
|
|
return false;
|
|
}
|
|
|
|
return true;
|
|
}
|
|
|
|
/**
 * POST handler: hash lookup / plaintext hashing endpoint.
 *
 * Expects a JSON body of `{ query: string }` and behaves in two modes:
 *  - If the (sanitized, lowercased) query matches a known hash format per
 *    detectHashType(), the hash is looked up in Elasticsearch and any stored
 *    plaintext(s) are returned.
 *  - Otherwise the query is treated as plaintext: its md5/sha1/sha256/sha512
 *    digests are returned, and a new document is indexed unless the plaintext
 *    (or any one of its digests) is already present.
 *
 * Returns 400 for malformed/invalid input, 500 on unexpected errors.
 */
export async function POST(request: NextRequest) {
  try {
    const body = await request.json();

    // Validate request body structure
    if (!body || typeof body !== 'object') {
      return NextResponse.json(
        { error: 'Invalid request body' },
        { status: 400 }
      );
    }

    const { query } = body;

    // Validate query type
    if (!query || typeof query !== 'string') {
      return NextResponse.json(
        { error: 'Query parameter is required and must be a string' },
        { status: 400 }
      );
    }

    // Validate input before processing (length / control-character checks).
    if (!isValidInput(query)) {
      return NextResponse.json(
        { error: 'Invalid query: contains forbidden characters or is too long' },
        { status: 400 }
      );
    }

    // Sanitize input (first token only, dangerous patterns stripped).
    const cleanQuery = sanitizeInput(query);

    // Sanitization can strip everything (e.g. a query made only of braces).
    if (!cleanQuery) {
      return NextResponse.json(
        { error: 'Invalid query: only whitespace or invalid characters provided' },
        { status: 400 }
      );
    }

    // Ensure index exists
    await initializeIndex();

    // Hash detection and lookup are done on the lowercased form, so
    // mixed-case hex input still matches stored digests.
    const cleanQueryLower = cleanQuery.toLowerCase();
    const hashType = detectHashType(cleanQueryLower);

    if (hashType) {
      // Query is a hash - search for it in Elasticsearch.
      // `hashType` names the digest field to match; an exact `term` query
      // means the user input is never parsed/analyzed by ES.
      const searchResponse = await esClient.search<HashDocument>({
        index: INDEX_NAME,
        query: {
          term: {
            [hashType]: cleanQueryLower
          }
        }
      });

      const hits = searchResponse.hits.hits;

      if (hits.length > 0) {
        // Found matching plaintext: return every hit's plaintext plus its
        // full set of stored digests.
        return NextResponse.json({
          found: true,
          hashType,
          hash: cleanQuery,
          results: hits.map((hit) => {
            const source = hit._source!;
            return {
              plaintext: source.plaintext,
              hashes: {
                md5: source.md5,
                sha1: source.sha1,
                sha256: source.sha256,
                sha512: source.sha512,
              }
            };
          })
        });
      } else {
        // Hash not found in database
        return NextResponse.json({
          found: false,
          hashType,
          hash: cleanQuery,
          message: 'Hash not found in database'
        });
      }
    } else {
      // Query is plaintext - check if it already exists first.
      // NOTE(review): assumes the index maps `plaintext` with a `.keyword`
      // sub-field — confirm against the mapping in @/lib/elasticsearch.
      const existsResponse = await esClient.search<HashDocument>({
        index: INDEX_NAME,
        query: {
          term: {
            'plaintext.keyword': cleanQuery
          }
        }
      });

      let hashes;

      if (existsResponse.hits.hits.length > 0) {
        // Plaintext found, retrieve existing hashes instead of recomputing.
        const existingDoc = existsResponse.hits.hits[0]._source!;
        hashes = {
          md5: existingDoc.md5,
          sha1: existingDoc.sha1,
          sha256: existingDoc.sha256,
          sha512: existingDoc.sha512,
        };
      } else {
        // Plaintext not found, generate hashes and check if any hash already exists
        hashes = generateHashes(cleanQuery);

        // Duplicate guard: skip the insert if ANY one of the four digests is
        // already present on some document (minimum_should_match: 1).
        const hashExistsResponse = await esClient.search<HashDocument>({
          index: INDEX_NAME,
          query: {
            bool: {
              should: [
                { term: { md5: hashes.md5 } },
                { term: { sha1: hashes.sha1 } },
                { term: { sha256: hashes.sha256 } },
                { term: { sha512: hashes.sha512 } },
              ],
              minimum_should_match: 1
            }
          }
        });

        if (hashExistsResponse.hits.hits.length === 0) {
          // No duplicates found, insert new document
          await esClient.index({
            index: INDEX_NAME,
            document: {
              ...hashes,
              created_at: new Date().toISOString()
            }
          });

          // Refresh index to make the document searchable immediately
          await esClient.indices.refresh({ index: INDEX_NAME });
        }
      }

      return NextResponse.json({
        found: true,
        isPlaintext: true,
        plaintext: cleanQuery,
        // wasGenerated: true when the plaintext was not already indexed
        // (digests were computed this request rather than read back).
        wasGenerated: existsResponse.hits.hits.length === 0,
        hashes: {
          md5: hashes.md5,
          sha1: hashes.sha1,
          sha256: hashes.sha256,
          sha512: hashes.sha512,
        }
      });
    }
  } catch (error) {
    console.error('Search error:', error);
    // NOTE(review): echoing error.message back to the client can leak
    // internal details (ES cluster/config errors) — consider returning a
    // generic message and keeping details server-side only.
    return NextResponse.json(
      { error: 'Internal server error', details: error instanceof Error ? error.message : 'Unknown error' },
      { status: 500 }
    );
  }
}