Refactor file handling in the BuyProduct and Request models; implement a file schema for better structure. Update routes to handle file uploads and downloads with improved error handling and logging. Adjust MongoDB connection management across scripts and routes for consistency.

2025-11-05 19:06:11 +03:00
parent 41b5cb6fae
commit 284be82e1e
15 changed files with 630 additions and 184 deletions
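The "file schema" referenced in the commit message is not shown in the hunks below; a minimal sketch of such a subdocument schema, assuming the field names used by the file objects assembled in the routes (id, name, url, type, size, uploadedAt, storagePath), could look like this. The actual schema in the models may differ.

// Hypothetical sketch of the file subdocument schema; field names follow the file
// objects built in the routes below, the real models/BuyProduct schema may differ.
const mongoose = require('mongoose');

const fileSchema = new mongoose.Schema(
  {
    id: { type: String, required: true },          // e.g. `file-${timestamp}`
    name: { type: String, required: true },        // original UTF-8 file name
    url: { type: String, required: true },         // public path, e.g. /uploads/buy-products/...
    type: { type: String },                        // MIME type
    size: { type: Number },                        // size in bytes
    uploadedAt: { type: Date, default: Date.now },
    storagePath: { type: String },                 // path relative to the uploads root
  },
  { _id: false }
);

module.exports = fileSchema;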

View File

@@ -7,7 +7,7 @@ const Request = require('../models/Request');
const BuyProduct = require('../models/BuyProduct');
const Message = require('../models/Message');
const Review = require('../models/Review');
const mongoose = require('mongoose');
const mongoose = require('../../../utils/mongoose');
const { Types } = mongoose;
const PRESET_COMPANY_ID = new Types.ObjectId('68fe2ccda3526c303ca06796');
@@ -116,9 +116,6 @@ const waitForDatabaseConnection = async () => {
const verifyAuth = async () => {
try {
if (!mongoose.connection.db) {
return false;
}
await mongoose.connection.db.admin().command({ listDatabases: 1 });
return true;
} catch (error) {
@@ -139,15 +136,17 @@ const waitForDatabaseConnection = async () => {
}
try {
// Wait for the connection (it is established automatically via server/utils/mongoose.ts)
await new Promise(resolve => setTimeout(resolve, 500));
if (mongoose.connection.readyState === 1) {
const authed = await verifyAuth();
if (authed) {
return;
}
const connection = await connectDB();
if (!connection) {
break;
}
const authed = await verifyAuth();
if (authed) {
return;
}
await mongoose.connection.close().catch(() => {});
} catch (error) {
if (!isAuthFailure(error)) {
throw error;
@@ -218,8 +217,12 @@ const initializeTestUser = async () => {
} catch (error) {
console.error('Error initializing test data:', error.message);
if (error?.code === 13 || /auth/i.test(error?.message || '')) {
if (process.env.DEV === 'true') {
console.error('Auth error detected. Connection managed by server/utils/mongoose.ts');
try {
await connectDB();
} catch (connectError) {
if (process.env.DEV === 'true') {
console.error('Failed to re-connect after auth error:', connectError.message);
}
}
}
}
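The hunk above interleaves the removed and added bodies of waitForDatabaseConnection; a sketch of the intended flow after this change, assuming connectDB, verifyAuth and isAuthFailure behave as used in the diff and that the attempt bookkeeping stays in the surrounding loop:

// Sketch only: connectDB(), verifyAuth() and isAuthFailure() come from the script above,
// and maxAttempts is an assumed parameter added here for illustration.
const waitForDatabaseConnection = async (maxAttempts = 5) => {
  for (let attempt = 1; attempt <= maxAttempts; attempt += 1) {
    try {
      const connection = await connectDB();
      if (!connection) break;                             // nothing more to retry against
      if (await verifyAuth()) return;                     // connected and authenticated
      await mongoose.connection.close().catch(() => {});  // drop the unauthenticated connection
    } catch (error) {
      if (!isAuthFailure(error)) throw error;             // only auth failures are retried
    }
  }
  throw new Error('Could not establish an authenticated MongoDB connection');
};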

View File

@@ -1,10 +1,11 @@
const express = require('express')
const fs = require('fs')
const path = require('path')
const router = express.Router()
const BuyDocument = require('../models/BuyDocument')
// Create remote-assets/docs directory if it doesn't exist
const docsDir = 'server/remote-assets/docs'
const docsDir = 'server/routers/remote-assets/docs'
if (!fs.existsSync(docsDir)) {
fs.mkdirSync(docsDir, { recursive: true })
}

View File

@@ -2,9 +2,10 @@ const express = require('express');
const router = express.Router();
const { verifyToken } = require('../middleware/auth');
const BuyProduct = require('../models/BuyProduct');
const path = require('path');
const fs = require('fs');
const multer = require('multer');
const UPLOADS_ROOT = 'server/remote-assets/uploads/buy-products';
const UPLOADS_ROOT = 'server/routers/remote-assets/uploads/buy-products';
const ensureDirectory = (dirPath) => {
if (!fs.existsSync(dirPath)) {
fs.mkdirSync(dirPath, { recursive: true });
@@ -23,18 +24,6 @@ const ALLOWED_MIME_TYPES = new Set([
'text/csv',
]);
const getExtension = (filename) => {
const lastDot = filename.lastIndexOf('.');
return lastDot > 0 ? filename.slice(lastDot) : '';
};
const getBasename = (filename) => {
const lastDot = filename.lastIndexOf('.');
const name = lastDot > 0 ? filename.slice(0, lastDot) : filename;
const lastSlash = Math.max(name.lastIndexOf('/'), name.lastIndexOf('\\'));
return lastSlash >= 0 ? name.slice(lastSlash + 1) : name;
};
const storage = multer.diskStorage({
destination: (req, file, cb) => {
const productId = req.params.id || 'common';
@@ -43,10 +32,12 @@ const storage = multer.diskStorage({
cb(null, productDir);
},
filename: (req, file, cb) => {
const originalExtension = getExtension(file.originalname);
const baseName = getBasename(file.originalname)
.replace(/[^a-zA-Z0-9-_]+/g, '_')
.toLowerCase();
// Fix the file name encoding from Latin1 to UTF-8
const fixedName = Buffer.from(file.originalname, 'latin1').toString('utf8');
const originalExtension = path.extname(fixedName) || '';
const baseName = path
.basename(fixedName, originalExtension)
.replace(/[<>:"/\\|?*\x00-\x1F]+/g, '_'); // Strip only characters invalid on Windows, keep Cyrillic
cb(null, `${Date.now()}_${baseName}${originalExtension}`);
},
});
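The filename callback relies on the assumption stated in its comment that multer reports originalname as Latin1-decoded bytes, so Cyrillic names arrive mangled; a minimal standalone illustration of the re-decoding trick:

// Illustration only: simulate a UTF-8 name that was mis-decoded as Latin1, then recover it.
const reported = Buffer.from('договор.pdf', 'utf8').toString('latin1'); // what the route receives
const fixed = Buffer.from(reported, 'latin1').toString('utf8');         // 'договор.pdf' again
console.log(reported, '->', fixed);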
@@ -241,7 +232,16 @@ router.post('/:id/files', verifyToken, handleSingleFileUpload, async (req, res)
}
// Only the product owner can add a file
if (product.companyId.toString() !== req.companyId.toString()) {
const productCompanyId = product.companyId?.toString() || product.companyId;
const requestCompanyId = req.companyId?.toString() || req.companyId;
console.log('[BuyProducts] Comparing company IDs:', {
productCompanyId,
requestCompanyId,
match: productCompanyId === requestCompanyId
});
if (productCompanyId !== requestCompanyId) {
return res.status(403).json({ error: 'Not authorized' });
}
@@ -253,28 +253,75 @@ router.post('/:id/files', verifyToken, handleSingleFileUpload, async (req, res)
return res.status(400).json({ error: 'File is required' });
}
const relativePath = `buy-products/${id}/${req.file.filename}`;
// Fix the file name encoding from Latin1 to UTF-8
const fixedFileName = Buffer.from(req.file.originalname, 'latin1').toString('utf8');
// Extract the timestamp from the file name created by multer (format: {timestamp}_{name}.ext)
const fileTimestamp = req.file.filename.split('_')[0];
// storagePath is relative to UPLOADS_ROOT (which already includes 'buy-products')
const relativePath = `${id}/${req.file.filename}`;
const file = {
id: `file-${Date.now()}`,
name: req.file.originalname,
url: `/uploads/${relativePath}`,
id: `file-${fileTimestamp}`, // Use the same timestamp as in the file name
name: fixedFileName,
url: `/uploads/buy-products/${relativePath}`,
type: req.file.mimetype,
size: req.file.size,
uploadedAt: new Date(),
storagePath: relativePath,
};
product.files.push(file);
await product.save();
console.log('[BuyProducts] Adding file to product:', {
productId: id,
fileName: file.name,
fileSize: file.size,
filePath: relativePath
});
console.log('[BuyProducts] File object:', JSON.stringify(file, null, 2));
// Use findByIdAndUpdate instead of save() to avoid validation issues
let updatedProduct;
try {
console.log('[BuyProducts] Calling findByIdAndUpdate with id:', id);
updatedProduct = await BuyProduct.findByIdAndUpdate(
id,
{
$push: { files: file },
$set: { updatedAt: new Date() }
},
{ new: true, runValidators: false }
);
console.log('[BuyProducts] findByIdAndUpdate completed');
} catch (updateError) {
console.error('[BuyProducts] findByIdAndUpdate error:', {
message: updateError.message,
name: updateError.name,
code: updateError.code
});
throw updateError;
}
if (!updatedProduct) {
throw new Error('Failed to update product with file');
}
console.log('[BuyProducts] File added successfully to product:', id);
log('[BuyProducts] File added to product:', id, file.name);
res.json(product);
res.json(updatedProduct);
} catch (error) {
console.error('[BuyProducts] Error adding file:', error.message);
console.error('[BuyProducts] Error stack:', error.stack);
console.error('[BuyProducts] Error name:', error.name);
if (error.errors) {
console.error('[BuyProducts] Validation errors:', JSON.stringify(error.errors, null, 2));
}
res.status(500).json({
error: 'Internal server error',
message: error.message,
details: error.errors || {},
});
}
});
@@ -303,7 +350,7 @@ router.delete('/:id/files/:fileId', verifyToken, async (req, res) => {
await product.save();
const storedPath = fileToRemove.storagePath || fileToRemove.url.replace(/^\/uploads\//, '');
const absolutePath = `server/remote-assets/uploads/${storedPath}`;
const absolutePath = `server/routers/remote-assets/uploads/${storedPath}`;
fs.promises.unlink(absolutePath).catch((unlinkError) => {
if (unlinkError && unlinkError.code !== 'ENOENT') {
@@ -391,4 +438,65 @@ router.get('/:id/acceptances', verifyToken, async (req, res) => {
}
});
// GET /buy-products/download/:id/:fileId - download a file
router.get('/download/:id/:fileId', verifyToken, async (req, res) => {
try {
console.log('[BuyProducts] Download request received:', {
productId: req.params.id,
fileId: req.params.fileId,
userId: req.userId,
companyId: req.companyId,
headers: req.headers.authorization
});
const { id, fileId } = req.params;
const product = await BuyProduct.findById(id);
if (!product) {
return res.status(404).json({ error: 'Product not found' });
}
const file = product.files.find((f) => f.id === fileId);
if (!file) {
return res.status(404).json({ error: 'File not found' });
}
// Build the absolute path to the file
const filePath = path.resolve(UPLOADS_ROOT, file.storagePath);
console.log('[BuyProducts] Trying to download file:', {
fileId: file.id,
fileName: file.name,
storagePath: file.storagePath,
absolutePath: filePath,
exists: fs.existsSync(filePath)
});
// Check that the file exists on disk
if (!fs.existsSync(filePath)) {
console.error('[BuyProducts] File not found on disk:', filePath);
return res.status(404).json({ error: 'File not found on disk' });
}
// Set the correct download headers with Cyrillic filename support
const encodedFileName = encodeURIComponent(file.name);
res.setHeader('Content-Type', file.type || 'application/octet-stream');
res.setHeader('Content-Disposition', `attachment; filename*=UTF-8''${encodedFileName}`);
res.setHeader('Content-Length', file.size);
// Send the file
res.sendFile(filePath, (err) => {
if (err) {
console.error('[BuyProducts] Error sending file:', err.message);
if (!res.headersSent) {
res.status(500).json({ error: 'Error downloading file' });
}
}
});
} catch (error) {
console.error('[BuyProducts] Error downloading file:', error.message);
res.status(500).json({ error: 'Internal server error' });
}
});
module.exports = router;
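For completeness, a hedged browser-side sketch of consuming the new download route, assuming the router is mounted at /buy-products and that verifyToken expects a Bearer token (both assumptions, not shown in this diff):

// Sketch of a client download that honours the RFC 5987 filename* header set above.
async function downloadProductFile(productId, fileId, token) {
  const res = await fetch(`/buy-products/download/${productId}/${fileId}`, {
    headers: { Authorization: `Bearer ${token}` },
  });
  if (!res.ok) throw new Error(`Download failed: ${res.status}`);

  // filename* carries the UTF-8 (e.g. Cyrillic) name set by the server.
  const disposition = res.headers.get('Content-Disposition') || '';
  const match = disposition.match(/filename\*=UTF-8''(.+)$/);
  const fileName = match ? decodeURIComponent(match[1]) : 'download';

  const blob = await res.blob();
  const link = document.createElement('a');
  link.href = URL.createObjectURL(blob);
  link.download = fileName;
  link.click();
  URL.revokeObjectURL(link.href);
}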

View File

@@ -5,7 +5,8 @@ const Company = require('../models/Company');
const Experience = require('../models/Experience');
const Request = require('../models/Request');
const Message = require('../models/Message');
const { Types } = require('mongoose');
const mongoose = require('../../../utils/mongoose');
const { Types } = mongoose;
// GET /my/info - get my company (requires auth) - MUST come BEFORE /:id
router.get('/my/info', verifyToken, async (req, res) => {

View File

@@ -2,7 +2,8 @@ const express = require('express');
const router = express.Router();
const { verifyToken } = require('../middleware/auth');
const Experience = require('../models/Experience');
const { Types } = require('mongoose');
const mongoose = require('../../../utils/mongoose');
const { Types } = mongoose;
// GET /experience - Get the company's work experience list
router.get('/', verifyToken, async (req, res) => {

View File

@@ -21,21 +21,23 @@ router.get('/aggregates', verifyToken, async (req, res) => {
const companyId = user.companyId.toString();
const [docsCount, acceptsCount, requestsCount] = await Promise.all([
BuyProduct.countDocuments({ companyId }),
Request.countDocuments({
$or: [
{ senderCompanyId: companyId, status: 'accepted' },
{ recipientCompanyId: companyId, status: 'accepted' }
]
}),
Request.countDocuments({
$or: [
{ senderCompanyId: companyId },
{ recipientCompanyId: companyId }
]
})
]);
// Fetch all BuyProduct documents to count files and acceptances
const buyProducts = await BuyProduct.find({ companyId });
// Document count: total number of files across all BuyProduct documents
const docsCount = buyProducts.reduce((total, product) => {
return total + (product.files ? product.files.length : 0);
}, 0);
// Acceptance count: total acceptedBy entries across all BuyProduct documents
const acceptsCount = buyProducts.reduce((total, product) => {
return total + (product.acceptedBy ? product.acceptedBy.length : 0);
}, 0);
// Count outgoing requests (only those sent by this company)
const requestsCount = await Request.countDocuments({
senderCompanyId: companyId
});
res.json({
docsCount,

View File

@@ -2,6 +2,8 @@ const express = require('express');
const router = express.Router();
const { verifyToken } = require('../middleware/auth');
const Message = require('../models/Message');
const mongoose = require('../../../utils/mongoose');
const { ObjectId } = mongoose.Types;
// Logging helper gated by the DEV environment variable
const log = (message, data = '') => {
@@ -18,7 +20,6 @@ const log = (message, data = '') => {
router.get('/threads', verifyToken, async (req, res) => {
try {
const companyId = req.companyId;
const { ObjectId } = require('mongoose').Types;
log('[Messages] Fetching threads for companyId:', companyId, 'type:', typeof companyId);
@@ -146,7 +147,6 @@ router.post('/:threadId', verifyToken, async (req, res) => {
// Resolve recipientCompanyId to an ObjectId if needed
let recipientObjectId = recipientCompanyId;
const { ObjectId } = require('mongoose').Types;
try {
if (typeof recipientCompanyId === 'string' && ObjectId.isValid(recipientCompanyId)) {
recipientObjectId = new ObjectId(recipientCompanyId);
@@ -210,7 +210,6 @@ router.post('/admin/migrate-fix-recipients', async (req, res) => {
// If recipientCompanyId is not set or wrong - fix it
if (!message.recipientCompanyId || message.recipientCompanyId.toString() !== expectedRecipient) {
const { ObjectId } = require('mongoose').Types;
let recipientObjectId = expectedRecipient;
try {
if (typeof expectedRecipient === 'string' && ObjectId.isValid(expectedRecipient)) {

View File

@@ -3,8 +3,10 @@ const router = express.Router();
const { verifyToken } = require('../middleware/auth');
const Request = require('../models/Request');
const BuyProduct = require('../models/BuyProduct');
const path = require('path');
const fs = require('fs');
const multer = require('multer');
const mongoose = require('../../../utils/mongoose');
// Logging helper gated by the DEV environment variable
const log = (message, data = '') => {
@@ -17,7 +19,7 @@ const log = (message, data = '') => {
}
};
const REQUESTS_UPLOAD_ROOT = 'server/remote-assets/uploads/requests';
const REQUESTS_UPLOAD_ROOT = 'server/routers/remote-assets/uploads/requests';
const ensureDirectory = (dirPath) => {
if (!fs.existsSync(dirPath)) {
@@ -37,28 +39,17 @@ const ALLOWED_REQUEST_MIME_TYPES = new Set([
'text/csv',
]);
const getExtension = (filename) => {
const lastDot = filename.lastIndexOf('.');
return lastDot > 0 ? filename.slice(lastDot) : '';
};
const getBasename = (filename) => {
const lastDot = filename.lastIndexOf('.');
const name = lastDot > 0 ? filename.slice(0, lastDot) : filename;
const lastSlash = Math.max(name.lastIndexOf('/'), name.lastIndexOf('\\'));
return lastSlash >= 0 ? name.slice(lastSlash + 1) : name;
};
const storage = multer.diskStorage({
destination: (req, file, cb) => {
const subfolder = req.requestUploadSubfolder || '';
const destinationDir = subfolder ? `${REQUESTS_UPLOAD_ROOT}/${subfolder}` : REQUESTS_UPLOAD_ROOT;
const destinationDir = `${REQUESTS_UPLOAD_ROOT}/${subfolder}`;
ensureDirectory(destinationDir);
cb(null, destinationDir);
},
filename: (req, file, cb) => {
const extension = getExtension(file.originalname);
const baseName = getBasename(file.originalname)
const extension = path.extname(file.originalname) || '';
const baseName = path
.basename(file.originalname, extension)
.replace(/[^a-zA-Z0-9-_]+/g, '_')
.toLowerCase();
cb(null, `${Date.now()}_${baseName}${extension}`);
@@ -107,7 +98,7 @@ const cleanupUploadedFiles = async (req) => {
const subfolder = req.requestUploadSubfolder || '';
const removalTasks = req.files.map((file) => {
const filePath = subfolder ? `${REQUESTS_UPLOAD_ROOT}/${subfolder}/${file.filename}` : `${REQUESTS_UPLOAD_ROOT}/${file.filename}`;
const filePath = `${REQUESTS_UPLOAD_ROOT}/${subfolder}/${file.filename}`;
return fs.promises.unlink(filePath).catch((error) => {
if (error.code !== 'ENOENT') {
console.error('[Requests] Failed to cleanup uploaded file:', error.message);
@@ -125,7 +116,7 @@ const mapFilesToMetadata = (req) => {
const subfolder = req.requestUploadSubfolder || '';
return req.files.map((file) => {
const relativePath = subfolder ? `requests/${subfolder}/${file.filename}` : `requests/${file.filename}`;
const relativePath = `requests/${subfolder}/${file.filename}`;
return {
id: `file-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`,
name: file.originalname,
@@ -169,7 +160,7 @@ const removeStoredFiles = async (files = []) => {
const tasks = files
.filter((file) => file && file.storagePath)
.map((file) => {
const absolutePath = `server/remote-assets/uploads/${file.storagePath}`;
const absolutePath = `server/routers/remote-assets/uploads/${file.storagePath}`;
return fs.promises.unlink(absolutePath).catch((error) => {
if (error.code !== 'ENOENT') {
console.error('[Requests] Failed to remove stored file:', error.message);
@@ -255,24 +246,61 @@ router.post(
return res.status(400).json({ error: 'At least one recipient is required' });
}
if (!subject && productId) {
let uploadedFiles = mapFilesToMetadata(req);
console.log('========================');
console.log('[Requests] Initial uploadedFiles:', uploadedFiles.length);
console.log('[Requests] ProductId:', productId);
// If a productId is provided, fetch the product data
if (productId) {
try {
const product = await BuyProduct.findById(productId);
console.log('[Requests] Product found:', product ? product.name : 'null');
console.log('[Requests] Product files count:', product?.files?.length || 0);
if (product && product.files) {
console.log('[Requests] Product files:', JSON.stringify(product.files, null, 2));
}
if (product) {
subject = product.name;
// Take the subject from the product if it was not provided
if (!subject) {
subject = product.name;
}
// If no files were uploaded manually, use the product's files
if (uploadedFiles.length === 0 && product.files && product.files.length > 0) {
console.log('[Requests] ✅ Copying files from product...');
// Copy the files from the product, adjusting the path for the request
uploadedFiles = product.files.map(file => ({
id: file.id || `file-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`,
name: file.name,
url: file.url,
type: file.type,
size: file.size,
uploadedAt: file.uploadedAt || new Date(),
storagePath: file.storagePath || file.url.replace('/uploads/', ''),
}));
console.log('[Requests] ✅ Using', uploadedFiles.length, 'files from product:', productId);
console.log('[Requests] ✅ Copied files:', JSON.stringify(uploadedFiles, null, 2));
} else {
console.log('[Requests] ❌ NOT copying files. uploadedFiles.length:', uploadedFiles.length, 'product.files.length:', product.files?.length || 0);
}
}
} catch (lookupError) {
console.error('[Requests] Failed to lookup product for subject:', lookupError.message);
console.error('[Requests] Failed to lookup product:', lookupError.message);
console.error(lookupError.stack);
}
}
console.log('[Requests] Final uploadedFiles for saving:', JSON.stringify(uploadedFiles, null, 2));
console.log('========================');
if (!subject) {
await cleanupUploadedFiles(req);
return res.status(400).json({ error: 'Subject is required' });
}
const uploadedFiles = mapFilesToMetadata(req);
const results = [];
for (const recipientCompanyId of recipients) {
try {
@@ -331,9 +359,17 @@ router.put(
async (req, res) => {
try {
const { id } = req.params;
console.log('[Requests] PUT /requests/:id called with id:', id);
console.log('[Requests] Request body:', req.body);
console.log('[Requests] Files:', req.files);
console.log('[Requests] CompanyId:', req.companyId);
const responseText = (req.body.response || '').trim();
const statusRaw = (req.body.status || 'accepted').toLowerCase();
const status = statusRaw === 'rejected' ? 'rejected' : 'accepted';
console.log('[Requests] Response text:', responseText);
console.log('[Requests] Status:', status);
if (req.invalidFiles && req.invalidFiles.length > 0) {
await cleanupUploadedFiles(req);
@@ -361,6 +397,8 @@ router.put(
}
const uploadedResponseFiles = mapFilesToMetadata(req);
console.log('[Requests] Uploaded response files count:', uploadedResponseFiles.length);
console.log('[Requests] Uploaded response files:', JSON.stringify(uploadedResponseFiles, null, 2));
if (uploadedResponseFiles.length > 0) {
await removeStoredFiles(request.responseFiles || []);
@@ -372,18 +410,126 @@ router.put(
request.respondedAt = new Date();
request.updatedAt = new Date();
await request.save();
let savedRequest;
try {
savedRequest = await request.save();
log('[Requests] Request responded:', id);
} catch (saveError) {
console.error('[Requests] Mongoose save failed, trying direct MongoDB update:', saveError.message);
// Fallback: use the MongoDB driver directly
const updateData = {
response: responseText,
status: status,
respondedAt: new Date(),
updatedAt: new Date()
};
if (uploadedResponseFiles.length > 0) {
updateData.responseFiles = uploadedResponseFiles;
}
const result = await mongoose.connection.collection('requests').findOneAndUpdate(
{ _id: new mongoose.Types.ObjectId(id) },
{ $set: updateData },
{ returnDocument: 'after' }
);
if (!result) {
throw new Error('Failed to update request');
}
savedRequest = result;
log('[Requests] Request responded via direct MongoDB update:', id);
}
log('[Requests] Request responded:', id);
res.json(request);
res.json(savedRequest);
} catch (error) {
console.error('[Requests] Error responding to request:', error.message);
console.error('[Requests] Error stack:', error.stack);
if (error.name === 'ValidationError') {
console.error('[Requests] Validation errors:', JSON.stringify(error.errors, null, 2));
}
res.status(500).json({ error: error.message });
}
}
);
// GET /requests/download/:id/:fileId - download a response file
router.get('/download/:id/:fileId', verifyToken, async (req, res) => {
try {
console.log('[Requests] Download request received:', {
requestId: req.params.id,
fileId: req.params.fileId,
userId: req.userId,
companyId: req.companyId,
});
const { id, fileId } = req.params;
const request = await Request.findById(id);
if (!request) {
return res.status(404).json({ error: 'Request not found' });
}
// Verify the user has access to the request (sender or recipient)
if (request.senderCompanyId !== req.companyId && request.recipientCompanyId !== req.companyId) {
return res.status(403).json({ error: 'Not authorized' });
}
// Look for the file in responseFiles or in the regular files
let file = request.responseFiles?.find((f) => f.id === fileId);
if (!file) {
file = request.files?.find((f) => f.id === fileId);
}
if (!file) {
return res.status(404).json({ error: 'File not found' });
}
// Build the absolute path to the file
// If storagePath does not start with 'requests/', the file comes from buy-products
let fullPath = file.storagePath;
if (!fullPath.startsWith('requests/')) {
fullPath = `buy-products/${fullPath}`;
}
const filePath = path.resolve(`server/routers/remote-assets/uploads/${fullPath}`);
console.log('[Requests] Trying to download file:', {
fileId: file.id,
fileName: file.name,
storagePath: file.storagePath,
absolutePath: filePath,
exists: fs.existsSync(filePath),
});
// Check that the file exists on disk
if (!fs.existsSync(filePath)) {
console.error('[Requests] File not found on disk:', filePath);
return res.status(404).json({ error: 'File not found on disk' });
}
// Set the correct download headers with Cyrillic filename support
const encodedFileName = encodeURIComponent(file.name);
res.setHeader('Content-Type', file.type || 'application/octet-stream');
res.setHeader('Content-Disposition', `attachment; filename*=UTF-8''${encodedFileName}`);
res.setHeader('Content-Length', file.size);
// Send the file
res.sendFile(filePath, (err) => {
if (err) {
console.error('[Requests] Error sending file:', err.message);
if (!res.headersSent) {
res.status(500).json({ error: 'Error sending file' });
}
} else {
log('[Requests] File downloaded:', file.name);
}
});
} catch (error) {
console.error('[Requests] Error downloading file:', error.message);
if (!res.headersSent) {
res.status(500).json({ error: error.message });
}
}
});
// DELETE /requests/:id - delete a request
router.delete('/:id', verifyToken, async (req, res) => {
try {

View File

@@ -54,10 +54,13 @@ router.get('/recommendations', verifyToken, async (req, res) => {
// GET /search - Search companies
router.get('/', verifyToken, async (req, res) => {
try {
console.log('[Search] === NEW VERSION WITH FIXED SIZE FILTER ===');
const {
query = '',
page = 1,
limit = 10,
offset, // Support offset for precise pagination
industries,
companySize,
geography,
@@ -65,8 +68,12 @@ router.get('/', verifyToken, async (req, res) => {
hasReviews,
hasAcceptedDocs,
sortBy = 'relevance',
sortOrder = 'desc'
sortOrder = 'desc',
minEmployees, // Custom filter: minimum number of employees
maxEmployees // Custom filter: maximum number of employees
} = req.query;
console.log('[Search] Filters:', { minEmployees, maxEmployees, companySize });
// Get the user's company so it can be excluded from the results
const User = require('../models/User');
@@ -135,12 +142,99 @@ router.get('/', verifyToken, async (req, res) => {
}
}
// Filter by company size
if (companySize) {
const sizeList = Array.isArray(companySize) ? companySize : [companySize];
if (sizeList.length > 0) {
filters.push({ companySize: { $in: sizeList } });
// Helper that parses a range from a string like "51-250" or "500+"
const parseEmployeeRange = (sizeStr) => {
if (sizeStr.includes('+')) {
const min = parseInt(sizeStr.replace('+', ''));
return { min, max: Infinity };
}
const parts = sizeStr.split('-');
return {
min: parseInt(parts[0]),
max: parts[1] ? parseInt(parts[1]) : parseInt(parts[0])
};
};
// Helper that checks whether two ranges overlap
const rangesOverlap = (range1, range2) => {
return range1.min <= range2.max && range1.max >= range2.min;
};
// Company size filter (checkboxes) or a custom range
// Important: this filter needs the full company list so the range-overlap check works correctly
let sizeFilteredIds = null;
if ((companySize && companySize.length > 0) || minEmployees || maxEmployees) {
// Fetch all companies (without other filters, since company size is a property-based filter)
const allCompanies = await Company.find({});
log('[Search] Employee size filter - checking companies:', allCompanies.length);
let matchingIds = [];
// If a custom range is provided, use it
if (minEmployees || maxEmployees) {
const customRange = {
min: minEmployees ? parseInt(minEmployees, 10) : 0,
max: maxEmployees ? parseInt(maxEmployees, 10) : Infinity
};
log('[Search] Custom employee range filter:', customRange);
matchingIds = allCompanies
.filter(company => {
if (!company.companySize) {
log('[Search] Company has no size:', company.fullName);
return false;
}
const companyRange = parseEmployeeRange(company.companySize);
const overlaps = rangesOverlap(companyRange, customRange);
log('[Search] Checking overlap:', {
company: company.fullName,
companyRange,
customRange,
overlaps
});
return overlaps;
})
.map(c => c._id);
log('[Search] Matching companies by custom range:', matchingIds.length);
}
// Otherwise use the checkbox values
else if (companySize && companySize.length > 0) {
const sizeList = Array.isArray(companySize) ? companySize : [companySize];
log('[Search] Company size checkboxes filter:', sizeList);
matchingIds = allCompanies
.filter(company => {
if (!company.companySize) {
return false;
}
const companyRange = parseEmployeeRange(company.companySize);
// Check for overlap with any of the selected ranges
const matches = sizeList.some(selectedSize => {
const filterRange = parseEmployeeRange(selectedSize);
const overlaps = rangesOverlap(companyRange, filterRange);
log('[Search] Check:', company.fullName, companyRange, 'vs', filterRange, '=', overlaps);
return overlaps;
});
return matches;
})
.map(c => c._id);
log('[Search] Matching companies by size checkboxes:', matchingIds.length);
}
// Save the IDs for further filtering
sizeFilteredIds = matchingIds;
log('[Search] Size filtered IDs count:', sizeFilteredIds.length);
}
// Filter by geography
@@ -170,13 +264,25 @@ router.get('/', verifyToken, async (req, res) => {
filters.push({ verified: true });
}
// Apply the company size filter (if one was set)
if (sizeFilteredIds !== null) {
if (sizeFilteredIds.length > 0) {
filters.push({ _id: { $in: sizeFilteredIds } });
log('[Search] Applied size filter, IDs:', sizeFilteredIds.length);
} else {
// If no companies match the size criteria, return an empty result
filters.push({ _id: null });
log('[Search] No companies match size criteria');
}
}
// Combine all filters
let filter = filters.length > 0 ? { $and: filters } : {};
// Pagination
const pageNum = parseInt(page) || 1;
// Pagination: use offset if provided, otherwise derive it from page
const limitNum = parseInt(limit) || 10;
const skip = (pageNum - 1) * limitNum;
const skip = offset !== undefined ? parseInt(offset) : ((parseInt(page) || 1) - 1) * limitNum;
const pageNum = offset !== undefined ? Math.floor(skip / limitNum) + 1 : parseInt(page) || 1;
// Sorting
let sortOptions = {};
@@ -228,3 +334,4 @@ router.get('/', verifyToken, async (req, res) => {
module.exports = router;
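As a quick sanity check on the size filter introduced above, the same parsing and overlap rules can be exercised standalone (functions adapted from the route; the values are illustrative only):

// Standalone check of the range-overlap logic used by the company-size filter.
const parseEmployeeRange = (sizeStr) => {
  if (sizeStr.includes('+')) {
    return { min: parseInt(sizeStr.replace('+', ''), 10), max: Infinity };
  }
  const [lo, hi] = sizeStr.split('-');
  return { min: parseInt(lo, 10), max: hi ? parseInt(hi, 10) : parseInt(lo, 10) };
};
const rangesOverlap = (a, b) => a.min <= b.max && a.max >= b.min;

// A "51-250" company matches a custom filter of at least 100 employees...
console.log(rangesOverlap(parseEmployeeRange('51-250'), { min: 100, max: Infinity })); // true
// ...but not the "500+" checkbox band.
console.log(rangesOverlap(parseEmployeeRange('51-250'), parseEmployeeRange('500+')));  // false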