Производительность Cursor | Глава 19

18 октября 2025
77

ЧАСТЬ VI. ОПТИМИЗАЦИЯ И МАСШТАБИРОВАНИЕ

Глава 19. Производительность Cursor

19.1. Оптимизация использования ресурсов

Мониторинг производительности Cursor


// .cursor/performance-config.json

{
"performance": {
"indexing": {
"maxConcurrentIndexes": 2,
"indexOnSave": true,
"incrementalIndexing": true,
"excludeLargeFiles": true,
"maxFileSize": "5MB"
},
"ai": {
"requestTimeout": 30000,
"maxConcurrentRequests": 3,
"cacheEnabled": true,
"cacheTTL": 3600
},
"editor": {
"maxFileSizeMB": 50,
"largeFileOptimizations": true,
"tokenization": "lazy"
}
}
}

Скрипт мониторинга производительности:


// scripts/monitor-cursor-performance.js

const os = require('os');
const fs = require('fs');
const path = require('path');

/**
 * Collects CPU, memory and disk metrics for the running Cursor/Node process,
 * prints periodic snapshots with simple recommendations, and can export the
 * raw samples to a JSON report.
 */
class CursorPerformanceMonitor {
  constructor() {
    // Raw samples, appended on every collectMetrics() call.
    this.metrics = {
      cpu: [],
      memory: [],
      diskIO: [],
      networkLatency: []
    };
    this.startTime = Date.now();
    this.interval = null; // periodic sampling timer handle
  }

  /**
   * Sample CPU/memory/disk usage, append the raw numbers to this.metrics
   * and print a human-readable snapshot with recommendations.
   */
  collectMetrics() {
    const cpuUsage = process.cpuUsage();
    const memUsage = process.memoryUsage();

    this.metrics.cpu.push({
      timestamp: Date.now(),
      user: cpuUsage.user,
      system: cpuUsage.system
    });

    this.metrics.memory.push({
      timestamp: Date.now(),
      heapUsed: memUsage.heapUsed,
      heapTotal: memUsage.heapTotal,
      external: memUsage.external,
      rss: memUsage.rss
    });

    // System memory
    const totalMem = os.totalmem();
    const freeMem = os.freemem();
    const usedMem = totalMem - freeMem;

    console.log('\n' + '='.repeat(60));
    console.log('CURSOR PERFORMANCE METRICS');
    console.log('='.repeat(60));

    console.log('\nCPU Usage:');
    // process.cpuUsage() reports microseconds — convert to seconds.
    console.log(` User: ${(cpuUsage.user / 1000000).toFixed(2)}s`);
    console.log(` System: ${(cpuUsage.system / 1000000).toFixed(2)}s`);
    console.log(` Load Average: ${os.loadavg().map(l => l.toFixed(2)).join(', ')}`);

    console.log('\nMemory Usage:');
    console.log(` Heap Used: ${this.formatBytes(memUsage.heapUsed)}`);
    console.log(` Heap Total: ${this.formatBytes(memUsage.heapTotal)}`);
    console.log(` RSS: ${this.formatBytes(memUsage.rss)}`);
    console.log(` System Used: ${this.formatBytes(usedMem)} / ${this.formatBytes(totalMem)} (${((usedMem / totalMem) * 100).toFixed(1)}%)`);

    console.log('\nDisk Usage:');
    const cursorDir = path.join(os.homedir(), '.cursor');
    if (fs.existsSync(cursorDir)) {
      const size = this.getDirectorySize(cursorDir);
      console.log(` Cursor Data: ${this.formatBytes(size)}`);
    }

    console.log('\nRecommendations:');
    if (memUsage.heapUsed / memUsage.heapTotal > 0.9) {
      console.log(' High memory usage detected');
      console.log(' Consider closing unused files or restarting Cursor');
    }

    if (usedMem / totalMem > 0.9) {
      console.log(' System memory usage is high');
      console.log(' Close other applications');
    }

    const uptime = (Date.now() - this.startTime) / 1000 / 60; // minutes
    if (uptime > 480) { // 8 hours
      console.log(` Cursor has been running for ${uptime.toFixed(0)} minutes`);
      console.log(' Consider restarting for optimal performance');
    }
  }

  /**
   * Human-readable byte count (e.g. 1536 -> "1.5 KB").
   */
  formatBytes(bytes) {
    if (bytes === 0) return '0 Bytes';
    const k = 1024;
    const sizes = ['Bytes', 'KB', 'MB', 'GB'];
    const i = Math.floor(Math.log(bytes) / Math.log(k));
    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
  }

  /**
   * Recursively total the size of a directory in bytes.
   * Unreadable entries and broken symlinks are skipped instead of crashing
   * the whole scan (the original statSync would throw).
   */
  getDirectorySize(directory) {
    let size = 0;
    let entries;
    try {
      entries = fs.readdirSync(directory);
    } catch {
      return 0; // unreadable directory — skip it
    }

    for (const entry of entries) {
      const filePath = path.join(directory, entry);
      try {
        // lstat so we do not follow (possibly dangling) symlinks
        const stats = fs.lstatSync(filePath);
        if (stats.isDirectory()) {
          size += this.getDirectorySize(filePath);
        } else {
          size += stats.size;
        }
      } catch {
        // entry removed mid-scan or unreadable — ignore
      }
    }

    return size;
  }

  /** Begin periodic sampling: one snapshot immediately, then every intervalMs. */
  startMonitoring(intervalMs = 60000) {
    console.log('Starting Cursor performance monitoring...');
    console.log(`Interval: ${intervalMs}ms\n`);

    this.collectMetrics();
    this.interval = setInterval(() => this.collectMetrics(), intervalMs);
  }

  /** Stop periodic sampling (safe to call when not running). */
  stopMonitoring() {
    if (this.interval) {
      clearInterval(this.interval);
      this.interval = null; // allow restart without a stale handle
    }
  }

  /** Dump all collected samples plus system info to a JSON file. */
  exportMetrics(filename = 'cursor-metrics.json') {
    const report = {
      startTime: new Date(this.startTime).toISOString(),
      endTime: new Date().toISOString(),
      duration: Date.now() - this.startTime,
      metrics: this.metrics,
      system: {
        platform: os.platform(),
        arch: os.arch(),
        cpus: os.cpus().length,
        totalMemory: os.totalmem(),
        nodeVersion: process.version
      }
    };

    fs.writeFileSync(filename, JSON.stringify(report, null, 2));
    // Fix: the original printed the literal "$(unknown)" instead of the path.
    console.log(`\nMetrics exported to ${filename}`);
  }
}

// Usage: sample every minute, shut down and export after one hour.
const perfMonitor = new CursorPerformanceMonitor();
perfMonitor.startMonitoring(60000); // Every minute

const ONE_HOUR_MS = 3600000;
setTimeout(() => {
  perfMonitor.stopMonitoring();
  perfMonitor.exportMetrics();
  process.exit(0);
}, ONE_HOUR_MS);

19.2. Управление кэшем

Оптимизация кэширования


// utils/cache-manager.ts

import { createClient } from 'redis';
import { LRUCache } from 'lru-cache';

/**
 * Two-level cache: an in-process LRU (L1) in front of an optional Redis
 * instance (L2). Reads fall through L1 → L2 (promoting hits into L1);
 * writes go to both layers. Redis failures degrade gracefully to L1-only.
 */
export class CacheManager {
  private redis: any;
  private localCache: LRUCache<string, any>;
  private useRedis: boolean;

  /**
   * @param useRedis When false, the manager operates purely in memory.
   */
  constructor(useRedis = true) {
    this.useRedis = useRedis;

    // Local in-memory cache (L1)
    this.localCache = new LRUCache({
      max: 500, // Maximum items
      ttl: 1000 * 60 * 5, // 5 minutes
      maxSize: 50 * 1024 * 1024, // 50MB
      sizeCalculation: (value) => {
        return JSON.stringify(value).length;
      }
    });

    // Redis cache (L2). initRedis is async; a connection failure must not
    // escape as an unhandled promise rejection (the original fired it off
    // without a catch) — instead fall back to L1-only operation.
    if (useRedis) {
      this.initRedis().catch((err) => {
        console.error('Redis init error:', err);
        this.useRedis = false;
      });
    }
  }

  /** Create and connect the Redis client with a bounded reconnect back-off. */
  async initRedis() {
    this.redis = createClient({
      url: process.env.REDIS_URL || 'redis://localhost:6379',
      socket: {
        reconnectStrategy: (retries) => {
          if (retries > 10) {
            return new Error('Max reconnection attempts reached');
          }
          // linear back-off, capped at 3 seconds
          return Math.min(retries * 100, 3000);
        }
      }
    });

    this.redis.on('error', (err) => {
      console.error('Redis error:', err);
    });

    await this.redis.connect();
  }

  /**
   * Get from cache (L1 → L2 fallback). Returns null on a miss.
   * Note: a value stored as `undefined` is indistinguishable from a miss.
   */
  async get<T>(key: string): Promise<T | null> {
    // Try local cache first (L1)
    const localValue = this.localCache.get(key);
    if (localValue !== undefined) {
      return localValue as T;
    }

    // Try Redis (L2); on a hit, promote the value into L1.
    if (this.useRedis && this.redis) {
      try {
        const redisValue = await this.redis.get(key);
        if (redisValue) {
          const parsed = JSON.parse(redisValue);
          this.localCache.set(key, parsed);
          return parsed as T;
        }
      } catch (error) {
        console.error('Redis get error:', error);
      }
    }

    return null;
  }

  /**
   * Set in cache (L1 + L2).
   * @param ttl Time-to-live in seconds; omitted = each layer's default TTL.
   */
  async set(key: string, value: any, ttl?: number): Promise<void> {
    // Set in local cache (LRU ttl is in milliseconds)
    this.localCache.set(key, value, { ttl: ttl ? ttl * 1000 : undefined });

    // Set in Redis
    if (this.useRedis && this.redis) {
      try {
        const serialized = JSON.stringify(value);
        if (ttl) {
          await this.redis.setEx(key, ttl, serialized);
        } else {
          await this.redis.set(key, serialized);
        }
      } catch (error) {
        console.error('Redis set error:', error);
      }
    }
  }

  /** Delete a key from both layers. */
  async delete(key: string): Promise<void> {
    this.localCache.delete(key);

    if (this.useRedis && this.redis) {
      try {
        await this.redis.del(key);
      } catch (error) {
        console.error('Redis delete error:', error);
      }
    }
  }

  /**
   * Clear all cache entries in both layers.
   * Caution: flushAll wipes the entire Redis database, not just our keys.
   */
  async clear(): Promise<void> {
    this.localCache.clear();

    if (this.useRedis && this.redis) {
      try {
        await this.redis.flushAll();
      } catch (error) {
        console.error('Redis clear error:', error);
      }
    }
  }

  /** Cache statistics for both layers. */
  getStats() {
    return {
      local: {
        size: this.localCache.size,
        calculatedSize: this.localCache.calculatedSize,
        max: this.localCache.max
      },
      // Report the real connection state; the original echoed the config
      // flag and claimed "connected" even before/without a connection.
      redis: this.useRedis
        ? (this.redis?.isOpen ? 'connected' : 'connecting')
        : 'disabled'
    };
  }

  /**
   * Warm up cache with frequently accessed data (1 hour TTL per item).
   */
  async warmUp(dataLoader: () => Promise<Record<string, any>>) {
    console.log('Warming up cache...');

    const data = await dataLoader();

    for (const [key, value] of Object.entries(data)) {
      await this.set(key, value, 3600); // 1 hour TTL
    }

    console.log(`Cache warmed up with ${Object.keys(data).length} items`);
  }
}

// Singleton export
// Shared process-wide instance; the @Cacheable decorator below relies on it.
export const cache = new CacheManager();

// Usage with decorator pattern
export function Cacheable(ttl: number = 300) {
return function (
target: any,
propertyKey: string,
descriptor: PropertyDescriptor
) {
const originalMethod = descriptor.value;

descriptor.value = async function (...args: any[]) {
const cacheKey = `${target.constructor.name}:${propertyKey}:${JSON.stringify(args)}`;

// Try cache first
const cached = await cache.get(cacheKey);
if (cached !== null) {
return cached;
}

// Execute original method
const result = await originalMethod.apply(this, args);

// Cache result
await cache.set(cacheKey, result, ttl);

return result;
};

return descriptor;
};
}

// Example usage:
// NOTE(review): `db` is not defined in this file — presumably an ORM-style
// data-access object imported elsewhere; confirm before copying this example.
class UserService {
@Cacheable(600) // Cache for 10 minutes
async getUserById(id: string) {
return await db.users.findById(id);
}

@Cacheable(3600) // Cache for 1 hour
async getPublicProfile(username: string) {
return await db.users.findByUsername(username);
}
}

19.3. Работа с большими проектами

Оптимизация для монорепозиториев и крупных кодовых баз


You: Как оптимизировать Cursor для работы с большим монорепозиторием?

AI:

Конфигурация для больших проектов:


// .cursor/large-project-config.json

{
"workspace": {
"type": "monorepo",
"structure": "packages",
"autoDetect": true
},
"indexing": {
"strategy": "selective",
"priorityPaths": [
"packages/core/src",
"packages/shared/src",
"apps/main/src"
],
"excludePaths": [
"**/node_modules",
"**/dist",
"**/build",
"**/.next",
"**/coverage",
"**/*.test.ts",
"**/*.spec.ts",
"**/e2e"
],
"maxConcurrentIndexes": 4,
"indexOnDemand": true,
"incrementalOnly": true,
"fileWatching": {
"debounceMs": 2000,
"maxFiles": 50000
}
},
"ai": {
"contextWindow": {
"maxFiles": 10,
"maxTokens": 16000,
"smartSelection": true,
"relevanceThreshold": 0.7
},
"caching": {
"enabled": true,
"strategy": "aggressive",
"ttl": 7200,
"maxSize": "500MB"
}
},
"performance": {
"lazyLoading": true,
"virtualScrolling": true,
"syntaxHighlighting": "onDemand",
"linting": {
"runOnSave": false,
"runOnType": false,
"runManually": true
}
}
}

Скрипт для анализа и оптимизации монорепозитория:


// scripts/optimize-monorepo.ts

import * as fs from 'fs';
import * as path from 'path';
import { glob } from 'glob';
import * as crypto from 'crypto';

// Aggregate statistics produced by MonorepoOptimizer.analyze().
interface ProjectStats {
totalFiles: number; // total files scanned (after ignore patterns)
totalSize: number; // cumulative size in bytes
filesByType: Record<string, number>; // file count keyed by lowercase extension
sizeByType: Record<string, number>; // byte total keyed by lowercase extension
largestFiles: Array<{ path: string; size: number }>; // files over 1MB, largest first (top 20)
recommendations: string[]; // human-readable advice lines
}

/**
 * Scans a (mono)repository, aggregates per-extension statistics, flags
 * oversized/test/generated files, and can emit a .cursorignore plus a
 * Cursor workspace config tuned for the detected layout.
 */
class MonorepoOptimizer {
  private rootPath: string;
  private stats: ProjectStats;
  // Counted per-file during analyze(). path.extname() collapses
  // "foo.test.ts" to ".ts" and "lib.min.js" to ".js", so these categories
  // can never be recovered from the filesByType extension keys.
  private testFileCount = 0;
  private generatedFileCount = 0;

  constructor(rootPath: string) {
    this.rootPath = rootPath;
    this.stats = {
      totalFiles: 0,
      totalSize: 0,
      filesByType: {},
      sizeByType: {},
      largestFiles: [],
      recommendations: []
    };
  }

  /**
   * Walk the repository (skipping node_modules/dist/build/.git/coverage),
   * collect size and type statistics, then derive recommendations.
   */
  async analyze(): Promise<ProjectStats> {
    console.log('Analyzing monorepo structure...\n');

    // Find all files
    const files = await glob('**/*', {
      cwd: this.rootPath,
      ignore: [
        '**/node_modules/**',
        '**/dist/**',
        '**/build/**',
        '**/.git/**',
        '**/coverage/**'
      ],
      nodir: true,
      absolute: true
    });

    console.log(`Found ${files.length} files\n`);

    // Analyze each file
    for (const file of files) {
      let stats;
      try {
        stats = fs.statSync(file);
      } catch {
        continue; // broken symlink or file removed mid-scan — skip it
      }
      const ext = path.extname(file).toLowerCase();
      const base = path.basename(file).toLowerCase();

      this.stats.totalFiles++;
      this.stats.totalSize += stats.size;

      // Count by type
      this.stats.filesByType[ext] = (this.stats.filesByType[ext] || 0) + 1;
      this.stats.sizeByType[ext] = (this.stats.sizeByType[ext] || 0) + stats.size;

      // Classify test / generated files from the full basename. The
      // original matched these patterns against extname() output, where
      // ".test." or ".min.js" can never appear, so both counts were
      // always zero.
      if (base.includes('.test.') || base.includes('.spec.')) {
        this.testFileCount++;
      }
      if (base.endsWith('.min.js') || base.endsWith('.map') || base.endsWith('.d.ts')) {
        this.generatedFileCount++;
      }

      // Track large files
      if (stats.size > 1024 * 1024) { // > 1MB
        this.stats.largestFiles.push({
          path: path.relative(this.rootPath, file),
          size: stats.size
        });
      }
    }

    // Keep only the 20 largest files, biggest first
    this.stats.largestFiles.sort((a, b) => b.size - a.size);
    this.stats.largestFiles = this.stats.largestFiles.slice(0, 20);

    this.generateRecommendations();

    return this.stats;
  }

  /** Derive human-readable recommendations from the collected stats. */
  private generateRecommendations(): void {
    const recommendations: string[] = [];

    // Check total size
    const totalSizeMB = this.stats.totalSize / (1024 * 1024);
    if (totalSizeMB > 5000) {
      recommendations.push(
        `Large repository size: ${totalSizeMB.toFixed(0)}MB`
      );
      recommendations.push(
        ' Consider splitting into multiple repositories or using Git LFS for large files'
      );
    }

    // Check file count
    if (this.stats.totalFiles > 50000) {
      recommendations.push(
        `High file count: ${this.stats.totalFiles.toLocaleString()}`
      );
      recommendations.push(
        ' Enable selective indexing in Cursor to improve performance'
      );
    }

    // Check for large individual files
    if (this.stats.largestFiles.length > 0) {
      const largest = this.stats.largestFiles[0];
      const sizeMB = largest.size / (1024 * 1024);

      if (sizeMB > 10) {
        recommendations.push(
          `Very large file detected: ${largest.path} (${sizeMB.toFixed(1)}MB)`
        );
        recommendations.push(
          ' Consider excluding from Cursor indexing or using Git LFS'
        );
      }
    }

    // Check for generated files (counted per-file in analyze())
    if (this.generatedFileCount > 1000) {
      recommendations.push(
        `Many generated files: ${this.generatedFileCount}`
      );
      recommendations.push(
        ' Add generated files to .cursorignore to improve performance'
      );
    }

    // Check for test files (also counted per-file in analyze())
    if (this.testFileCount > this.stats.totalFiles * 0.3) {
      recommendations.push(
        `High proportion of test files: ${this.testFileCount} (${((this.testFileCount / this.stats.totalFiles) * 100).toFixed(1)}%)`
      );
      recommendations.push(
        ' Consider excluding test files from AI context unless actively working on tests'
      );
    }

    this.stats.recommendations = recommendations;
  }

  /** Print the analysis as a formatted console report. */
  printReport(): void {
    console.log('\n' + '='.repeat(80));
    console.log('MONOREPO ANALYSIS REPORT');
    console.log('='.repeat(80));

    console.log('\nOVERVIEW:');
    console.log(` Total Files: ${this.stats.totalFiles.toLocaleString()}`);
    console.log(` Total Size: ${this.formatBytes(this.stats.totalSize)}`);

    console.log('\nFILE TYPES:');
    const sortedTypes = Object.entries(this.stats.filesByType)
      .sort((a, b) => b[1] - a[1])
      .slice(0, 15);

    for (const [ext, count] of sortedTypes) {
      const size = this.stats.sizeByType[ext] || 0;
      // Pad the whole label: the original wrote
      //   `${ext || '(no extension)'.padEnd(15)}`
      // where, due to precedence, padEnd applied only to the literal and
      // real extensions were never padded, misaligning the columns.
      console.log(
        ` ${(ext || '(no extension)').padEnd(15)}: ${count.toString().padStart(6)} files ${this.formatBytes(size).padStart(10)}`
      );
    }

    if (this.stats.largestFiles.length > 0) {
      console.log('\nLARGEST FILES:');
      for (const file of this.stats.largestFiles.slice(0, 10)) {
        console.log(` ${this.formatBytes(file.size).padStart(10)} ${file.path}`);
      }
    }

    if (this.stats.recommendations.length > 0) {
      console.log('\nRECOMMENDATIONS:');
      this.stats.recommendations.forEach(rec => console.log(rec));
    }

    console.log('\n' + '='.repeat(80));
  }

  /**
   * Write a .cursorignore covering dependencies, build outputs, coverage,
   * every analyzed file over 5MB, and common generated-file globs.
   */
  async generateCursorIgnore(): Promise<void> {
    const ignorePatterns: string[] = [
      '# Generated by monorepo optimizer',
      '',
      '# Dependencies',
      'node_modules/',
      'bower_components/',
      '',
      '# Build outputs',
      'dist/',
      'build/',
      'out/',
      '.next/',
      '',
      '# Test coverage',
      'coverage/',
      '.nyc_output/',
      '',
      '# Large files (> 5MB)',
    ];

    // Add large files to ignore
    for (const file of this.stats.largestFiles) {
      if (file.size > 5 * 1024 * 1024) {
        ignorePatterns.push(file.path);
      }
    }

    // Add generated files
    ignorePatterns.push('', '# Generated files');
    ignorePatterns.push('*.min.js');
    ignorePatterns.push('*.map');
    ignorePatterns.push('*.d.ts');

    const cursorIgnorePath = path.join(this.rootPath, '.cursorignore');
    fs.writeFileSync(cursorIgnorePath, ignorePatterns.join('\n'));

    console.log(`\n✓ Generated .cursorignore with ${ignorePatterns.length} patterns`);
  }

  /**
   * Detect published packages (package.json with a name, not private) and
   * write a Cursor workspace config prioritizing their src/ directories.
   */
  async createWorkspaceConfig(): Promise<void> {
    // Detect workspace structure
    const packageJsons = await glob('**/package.json', {
      cwd: this.rootPath,
      ignore: ['**/node_modules/**']
    });

    const workspaces: string[] = [];

    for (const pkgPath of packageJsons) {
      const fullPath = path.join(this.rootPath, pkgPath);
      let pkg: any;
      try {
        pkg = JSON.parse(fs.readFileSync(fullPath, 'utf-8'));
      } catch {
        continue; // malformed package.json — skip rather than abort the run
      }

      if (pkg.name && !pkg.private) {
        workspaces.push(path.dirname(pkgPath));
      }
    }

    const config = {
      "workspace.multiRoot": true,
      "workspace.folders": workspaces.map(ws => ({
        path: ws,
        name: path.basename(ws)
      })),
      "cursor.indexing.priorityPaths": workspaces.map(ws => `${ws}/src`),
      "cursor.ai.maxContextFiles": 15,
      "cursor.performance.lazyLoading": true
    };

    const configPath = path.join(this.rootPath, '.cursor', 'workspace.json');
    fs.mkdirSync(path.dirname(configPath), { recursive: true });
    fs.writeFileSync(configPath, JSON.stringify(config, null, 2));

    console.log(`✓ Generated workspace config for ${workspaces.length} packages`);
  }

  /** Human-readable byte count (e.g. 1536 -> "1.5 KB"). */
  private formatBytes(bytes: number): string {
    if (bytes === 0) return '0 B';
    const k = 1024;
    const sizes = ['B', 'KB', 'MB', 'GB'];
    const i = Math.floor(Math.log(bytes) / Math.log(k));
    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
  }
}

// CLI entry point: `ts-node optimize-monorepo.ts [path] [--generate]`.
async function main() {
  const targetPath = process.argv[2] || process.cwd();

  console.log(`Analyzing monorepo at: ${targetPath}\n`);

  const optimizer = new MonorepoOptimizer(targetPath);
  await optimizer.analyze();
  optimizer.printReport();

  // Only write .cursorignore / workspace config when explicitly requested.
  if (process.argv.includes('--generate')) {
    await optimizer.generateCursorIgnore();
    await optimizer.createWorkspaceConfig();
  } else {
    console.log('\nRun with --generate flag to create optimization files');
  }
}

main().catch(console.error);

Workspace разделение для монорепозитория:


// .cursor/workspace-split.json

{
"workspaces": [
{
"name": "Core Packages",
"path": "packages/core",
"indexing": {
"priority": "high",
"enabled": true
},
"ai": {
"contextPriority": 1.0
}
},
{
"name": "Shared Libraries",
"path": "packages/shared",
"indexing": {
"priority": "medium",
"enabled": true
},
"ai": {
"contextPriority": 0.8
}
},
{
"name": "Applications",
"path": "apps/*",
"indexing": {
"priority": "medium",
"enabled": true,
"onDemand": true
},
"ai": {
"contextPriority": 0.6
}
},
{
"name": "Tests",
"path": "**/*.test.ts",
"indexing": {
"priority": "low",
"enabled": false
},
"ai": {
"contextPriority": 0.2,
"includeOnlyWhenActive": true
}
}
],
"contextStrategy": {
"mode": "smart",
"maxWorkspaces": 3,
"autoSwitch": true,
"switchTriggers": [
"fileOpen",
"search",
"aiRequest"
]
}
}

19.4. Настройка производительности AI

Тонкая настройка AI для максимальной эффективности


You: Как настроить AI для лучшей производительности и качества ответов?

AI:

Продвинутая конфигурация AI:


// .cursor/ai-performance.json

{
"ai": {
"models": {
"chat": {
"primary": "gpt-4",
"fallback": "gpt-3.5-turbo",
"temperature": 0.3,
"maxTokens": 4096,
"topP": 0.9,
"frequencyPenalty": 0.0,
"presencePenalty": 0.0
},
"autocomplete": {
"model": "cursor-small",
"temperature": 0.2,
"maxTokens": 256,
"latencyTarget": 100
},
"codeGeneration": {
"model": "gpt-4-turbo",
"temperature": 0.4,
"maxTokens": 8192,
"streaming": true
}
},
"context": {
"strategy": "adaptive",
"maxFiles": 20,
"maxTokensPerFile": 2000,
"relevanceScoring": {
"enabled": true,
"algorithm": "semantic",
"threshold": 0.6,
"decay": 0.85
},
"fileSelection": {
"prioritize": [
"currentFile",
"recentlyEdited",
"importedFiles",
"similarFiles"
],
"weights": {
"currentFile": 1.0,
"recentlyEdited": 0.8,
"importedFiles": 0.7,
"similarFiles": 0.6
}
}
},
"caching": {
"enabled": true,
"strategy": "semantic",
"ttl": 3600,
"maxSize": "1GB",
"compression": true,
"layers": {
"embedding": {
"enabled": true,
"ttl": 86400
},
"response": {
"enabled": true,
"ttl": 3600
},
"context": {
"enabled": true,
"ttl": 1800
}
}
},
"optimization": {
"batchRequests": true,
"requestCoalescing": true,
"parallelProcessing": true,
"maxConcurrent": 3,
"requestQueue": {
"maxSize": 10,
"prioritization": "fifo",
"timeout": 30000
}
},
"smartFeatures": {
"contextWindow": {
"dynamic": true,
"expandOnDemand": true,
"shrinkOnIdle": true
},
"prefetching": {
"enabled": true,
"predictNextRequest": true,
"prefetchDepth": 2
},
"adaptiveLearning": {
"enabled": true,
"learnFromCorrections": true,
"personalizeResponses": true
}
}
}
}

Класс для управления AI производительностью:


// utils/ai-performance-manager.ts

import EventEmitter from 'events';

// A unit of work submitted to the AI backend.
interface AIRequest {
id: string; // unique id assigned on submission
type: 'chat' | 'autocomplete' | 'codeGen';
prompt: string;
context: any[]; // supporting context items scored/trimmed before the call
timestamp: number; // submission time (ms epoch)
priority: number; // higher value = dequeued earlier when at capacity
}

// Result returned to the submitter.
interface AIResponse {
id: string; // matches the originating request id
content: string;
tokensUsed: number; // 0 when served from cache
latency: number; // wall-clock ms (0 for cache hits)
fromCache: boolean;
}

// Aggregated counters since manager construction.
interface PerformanceMetrics {
requests: number;
avgLatency: number; // running average, ms
cacheHitRate: number; // running fraction in [0, 1]
tokensUsed: number;
errors: number;
}

/**
 * Queues, caches and meters AI requests: at most `maxConcurrent` requests
 * run at once while the rest wait in a priority queue; identical requests
 * are served from an in-memory response cache; latency, token and error
 * metrics are aggregated for tuning.
 */
export class AIPerformanceManager extends EventEmitter {
  // Waiters for a free slot, ordered by descending priority. `release`
  // resolves the corresponding submitter's queueRequest() promise.
  private requestQueue: Array<{ request: AIRequest; release: () => void }> = [];
  private activeRequests: Map<string, AIRequest> = new Map();
  private responseCache: Map<string, any> = new Map();
  private metrics: PerformanceMetrics;
  private maxConcurrent: number = 3;

  constructor() {
    super();
    this.metrics = {
      requests: 0,
      avgLatency: 0,
      cacheHitRate: 0,
      tokensUsed: 0,
      errors: 0
    };
  }

  /**
   * Умная очередь запросов с приоритизацией.
   * Submit a request: served from cache when a valid entry exists,
   * otherwise waits for a free slot (priority order) and calls the AI.
   */
  async submitRequest(request: Omit<AIRequest, 'id' | 'timestamp'>): Promise<AIResponse> {
    const fullRequest: AIRequest = {
      ...request,
      id: this.generateRequestId(),
      timestamp: Date.now()
    };

    // Check cache first
    const cacheKey = this.getCacheKey(fullRequest);
    const cached = this.responseCache.get(cacheKey);

    if (cached && this.isCacheValid(cached)) {
      // Fold this hit into the running hit-rate average.
      this.metrics.cacheHitRate =
        (this.metrics.cacheHitRate * this.metrics.requests + 1) / (this.metrics.requests + 1);
      this.metrics.requests++;

      return {
        id: fullRequest.id,
        content: cached.content,
        tokensUsed: 0,
        latency: 0,
        fromCache: true
      };
    }

    // At capacity: park in the queue until a finishing request releases us.
    if (this.activeRequests.size >= this.maxConcurrent) {
      await this.queueRequest(fullRequest);
    }

    return this.processRequest(fullRequest);
  }

  /**
   * Insert the request into the priority queue and resolve once a slot
   * frees up. (The original emitted a broadcast event AND processed the
   * shifted request directly from processRequest's finally block, so each
   * queued request could be processed twice.)
   */
  private queueRequest(request: AIRequest): Promise<void> {
    return new Promise((resolve) => {
      const entry = { request, release: resolve };

      // Insert before the first lower-priority waiter (descending order).
      const insertIndex = this.requestQueue.findIndex(
        (queued) => queued.request.priority < request.priority
      );

      if (insertIndex === -1) {
        this.requestQueue.push(entry);
      } else {
        this.requestQueue.splice(insertIndex, 0, entry);
      }
    });
  }

  /** Run one request end-to-end: context optimization, AI call, metrics, cache. */
  private async processRequest(request: AIRequest): Promise<AIResponse> {
    this.activeRequests.set(request.id, request);
    const startTime = Date.now();

    try {
      // Optimize context to fit the token budget
      const optimizedContext = await this.optimizeContext(request.context);

      // Make AI request
      const response = await this.callAI(request, optimizedContext);

      // Update metrics
      const latency = Date.now() - startTime;
      this.updateMetrics(response, latency);

      // Cache response for future identical requests
      const cacheKey = this.getCacheKey(request);
      this.responseCache.set(cacheKey, {
        ...response,
        cachedAt: Date.now()
      });

      return {
        ...response,
        latency,
        fromCache: false
      };

    } catch (error) {
      this.metrics.errors++;
      throw error;

    } finally {
      this.activeRequests.delete(request.id);

      // Release exactly one waiter; it resumes inside submitRequest and
      // calls processRequest itself, so each request runs exactly once.
      const next = this.requestQueue.shift();
      if (next) {
        this.emit('slot-available', next.request.id);
        next.release();
      }
    }
  }

  /**
   * Оптимизация контекста для уменьшения токенов.
   * Keep the highest-relevance items within an ~8000-token budget,
   * compressing high-relevance items that would not otherwise fit.
   */
  private async optimizeContext(context: any[]): Promise<any[]> {
    if (!context || context.length === 0) return [];

    // Sort by relevance
    const scoredContext = context.map(item => ({
      item,
      score: this.calculateRelevanceScore(item)
    }));

    scoredContext.sort((a, b) => b.score - a.score);

    // Token budget
    const maxTokens = 8000;
    let currentTokens = 0;
    const optimized: any[] = [];

    for (const { item, score } of scoredContext) {
      const itemTokens = this.estimateTokens(item);

      if (currentTokens + itemTokens <= maxTokens) {
        optimized.push(item);
        currentTokens += itemTokens;
      } else if (score > 0.8) {
        // High relevance - try to compress
        const compressed = this.compressContext(item);
        const compressedTokens = this.estimateTokens(compressed);

        if (currentTokens + compressedTokens <= maxTokens) {
          optimized.push(compressed);
          currentTokens += compressedTokens;
        }
      }
    }

    return optimized;
  }

  /** Heuristic relevance score in [0, 1] for one context item. */
  private calculateRelevanceScore(context: any): number {
    let score = 0;

    // Recently edited files get higher score
    if (context.recentlyEdited) score += 0.3;

    // Current file gets highest score
    if (context.isCurrent) score += 0.5;

    // Imported files get medium score
    if (context.isImported) score += 0.2;

    // Exponential age decay with a ~30-minute half-life. Items without a
    // numeric lastModified are treated as fresh (the original computed
    // NaN and wiped the score).
    if (typeof context.lastModified === 'number') {
      const ageMinutes = (Date.now() - context.lastModified) / 60000;
      score *= Math.exp(-ageMinutes / 30);
    }

    return Math.min(score, 1.0);
  }

  /** Rough token estimate: ~4 characters per token. */
  private estimateTokens(content: any): number {
    const text = typeof content === 'string'
      ? content
      : JSON.stringify(content);

    return Math.ceil(text.length / 4);
  }

  /** Lossy compression: strip comments and collapse blank lines. */
  private compressContext(context: any): any {
    if (typeof context === 'string') {
      return context
        .replace(/\/\*[\s\S]*?\*\//g, '')
        .replace(/\/\/.*/g, '')
        .replace(/\n\s*\n/g, '\n');
    }
    return context;
  }

  /** Actual AI API call — placeholder implementation. */
  private async callAI(request: AIRequest, context: any[]): Promise<any> {
    return {
      id: request.id,
      content: 'AI response',
      tokensUsed: 100
    };
  }

  /** Fold a completed (non-cached) request into the aggregate metrics. */
  private updateMetrics(response: any, latency: number): void {
    // A non-cached request is a cache miss — dilute the running hit rate
    // (the original only updated the rate on hits, inflating it).
    this.metrics.cacheHitRate =
      (this.metrics.cacheHitRate * this.metrics.requests) / (this.metrics.requests + 1);
    this.metrics.requests++;
    this.metrics.avgLatency =
      (this.metrics.avgLatency * (this.metrics.requests - 1) + latency) / this.metrics.requests;
    this.metrics.tokensUsed += response.tokensUsed || 0;
  }

  /** Stable SHA-256 cache key over type + prompt + context. */
  private getCacheKey(request: AIRequest): string {
    const crypto = require('crypto');
    const data = JSON.stringify({
      type: request.type,
      prompt: request.prompt,
      context: request.context
    });
    return crypto.createHash('sha256').update(data).digest('hex');
  }

  /** Cache entries expire after one hour. */
  private isCacheValid(cached: any): boolean {
    const ttl = 3600000; // 1 hour
    return Date.now() - cached.cachedAt < ttl;
  }

  /** Unique-enough id: timestamp plus a random base-36 suffix. */
  private generateRequestId(): string {
    // slice(2, 11) replaces the deprecated substr(2, 9)
    return `req_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
  }

  /**
   * Получить метрики производительности (returns a defensive copy).
   */
  getMetrics(): PerformanceMetrics {
    return { ...this.metrics };
  }

  /**
   * Очистить кэш ответов.
   */
  clearCache(): void {
    this.responseCache.clear();
  }

  /**
   * Установить максимум параллельных запросов.
   */
  setMaxConcurrent(max: number): void {
    this.maxConcurrent = max;
  }
}

// Singleton export
// Shared process-wide instance used by the auto-tuner and other callers.
export const aiPerformance = new AIPerformanceManager();

Адаптивная настройка на основе метрик:


// utils/ai-auto-tuner.ts

import { aiPerformance } from './ai-performance-manager';

/**
 * Periodically adjusts the AI configuration from live aiPerformance
 * metrics: context size vs latency, cache TTL vs hit rate, and request
 * concurrency vs error rate.
 */
export class AIAutoTuner {
  // Handle of the periodic tuning timer (null while stopped).
  // NodeJS.Timer is deprecated; derive the type from setInterval itself.
  private checkInterval: ReturnType<typeof setInterval> | null = null;
  private config: any;

  constructor(initialConfig: any) {
    this.config = initialConfig;
  }

  /**
   * Автоматическая оптимизация на основе метрик.
   * Calling this while already running replaces the existing timer
   * (the original stacked a second interval and leaked the first).
   */
  startAutoTuning(intervalMs: number = 60000): void {
    this.stopAutoTuning();
    this.checkInterval = setInterval(() => {
      this.tune();
    }, intervalMs);

    console.log('AI auto-tuning started');
  }

  /** Stop periodic auto-tuning (no-op when not running). */
  stopAutoTuning(): void {
    if (this.checkInterval) {
      clearInterval(this.checkInterval);
      this.checkInterval = null;
    }
  }

  /** One tuning pass: read metrics and nudge the configuration. */
  private tune(): void {
    const metrics = aiPerformance.getMetrics();

    console.log('\n--- AI Performance Tuning ---');
    console.log('Current metrics:', metrics);

    // Latency vs context size: shrink context when slow, grow when fast.
    if (metrics.avgLatency > 5000) {
      console.log('High latency detected, reducing context size');
      this.config.ai.context.maxFiles = Math.max(
        5,
        this.config.ai.context.maxFiles - 2
      );
    } else if (metrics.avgLatency < 1000) {
      console.log('✓ Low latency, can increase context');
      this.config.ai.context.maxFiles = Math.min(
        30,
        this.config.ai.context.maxFiles + 1
      );
    }

    // Low hit rate suggests entries expire too early — lengthen the TTL.
    if (metrics.cacheHitRate < 0.3) {
      console.log('Low cache hit rate, increasing TTL');
      // Round so the TTL stays an integer number of seconds
      // (repeated *1.2 otherwise drifts into fractions).
      this.config.ai.caching.ttl = Math.min(
        7200,
        Math.round(this.config.ai.caching.ttl * 1.2)
      );
    }

    // Error rate: guard the division — with zero requests it was NaN.
    if (metrics.requests > 0 && metrics.errors / metrics.requests > 0.1) {
      console.log(' High error rate, reducing concurrency');
      aiPerformance.setMaxConcurrent(
        Math.max(1, this.config.ai.optimization.maxConcurrent - 1)
      );
    }

    console.log('Updated config:', this.config.ai);
  }

  /** Current (possibly tuned) configuration object. */
  getConfig(): any {
    return this.config;
  }
}

19.5. Профилирование и диагностика

Инструменты для диагностики проблем производительности


You: Создай систему профилирования для отладки проблем с производительностью

AI:


// utils/cursor-profiler.ts

import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';

// One recorded profiler entry: a timer start/end or a point-in-time mark.
interface ProfilerEvent {
timestamp: number; // ms epoch when the event was recorded
type: 'start' | 'end' | 'mark';
name: string;
duration?: number; // only present on 'end' events (milliseconds)
metadata?: any;
}

// Full report produced by CursorProfiler.generateReport().
interface ProfilerReport {
summary: {
totalDuration: number; // ms elapsed since profiler construction
eventCount: number;
topBottlenecks: Array<{ name: string; duration: number; percentage: number }>; // top 10 by summed duration
};
events: ProfilerEvent[];
systemInfo: {
platform: string;
cpus: number;
memory: number; // total system memory in bytes
cursorVersion: string; // "unknown" when not detectable
};
}

export class CursorProfiler {
private events: ProfilerEvent[] = [];
private activeTimers: Map<string, number> = new Map();
private startTime: number;

constructor() {
this.startTime = Date.now();
}

/**
* Начать профилирование операции
*/
start(name: string, metadata?: any): void {
if (this.activeTimers.has(name)) {
console.warn(`Timer "${name}" already started`);
return;
}

this.activeTimers.set(name, performance.now());
this.events.push({
timestamp: Date.now(),
type: 'start',
name,
metadata
});
}

/**
* Завершить профилирование операции
*/
end(name: string, metadata?: any): number {
const startTime = this.activeTimers.get(name);

if (startTime === undefined) {
console.warn(`Timer "${name}" was not started`);
return 0;
}

const duration = performance.now() - startTime;
this.activeTimers.delete(name);

this.events.push({
timestamp: Date.now(),
type: 'end',
name,
duration,
metadata
});

return duration;
}

/**
* Пометить событие
*/
mark(name: string, metadata?: any): void {
this.events.push({
timestamp: Date.now(),
type: 'mark',
name,
metadata
});
}

/**
* Профилирование с декоратором
*/
static profile(target: any, propertyKey: string, descriptor: PropertyDescriptor) {
const originalMethod = descriptor.value;

descriptor.value = async function (...args: any[]) {
const profiler = new CursorProfiler();
profiler.start(propertyKey);

try {
const result = await originalMethod.apply(this, args);
return result;
} finally {
const duration = profiler.end(propertyKey);
console.log(`[Profile] ${propertyKey}: ${duration.toFixed(2)}ms`);
}
};

return descriptor;
}

/**
* Профилирование функции
*/
static measureSync<T>(name: string, fn: () => T): T {
const start = performance.now();
try {
return fn();
} finally {
const duration = performance.now() - start;
console.log(`[Profile] ${name}: ${duration.toFixed(2)}ms`);
}
}

static async measureAsync<T>(name: string, fn: () => Promise<T>): Promise<T> {
const start = performance.now();
try {
return await fn();
} finally {
const duration = performance.now() - start;
console.log(`[Profile] ${name}: ${duration.toFixed(2)}ms`);
}
}

/**
* Сгенерировать отчет
*/
generateReport(): ProfilerReport {
const totalDuration = Date.now() - this.startTime;

// Calculate durations for each operation
const durations = new Map<string, number>();

for (const event of this.events) {
if (event.type === 'end' && event.duration !== undefined) {
const existing = durations.get(event.name) || 0;
durations.set(event.name, existing + event.duration);
}
}

// Find top bottlenecks
const topBottlenecks = Array.from(durations.entries())
.map(([name, duration]) => ({
name,
duration,
percentage: (duration / totalDuration) * 100
}))
.sort((a, b) => b.duration - a.duration)
.slice(0, 10);

return {
summary: {
totalDuration,
eventCount: this.events.length,
topBottlenecks
},
events: this.events,
systemInfo: {
platform: os.platform(),
cpus: os.cpus().length,
memory: os.totalmem(),
cursorVersion: this.getCursorVersion()
}
};
}

/**
 * Print a human-readable profile report to the console.
 *
 * Fix: the bottleneck bar chart called `''.repeat(...)` — repeating
 * the EMPTY string — so no bar was ever rendered (the bar glyph was
 * evidently lost). It now repeats a visible block character, one per
 * 2% of total runtime.
 */
printReport(): void {
  const report = this.generateReport();

  console.log('\n' + '='.repeat(80));
  console.log('CURSOR PERFORMANCE PROFILE');
  console.log('='.repeat(80));

  console.log('\nSUMMARY:');
  console.log(` Total Duration: ${report.summary.totalDuration}ms`);
  console.log(` Events Recorded: ${report.summary.eventCount}`);

  console.log('\nSYSTEM INFO:');
  console.log(` Platform: ${report.systemInfo.platform}`);
  console.log(` CPUs: ${report.systemInfo.cpus}`);
  console.log(` Memory: ${this.formatBytes(report.systemInfo.memory)}`);
  console.log(` Cursor: ${report.systemInfo.cursorVersion}`);

  console.log('\nTOP BOTTLENECKS:');
  for (const bottleneck of report.summary.topBottlenecks) {
    // One block character per 2% of total runtime (max 50 chars).
    const bar = '█'.repeat(Math.floor(bottleneck.percentage / 2));
    console.log(
      ` ${bottleneck.name.padEnd(30)} ${bottleneck.duration.toFixed(2)}ms`.padEnd(50) +
      `${bottleneck.percentage.toFixed(1)}% ${bar}`
    );
  }

  console.log('\n' + '='.repeat(80));
}

/**
 * Export the full profiler report as pretty-printed JSON.
 *
 * Fix: the confirmation message interpolated the literal
 * "$(unknown)" instead of the target filename.
 *
 * @param filename Path the JSON report is written to (synchronously).
 */
exportJSON(filename: string): void {
  const report = this.generateReport();
  fs.writeFileSync(filename, JSON.stringify(report, null, 2));
  console.log(`Profile exported to ${filename}`);
}

/**
 * Export recorded events in Chrome DevTools trace-event format.
 *
 * Each profiler event maps to a trace event phase: 'start' -> 'B'
 * (begin), 'end' -> 'E' (end), anything else -> 'I' (instant).
 * Timestamps are converted from milliseconds to microseconds as the
 * trace-event format requires.
 *
 * Fix: the confirmation message interpolated the literal
 * "$(unknown)" instead of the target filename.
 *
 * @param filename Path the trace JSON is written to (synchronously).
 */
exportChromeTrace(filename: string): void {
  const traceEvents = this.events.map(event => ({
    name: event.name,
    cat: 'cursor',
    ph: event.type === 'start' ? 'B' : event.type === 'end' ? 'E' : 'I',
    ts: event.timestamp * 1000, // ms -> microseconds
    pid: 1,
    tid: 1,
    args: event.metadata || {}
  }));

  fs.writeFileSync(filename, JSON.stringify(traceEvents, null, 2));
  console.log(`Chrome trace exported to ${filename}`);
  console.log('Open in Chrome DevTools: chrome://tracing');
}

/**
 * Best-effort Cursor version lookup.
 *
 * No version source is wired up yet, so this currently always
 * reports 'unknown'; the try/catch keeps a future file read safe.
 */
private getCursorVersion(): string {
  try {
    // TODO: read the real version from a package.json / version file.
    return 'unknown';
  } catch {
    return 'unknown';
  }
}

/**
 * Render a byte count as a human-readable string, e.g. 1536 -> "1.5 KB".
 * Values are rounded to two decimal places; 0 is special-cased to "0 B".
 */
private formatBytes(bytes: number): string {
  if (bytes === 0) return '0 B';
  const units = ['B', 'KB', 'MB', 'GB', 'TB'];
  // Pick the largest power of 1024 that fits.
  const exp = Math.floor(Math.log(bytes) / Math.log(1024));
  const value = Math.round((bytes / Math.pow(1024, exp)) * 100) / 100;
  return `${value} ${units[exp]}`;
}
}

// Usage examples:
// Bracket each measured operation in a start()/end() pair, then
// render or export the collected profile.
const profiler = new CursorProfiler();

profiler.start('indexing');
// ... indexing operation
profiler.end('indexing');

profiler.start('ai-request');
// ... AI request
profiler.end('ai-request');

// Human-readable summary, then JSON and Chrome DevTools trace exports.
profiler.printReport();
profiler.exportJSON('profile.json');
profiler.exportChromeTrace('trace.json');

Диагностический скрипт для типичных проблем:


// scripts/diagnose-issues.ts

import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
import { exec } from 'child_process';
import { promisify } from 'util';

// Promise-based wrapper around child_process.exec so shell commands
// can be awaited by the diagnostic checks below.
const execAsync = promisify(exec);

// One finding produced by a diagnostic check: what is wrong, how
// severe it is, and what the user should do about it.
interface DiagnosticResult {
issue: string;
severity: 'critical' | 'warning' | 'info';
description: string;
recommendation: string;
}

/**
 * Runs a battery of environment checks (system resources, disk space,
 * Cursor installation, project size, indexing setup, network
 * connectivity, extensions) and collects findings as
 * DiagnosticResult entries.
 *
 * NOTE(review): several checks shell out to `find` and `ping -c`,
 * which are POSIX-only; on Windows those commands fail and the
 * corresponding checks are silently skipped — confirm whether
 * Windows support is required.
 *
 * Fixes applied: exportReport() logged the literal "$(unknown)"
 * instead of the actual output filename; removed an unused
 * fs.statSync() result in checkDiskSpace(); removed an unused
 * `stdout` destructure in checkNetworkConnectivity(); parseInt now
 * passes an explicit radix.
 */
class CursorDiagnostics {
  // Accumulated findings, in the order the checks produced them.
  private results: DiagnosticResult[] = [];

  /** Run every check in sequence and return all findings. */
  async runAll(): Promise<DiagnosticResult[]> {
    console.log('Running Cursor diagnostics...\n');

    await this.checkSystemResources();
    await this.checkDiskSpace();
    await this.checkCursorInstallation();
    await this.checkProjectSize();
    await this.checkIndexingIssues();
    await this.checkNetworkConnectivity();
    await this.checkExtensions();

    return this.results;
  }

  /** Flag high memory usage (>90% critical, >75% warning) and high CPU load. */
  private async checkSystemResources(): Promise<void> {
    const freeMem = os.freemem();
    const totalMem = os.totalmem();
    const usedMem = totalMem - freeMem;
    const memPercentage = (usedMem / totalMem) * 100;

    if (memPercentage > 90) {
      this.addResult({
        issue: 'High Memory Usage',
        severity: 'critical',
        description: `System memory usage at ${memPercentage.toFixed(1)}%`,
        recommendation: 'Close other applications or upgrade RAM'
      });
    } else if (memPercentage > 75) {
      this.addResult({
        issue: 'Elevated Memory Usage',
        severity: 'warning',
        description: `System memory usage at ${memPercentage.toFixed(1)}%`,
        recommendation: 'Consider closing some applications'
      });
    }

    // 1-minute load average relative to core count. os.loadavg()
    // reports zeros on Windows, so this check is effectively
    // POSIX-only.
    const loadAvg = os.loadavg()[0];
    const cpuCount = os.cpus().length;
    const loadPercentage = (loadAvg / cpuCount) * 100;

    if (loadPercentage > 80) {
      this.addResult({
        issue: 'High CPU Load',
        severity: 'warning',
        description: `CPU load at ${loadPercentage.toFixed(1)}%`,
        recommendation: 'Check for background processes consuming CPU'
      });
    }
  }

  /** Warn when ~/.cursor grows beyond 10GB. Best-effort: errors are ignored. */
  private async checkDiskSpace(): Promise<void> {
    try {
      const cursorPath = path.join(os.homedir(), '.cursor');

      if (fs.existsSync(cursorPath)) {
        const size = this.getDirectorySize(cursorPath);
        const sizeGB = size / (1024 ** 3);

        if (sizeGB > 10) {
          this.addResult({
            issue: 'Large Cursor Directory',
            severity: 'warning',
            description: `Cursor data directory is ${sizeGB.toFixed(2)}GB`,
            recommendation: 'Consider clearing cache: rm -rf ~/.cursor/cache'
          });
        }
      }
    } catch {
      // Best-effort check: unreadable entries abort it silently.
    }
  }

  /** Verify ~/.cursor exists and contains the expected subdirectories. */
  private async checkCursorInstallation(): Promise<void> {
    const cursorPath = path.join(os.homedir(), '.cursor');

    if (!fs.existsSync(cursorPath)) {
      this.addResult({
        issue: 'Cursor Not Found',
        severity: 'critical',
        description: 'Cursor installation directory not found',
        recommendation: 'Reinstall Cursor'
      });
      return; // Nothing else to inspect without the root directory.
    }

    // Subdirectories whose absence suggests a damaged installation.
    const criticalPaths = [
      'config',
      'extensions',
      'logs'
    ];

    for (const p of criticalPaths) {
      const fullPath = path.join(cursorPath, p);
      if (!fs.existsSync(fullPath)) {
        this.addResult({
          issue: `Missing ${p} directory`,
          severity: 'warning',
          description: `${p} directory not found`,
          recommendation: 'Cursor may need to be reinstalled'
        });
      }
    }
  }

  /**
   * Count project files (excluding node_modules/.git) and warn when the
   * project is large enough to slow down indexing. Uses POSIX `find`;
   * on failure (e.g. Windows) the check is skipped.
   */
  private async checkProjectSize(): Promise<void> {
    const cwd = process.cwd();

    try {
      const { stdout: fileCount } = await execAsync(
        `find . -type f ! -path "*/node_modules/*" ! -path "*/.git/*" | wc -l`,
        { cwd }
      );

      const count = parseInt(fileCount.trim(), 10);

      if (count > 50000) {
        this.addResult({
          issue: 'Very Large Project',
          severity: 'warning',
          description: `Project has ${count.toLocaleString()} files`,
          recommendation: 'Enable selective indexing and use .cursorignore'
        });
      } else if (count > 20000) {
        this.addResult({
          issue: 'Large Project',
          severity: 'info',
          description: `Project has ${count.toLocaleString()} files`,
          recommendation: 'Consider using .cursorignore to exclude build artifacts'
        });
      }
    } catch {
      // `find` unavailable (e.g. Windows): skip this check.
    }
  }

  /** Suggest a .cursorignore and flag >10MB files that are not excluded. */
  private async checkIndexingIssues(): Promise<void> {
    const cursorignorePath = path.join(process.cwd(), '.cursorignore');

    if (!fs.existsSync(cursorignorePath)) {
      this.addResult({
        issue: 'Missing .cursorignore',
        severity: 'info',
        description: 'No .cursorignore file found',
        recommendation: 'Create .cursorignore to exclude unnecessary files'
      });
    }

    // Large files inflate the index; list the first few offenders.
    try {
      const { stdout } = await execAsync(
        `find . -type f -size +10M ! -path "*/node_modules/*" ! -path "*/.git/*"`,
        { cwd: process.cwd() }
      );

      const largeFiles = stdout.trim().split('\n').filter(f => f);

      if (largeFiles.length > 0) {
        this.addResult({
          issue: 'Large Files Not Ignored',
          severity: 'warning',
          description: `Found ${largeFiles.length} files > 10MB`,
          recommendation: `Add these to .cursorignore:\n${largeFiles.slice(0, 5).join('\n')}`
        });
      }
    } catch {
      // `find` unavailable: skip this check.
    }
  }

  /**
   * Ping the Cursor API host once and warn on slow or failed round
   * trips. The measured time includes process-spawn overhead, so it is
   * an upper bound on network latency, not a precise figure.
   */
  private async checkNetworkConnectivity(): Promise<void> {
    try {
      const start = Date.now();
      await execAsync('ping -c 1 api.cursor.sh'); // `-c` is POSIX ping; fails on Windows
      const latency = Date.now() - start;

      if (latency > 1000) {
        this.addResult({
          issue: 'Slow Network Connection',
          severity: 'warning',
          description: `API latency: ${latency}ms`,
          recommendation: 'Check network connection to Cursor servers'
        });
      }
    } catch {
      this.addResult({
        issue: 'Network Connectivity Issue',
        severity: 'critical',
        description: 'Cannot reach Cursor API servers',
        recommendation: 'Check firewall and network settings'
      });
    }
  }

  /** Informational note when more than 50 extensions are installed. */
  private async checkExtensions(): Promise<void> {
    const extensionsPath = path.join(os.homedir(), '.cursor', 'extensions');

    if (fs.existsSync(extensionsPath)) {
      const extensions = fs.readdirSync(extensionsPath);

      if (extensions.length > 50) {
        this.addResult({
          issue: 'Many Extensions',
          severity: 'info',
          description: `${extensions.length} extensions installed`,
          recommendation: 'Disable unused extensions to improve performance'
        });
      }
    }
  }

  /** Append a finding to the report. */
  private addResult(result: DiagnosticResult): void {
    this.results.push(result);
  }

  /**
   * Recursively sum file sizes under `directory`, in bytes.
   * May throw on unreadable entries; callers wrap it in try/catch.
   */
  private getDirectorySize(directory: string): number {
    let size = 0;
    const files = fs.readdirSync(directory);

    for (const file of files) {
      const filePath = path.join(directory, file);
      const stats = fs.statSync(filePath);

      if (stats.isDirectory()) {
        size += this.getDirectorySize(filePath);
      } else {
        size += stats.size;
      }
    }

    return size;
  }

  /** Print all findings grouped by severity (critical, warning, info). */
  printReport(): void {
    console.log('\n' + '='.repeat(80));
    console.log('CURSOR DIAGNOSTICS REPORT');
    console.log('='.repeat(80));

    const critical = this.results.filter(r => r.severity === 'critical');
    const warnings = this.results.filter(r => r.severity === 'warning');
    const info = this.results.filter(r => r.severity === 'info');

    if (critical.length > 0) {
      console.log('\n CRITICAL ISSUES:');
      critical.forEach(r => this.printResult(r));
    }

    if (warnings.length > 0) {
      console.log('\n WARNINGS:');
      warnings.forEach(r => this.printResult(r));
    }

    if (info.length > 0) {
      console.log('\n INFORMATION:');
      info.forEach(r => this.printResult(r));
    }

    if (this.results.length === 0) {
      console.log('\n No issues detected!');
    }

    console.log('\n' + '='.repeat(80));
  }

  /** Print one finding: issue title, description, and recommendation. */
  private printResult(result: DiagnosticResult): void {
    console.log(`\n ${result.issue}`);
    console.log(` ${result.description}`);
    console.log(` → ${result.recommendation}`);
  }

  /**
   * Write all findings to `filename` as pretty-printed JSON.
   * Fix: the confirmation message interpolated the literal
   * "$(unknown)" instead of the target filename.
   */
  exportReport(filename: string): void {
    fs.writeFileSync(filename, JSON.stringify(this.results, null, 2));
    console.log(`\nDiagnostics exported to ${filename}`);
  }
}

// CLI entry point: run every diagnostic, print the report, and
// optionally persist it as JSON when `--export` is passed.
async function main() {
  const diag = new CursorDiagnostics();
  await diag.runAll();
  diag.printReport();

  if (process.argv.includes('--export')) {
    diag.exportReport('cursor-diagnostics.json');
  }
}

main().catch(console.error);