Skip to main content

Resource Management

Resource management is an essential part of operating AI Builder applications. It includes monitoring, optimizing, and managing resources such as cloud functions, databases, storage, and CDN. Effective resource management ensures application stability, cost control, and performance improvements.

Overview

Resource management covers the following core areas:

  • 📊 Resource monitoring: real-time monitoring of resource usage
  • 💰 Cost control: optimize resource allocation to control operational costs
  • 🔧 Performance optimization: improve application performance and user experience
  • 🛡️ Security management: ensure resource and data security
  • 📈 Scaling management: adjust resource scale according to business needs

Resource overview

1. Resource types

Compute resources:

  • Cloud function instances
  • Function execution duration
  • Memory usage
  • Concurrent executions

Storage resources:

  • Database storage size
  • File storage size
  • Read/write operation counts
  • Data transfer volume

Network resources:

  • CDN traffic
  • Bandwidth usage
  • Request counts
  • Cache hit rate

2. Resource monitoring dashboard

Console overview:

// Example snapshot of the resource-monitoring data structure shown in the
// console overview. All values are illustrative.
const resourceMetrics = {
  // Cloud-function metrics
  functions: {
    totalInvocations: 125000,
    totalDuration: 3600000, // milliseconds
    averageMemoryUsage: 128, // MB
    errorRate: 0.02, // 2%
    concurrentExecutions: 50
  },
  // Database metrics
  database: {
    storageUsed: 2.5, // GB
    readOperations: 50000,
    writeOperations: 15000,
    indexSize: 0.3 // GB
  },
  // File-storage metrics
  storage: {
    totalFiles: 1200,
    storageUsed: 5.8, // GB
    downloadCount: 8500,
    uploadCount: 1200
  },
  // CDN metrics
  cdn: {
    totalRequests: 180000,
    bandwidth: 45.2, // GB
    cacheHitRate: 0.85, // 85%
    averageResponseTime: 120 // milliseconds
  }
};

Cloud Function management

1. Function monitoring

Performance metric monitoring:

// functions/monitoring/metrics.js
const cloud = require('@cloudbase/node-sdk');

/**
 * Collects per-invocation execution metrics for cloud functions and
 * aggregates them into per-day statistics.
 */
class FunctionMetrics {
  constructor() {
    // Bind to the environment the function is currently deployed in.
    this.app = cloud.init({
      env: cloud.SYMBOL_CURRENT_ENV
    });
    this.db = this.app.database();
  }

  /**
   * Persist one invocation's metrics. Best-effort: a write failure is
   * reported to the console but never thrown to the caller.
   * @param {string} functionName
   * @param {number} startTime - epoch ms when the invocation began
   * @param {number} endTime - epoch ms when the invocation finished
   * @param {number} memoryUsed - MB
   * @param {boolean} success
   */
  async recordMetrics(functionName, startTime, endTime, memoryUsed, success) {
    const record = {
      functionName,
      duration: endTime - startTime,
      memoryUsed,
      success,
      timestamp: new Date(),
      // Day bucket (YYYY-MM-DD) used as the aggregation key below.
      date: new Date().toISOString().split('T')[0]
    };

    try {
      await this.db.collection('function_metrics').add(record);
    } catch (error) {
      console.error('Failed to record metrics:', error);
    }
  }

  /**
   * Aggregate per-day performance statistics for one function.
   * @param {string} functionName
   * @param {number} [days=7] - look-back window in days
   * @returns {Promise<Array>} one row per day, oldest first
   */
  async getFunctionStats(functionName, days = 7) {
    const windowStart = new Date();
    windowStart.setDate(windowStart.getDate() - days);

    const { data } = await this.db.collection('function_metrics')
      .aggregate()
      .match({
        functionName,
        timestamp: this.db.command.gte(windowStart)
      })
      .group({
        _id: '$date',
        totalInvocations: { $sum: 1 },
        averageDuration: { $avg: '$duration' },
        maxDuration: { $max: '$duration' },
        averageMemory: { $avg: '$memoryUsed' },
        // Mean of 1/0 success flags equals the success rate.
        successRate: {
          $avg: {
            $cond: ['$success', 1, 0]
          }
        }
      })
      .sort({ _id: 1 })
      .end();

    return data;
  }
}

// Usage in cloud function
exports.main = async (event, context) => {
const startTime = Date.now();
const metrics = new FunctionMetrics();

try {
// business logic
const result = await processTask(event);

// record success metrics
await metrics.recordMetrics(
context.functionName,
startTime,
Date.now(),
context.memoryLimitInMB,
true
);

return {
code: 0,
data: result
};
} catch (error) {
// record failure metrics
await metrics.recordMetrics(
context.functionName,
startTime,
Date.now(),
context.memoryLimitInMB,
false
);

throw error;
}
};

2. Function optimization

Memory configuration optimization:

// scripts/optimize-functions.js
const cloud = require('@cloudbase/node-sdk');

/**
 * Analyzes recorded function metrics and recommends a memory configuration.
 *
 * NOTE(review): this class calls `this.getFunctionStats` and
 * `this.getCurrentMemoryConfig`, which are not defined here — presumably
 * supplied elsewhere (see FunctionMetrics.getFunctionStats). It also reads
 * `day.maxMemory` from the stats rows; confirm the aggregation actually
 * emits that field (the metrics example emits `averageMemory`).
 */
class FunctionOptimizer {
  constructor() {
    this.app = cloud.init({
      env: process.env.TCB_ENV
    });
  }

  /**
   * Analyze a function's memory usage over the last 30 days and compute a
   * recommended memory setting (p95 usage + 20% headroom, 64MB granularity).
   * @param {string} functionName
   * @returns {Promise<{current: number, recommended: number, stats: object}>}
   * @throws {Error} when no metrics exist for the window (the original
   *   silently produced NaN / -Infinity statistics in that case)
   */
  async analyzeMemoryUsage(functionName) {
    const stats = await this.getFunctionStats(functionName, 30);

    if (!Array.isArray(stats) || stats.length === 0) {
      throw new Error(`No metrics available for function: ${functionName}`);
    }

    const maxValues = stats.map((day) => day.maxMemory);
    const memoryStats = {
      average: stats.reduce((sum, day) => sum + day.averageMemory, 0) / stats.length,
      max: Math.max(...maxValues),
      p95: this.calculatePercentile(maxValues, 95)
    };

    // Recommend p95 usage plus 20% headroom, rounded up to a 64MB step.
    const recommendedMemory = Math.ceil(memoryStats.p95 * 1.2 / 64) * 64;

    return {
      current: await this.getCurrentMemoryConfig(functionName),
      recommended: recommendedMemory,
      stats: memoryStats
    };
  }

  /**
   * Compare the current vs. recommended memory config and log a suggestion.
   * (The actual config-update API call is intentionally left commented out.)
   * @param {string} functionName
   */
  async optimizeFunctionConfig(functionName) {
    const analysis = await this.analyzeMemoryUsage(functionName);

    if (analysis.recommended !== analysis.current) {
      console.log(`Recommend updating ${functionName} memory config:`);
      console.log(`Current: ${analysis.current}MB`);
      console.log(`Recommended: ${analysis.recommended}MB`);

      // You may call API here to update config automatically
      // await this.updateFunctionConfig(functionName, {
      //   memory: analysis.recommended
      // });
    }
  }

  /**
   * Nearest-rank percentile. Fixes two defects in the original:
   * - it sorted `values` in place, silently reordering the caller's array;
   * - percentile 0 produced index -1 (returned undefined).
   * @param {number[]} values - non-empty list of samples
   * @param {number} percentile - 0..100
   * @returns {number}
   * @throws {Error} when `values` is empty
   */
  calculatePercentile(values, percentile) {
    if (values.length === 0) {
      throw new Error('calculatePercentile requires at least one value');
    }
    const sorted = [...values].sort((a, b) => a - b);
    const index = Math.max(0, Math.ceil(sorted.length * percentile / 100) - 1);
    return sorted[index];
  }
}

// Schedule optimization runs. The original fired two floating promises with
// no rejection handler, so any failure became an unhandled rejection.
const optimizer = new FunctionOptimizer();
Promise.all([
  optimizer.optimizeFunctionConfig('getTasks'),
  optimizer.optimizeFunctionConfig('createTask')
]).catch((error) => {
  console.error('Function optimization failed:', error);
});

3. Error monitoring and alerting

Error collection system:

// functions/monitoring/errorCollector.js
// NOTE(review): `cloud` is expected to be required from '@cloudbase/node-sdk'
// by the enclosing module, as in the metrics example above.

/**
 * Central error sink for cloud functions: persists structured error logs to
 * the `error_logs` collection and raises an immediate alert for critical
 * errors.
 */
class ErrorCollector {
  constructor() {
    this.app = cloud.init({
      env: cloud.SYMBOL_CURRENT_ENV
    });
    this.db = this.app.database();
  }

  /**
   * Persist one error occurrence. Best-effort: a failure to write the log is
   * reported to the console, never thrown back into the failing function.
   * @param {Error} error - the caught error
   * @param {object} context - cloud-function context (functionName, requestId)
   * @param {object} event - the triggering event, stored as JSON
   */
  async logError(error, context, event) {
    const errorLog = {
      functionName: context.functionName,
      errorMessage: error.message,
      errorStack: error.stack,
      requestId: context.requestId,
      event: JSON.stringify(event),
      timestamp: new Date(),
      severity: this.classifyError(error)
    };

    try {
      await this.db.collection('error_logs').add(errorLog);

      // Immediately alert for critical errors
      if (errorLog.severity === 'critical') {
        await this.sendAlert(errorLog);
      }
    } catch (logError) {
      console.error('Failed to record error log:', logError);
    }
  }

  /**
   * Classify an error's severity from its message. Check order matters:
   * "timeout" wins over "database", so a database timeout is only a warning.
   * @param {Error|object} error
   * @returns {'warning'|'critical'|'error'|'info'}
   */
  classifyError(error) {
    // Robustness fix: errors thrown as plain objects or strings may lack a
    // string `.message`; the original crashed on `undefined.includes(...)`.
    const message = typeof error?.message === 'string' ? error.message : '';

    if (message.includes('timeout')) {
      return 'warning';
    }
    if (message.includes('permission') || message.includes('auth')) {
      return 'critical';
    }
    if (message.includes('database')) {
      return 'error';
    }
    return 'info';
  }

  /**
   * Send an alert for a critical error. Currently logs to the console;
   * compose email, SMS, or webhook alerts here.
   * @param {object} errorLog - the record built by logError
   */
  async sendAlert(errorLog) {
    console.log('🚨 Critical error alert:', errorLog.errorMessage);

    // Example: send email alert
    // await this.sendEmail({
    //   to: 'admin@yourcompany.com',
    //   subject: `[CRITICAL ERROR] ${errorLog.functionName}`,
    //   body: `Error: ${errorLog.errorMessage}\n\nStack: ${errorLog.errorStack}`
    // });
  }

  /**
   * Aggregate error counts per (functionName, severity) over a trailing
   * window.
   * @param {number} [hours=24] - look-back window in hours
   * @returns {Promise<Array>} aggregation rows with count and latestError
   */
  async getErrorStats(hours = 24) {
    const startTime = new Date();
    startTime.setHours(startTime.getHours() - hours);

    const result = await this.db.collection('error_logs')
      .aggregate()
      .match({
        timestamp: this.db.command.gte(startTime)
      })
      .group({
        _id: {
          functionName: '$functionName',
          severity: '$severity'
        },
        count: { $sum: 1 },
        latestError: { $max: '$timestamp' }
      })
      .end();

    return result.data;
  }
}

// Usage in cloud function
const errorCollector = new ErrorCollector();

exports.main = async (event, context) => {
try {
// business logic
return await processRequest(event);
} catch (error) {
// record error
await errorCollector.logError(error, context, event);

// rethrow
throw error;
}
};

Database management

1. Storage monitoring

Database usage analysis:

// scripts/database-analysis.js

/**
 * Inspects database collections: document counts, rough storage estimates,
 * and cleanup recommendations.
 */
class DatabaseAnalyzer {
  constructor() {
    this.app = cloud.init({
      env: process.env.TCB_ENV
    });
    this.db = this.app.database();
  }

  /**
   * Gather stats for every known collection. Failures are logged per
   * collection and skipped so one bad collection does not abort the run.
   * @returns {Promise<Array<{collection: string}>>}
   */
  async analyzeCollections() {
    const collectionNames = ['users', 'tasks', 'categories', 'logs'];
    const results = [];

    for (const name of collectionNames) {
      try {
        const stats = await this.getCollectionStats(name);
        results.push({ collection: name, ...stats });
      } catch (error) {
        console.error(`Failed to analyze collection ${name}:`, error);
      }
    }

    return results;
  }

  /**
   * Compute document count, creation-time range, and an estimated size for
   * one collection. The size is a rough estimate based on the JSON length
   * of a 100-document sample.
   * @param {string} collectionName
   */
  async getCollectionStats(collectionName) {
    const coll = this.db.collection(collectionName);

    // Total number of documents.
    const countResult = await coll.count();
    const documentCount = countResult.total;

    // Newest and oldest documents by creation time.
    const newest = await coll.orderBy('_createTime', 'desc').limit(1).get();
    const oldest = await coll.orderBy('_createTime', 'asc').limit(1).get();

    // Average document size from a sample (simplified estimate).
    const sample = await coll.limit(100).get();
    const avgDocSize = sample.data.length > 0
      ? JSON.stringify(sample.data).length / sample.data.length
      : 0;

    const estimatedSize = (avgDocSize * documentCount) / (1024 * 1024); // MB

    return {
      documentCount,
      estimatedSizeMB: Math.round(estimatedSize * 100) / 100,
      oldestDocument: oldest.data[0]?._createTime,
      latestDocument: newest.data[0]?._createTime,
      avgDocumentSize: Math.round(avgDocSize)
    };
  }

  /**
   * Suggest cleanups: oversized log collections and expired temp data.
   * @returns {Promise<Array<{type: string, collection: string}>>}
   */
  async getCleanupRecommendations() {
    const recommendations = [];

    // Oversized log collection?
    const logStats = await this.getCollectionStats('logs');
    if (logStats.documentCount > 100000) {
      recommendations.push({
        type: 'cleanup',
        collection: 'logs',
        reason: 'Log data is large; consider cleaning data older than 30 days',
        action: 'DELETE_OLD_LOGS'
      });
    }

    // Temp records older than seven days?
    const sevenDaysMs = 7 * 24 * 60 * 60 * 1000;
    const expired = await this.db.collection('temp_data')
      .where({
        _createTime: this.db.command.lt(new Date(Date.now() - sevenDaysMs))
      })
      .count();

    if (expired.total > 0) {
      recommendations.push({
        type: 'cleanup',
        collection: 'temp_data',
        reason: `Found ${expired.total} expired temporary records`,
        action: 'DELETE_EXPIRED_TEMP_DATA'
      });
    }

    return recommendations;
  }
}

// Periodic analysis. The original .then chain had no .catch, so a DB
// failure surfaced as an unhandled promise rejection.
const analyzer = new DatabaseAnalyzer();
analyzer.analyzeCollections().then(analysis => {
  console.log('Database analysis result:', analysis);
}).catch(error => {
  console.error('Database analysis failed:', error);
});

2. Index optimization

Query performance analysis:

// scripts/index-optimizer.js

/**
 * Mines the slow-query log for repeating query shapes and proposes
 * database indexes for the worst offenders.
 */
class IndexOptimizer {
  constructor() {
    this.app = cloud.init({
      env: process.env.TCB_ENV
    });
    this.db = this.app.database();
  }

  /**
   * Fetch the slowest queries from the last 24 hours, group them into
   * patterns, and derive index recommendations.
   * @returns {Promise<{slowQueries: Array, patterns: Array, recommendations: Array}>}
   */
  async analyzeQueryPerformance() {
    const dayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);

    // Top 50 queries slower than one second.
    const slowQueries = await this.db.collection('query_logs')
      .where({
        duration: this.db.command.gt(1000),
        timestamp: this.db.command.gte(dayAgo)
      })
      .orderBy('duration', 'desc')
      .limit(50)
      .get();

    const patterns = this.analyzeQueryPatterns(slowQueries.data);
    const recommendations = this.generateIndexRecommendations(patterns);

    return {
      slowQueries: slowQueries.data,
      patterns,
      recommendations
    };
  }

  /**
   * Group queries by (collection, where-clause) shape and compute
   * per-pattern counts and average durations.
   * @param {Array<{collection: string, where: object, duration: number}>} queries
   * @returns {Array} top 10 patterns, slowest average first
   */
  analyzeQueryPatterns(queries) {
    const byShape = {};

    for (const query of queries) {
      const shapeKey = `${query.collection}_${JSON.stringify(query.where)}`;
      const entry = byShape[shapeKey] ?? (byShape[shapeKey] = {
        collection: query.collection,
        whereClause: query.where,
        count: 0,
        totalDuration: 0,
        avgDuration: 0
      });

      entry.count += 1;
      entry.totalDuration += query.duration;
      entry.avgDuration = entry.totalDuration / entry.count;
    }

    return Object.values(byShape)
      .sort((a, b) => b.avgDuration - a.avgDuration)
      .slice(0, 10);
  }

  /**
   * Turn slow-query patterns into index recommendations. Patterns averaging
   * above 2s are marked high priority.
   * @param {Array} patterns - output of analyzeQueryPatterns
   */
  generateIndexRecommendations(patterns) {
    return patterns.reduce((recs, pattern) => {
      const fields = Object.keys(pattern.whereClause);

      if (fields.length > 0) {
        recs.push({
          collection: pattern.collection,
          fields,
          reason: `High query frequency and long duration (avg ${Math.round(pattern.avgDuration)}ms)`,
          priority: pattern.avgDuration > 2000 ? 'high' : 'medium',
          createIndexCommand: this.generateCreateIndexCommand(pattern.collection, fields)
        });
      }

      return recs;
    }, []);
  }

  /**
   * Render a createIndex command string for the given fields (ascending).
   * @param {string} collection
   * @param {string[]} fields
   * @returns {string}
   */
  generateCreateIndexCommand(collection, fields) {
    const keySpecs = fields
      .map((field) => `{ name: '${field}', direction: 1 }`)
      .join(', ');
    return `db.collection('${collection}').createIndex({ keys: [${keySpecs}] })`;
  }
}

// Example usage: report the slow-query analysis and index suggestions.
// A .catch was added — the original chain left DB failures as unhandled
// promise rejections.
const optimizer = new IndexOptimizer();
optimizer.analyzeQueryPerformance().then(analysis => {
  console.log('Query performance analysis:', analysis);

  if (analysis.recommendations.length > 0) {
    console.log('\nIndex optimization suggestions:');
    analysis.recommendations.forEach((rec, index) => {
      console.log(`${index + 1}. ${rec.collection} collection`);
      console.log(` Fields: ${rec.fields.join(', ')}`);
      console.log(` Reason: ${rec.reason}`);
      console.log(` Command: ${rec.createIndexCommand}`);
      console.log('');
    });
  }
}).catch(error => {
  console.error('Query performance analysis failed:', error);
});

3. Data backup

Automated backup strategy:

// scripts/backup-manager.js
// Full-collection JSON backup/restore: dumps collections to a single file in
// cloud storage, records runs in `backup_logs`, and supports retention-based
// cleanup. NOTE(review): assumes `cloud` is required from
// '@cloudbase/node-sdk' by this module, as in the earlier examples.
class BackupManager {
constructor() {
this.app = cloud.init({
env: process.env.TCB_ENV
});
this.db = this.app.database();
this.storage = this.app.storage();
}

// Perform data backup: dump each listed collection into one pretty-printed
// JSON file under backups/<backupId>/ and record the run in `backup_logs`.
// Returns { success: true, backupId, fileName } or { success: false, error }.
async performBackup() {
const backupId = `backup_${Date.now()}`;
const backupPath = `backups/${backupId}`;

console.log(`Starting backup: ${backupId}`);

try {
// Collections included in every backup run.
const collections = ['users', 'tasks', 'categories'];
const backupData = {};

// Backup each collection sequentially.
for (const collectionName of collections) {
console.log(`Backing up collection: ${collectionName}`);
const data = await this.backupCollection(collectionName);
backupData[collectionName] = data;
}

// Save the whole dump as a single JSON file.
const backupContent = JSON.stringify(backupData, null, 2);
const fileName = `${backupPath}/data.json`;

await this.storage.uploadFile({
cloudPath: fileName,
fileContent: Buffer.from(backupContent, 'utf8')
});

// Record backup metadata for later cleanup/restore lookups.
await this.recordBackup({
backupId,
fileName,
size: backupContent.length,
collections: Object.keys(backupData),
documentCounts: Object.fromEntries(
Object.entries(backupData).map(([name, data]) => [name, data.length])
),
timestamp: new Date()
});

console.log(`Backup completed: ${fileName}`);
return { success: true, backupId, fileName };

} catch (error) {
console.error('Backup failed:', error);
return { success: false, error: error.message };
}
}

// Backup a single collection using _id keyset pagination in batches of
// 1000, so arbitrarily large collections can be dumped.
// NOTE(review): assumes `gt` on _id pages consistently with the 'asc'
// ordering for the _id type in use — confirm for this SDK.
async backupCollection(collectionName) {
const collection = this.db.collection(collectionName);
const batchSize = 1000;
let allData = [];
let lastDoc = null;

while (true) {
let query = collection.limit(batchSize);

// After the first page, continue from the last _id seen.
if (lastDoc) {
query = query.where({
_id: this.db.command.gt(lastDoc._id)
});
}

const result = await query.orderBy('_id', 'asc').get();

// An empty page means the collection is exhausted.
if (result.data.length === 0) {
break;
}

allData = allData.concat(result.data);
lastDoc = result.data[result.data.length - 1];

console.log(`Backed up ${collectionName}: ${allData.length} records`);
}

return allData;
}

// Record backup metadata in the `backup_logs` collection.
async recordBackup(backupInfo) {
await this.db.collection('backup_logs').add(backupInfo);
}

// Cleanup old backups: delete backup files and their log records older
// than `retentionDays` (default 30). Per-backup failures are logged and
// skipped so one bad entry does not stop the sweep.
async cleanupOldBackups(retentionDays = 30) {
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - retentionDays);

// Find expired backups
const oldBackups = await this.db.collection('backup_logs')
.where({
timestamp: this.db.command.lt(cutoffDate)
})
.get();

console.log(`Found ${oldBackups.data.length} expired backups`);

// Delete files and records
for (const backup of oldBackups.data) {
try {
// Delete storage file
await this.storage.deleteFile({
fileList: [backup.fileName]
});

// Remove backup record
await this.db.collection('backup_logs').doc(backup._id).remove();

console.log(`Deleted expired backup: ${backup.backupId}`);
} catch (error) {
console.error(`Failed to delete backup: ${backup.backupId}`, error);
}
}
}

// Restore from backup: download the named backup's JSON dump and write
// every document back into its collection.
// Returns { success: true } or { success: false, error }.
async restoreFromBackup(backupId) {
console.log(`Start restoring from backup: ${backupId}`);

try {
// Find backup record
const backupRecord = await this.db.collection('backup_logs')
.where({ backupId })
.get();

if (backupRecord.data.length === 0) {
throw new Error('Backup record does not exist');
}

const backup = backupRecord.data[0];

// Download backup file
// NOTE(review): `fileName` was stored as a cloudPath at upload time but
// is passed here as `fileID` — confirm the storage API accepts a path,
// or store and resolve the real file ID instead.
const fileResult = await this.storage.downloadFile({
fileID: backup.fileName
});

const backupData = JSON.parse(fileResult.fileContent.toString());

// Restore each collection
for (const [collectionName, data] of Object.entries(backupData)) {
console.log(`Restoring collection: ${collectionName}`);
await this.restoreCollection(collectionName, data);
}

console.log('Data restore completed');
return { success: true };

} catch (error) {
console.error('Data restore failed:', error);
return { success: false, error: error.message };
}
}

// Restore a single collection in batches of 100, writing each document
// back under its original _id via set(), which overwrites existing docs.
async restoreCollection(collectionName, data) {
const collection = this.db.collection(collectionName);
const batchSize = 100;

for (let i = 0; i < data.length; i += batchSize) {
const batch = data.slice(i, i + batchSize);

// Write the whole batch in parallel.
const promises = batch.map(doc => {
const { _id, ...docData } = doc;
return collection.doc(_id).set(docData);
});

await Promise.all(promises);
console.log(`Restored ${collectionName}: ${Math.min(i + batchSize, data.length)}/${data.length}`);
}
}
}

// Scheduled backups
const backupManager = new BackupManager();

// Daily backup at 2:00 AM. Re-arms itself after every run — including
// failed ones. In the original, a rejection from performBackup or
// cleanupOldBackups was unhandled AND prevented the next scheduleBackup()
// call, permanently killing the schedule.
const scheduleBackup = () => {
  const now = new Date();
  const nextRun = new Date(now);
  nextRun.setDate(nextRun.getDate() + 1);
  nextRun.setHours(2, 0, 0, 0);

  const timeUntilBackup = nextRun.getTime() - now.getTime();

  setTimeout(async () => {
    try {
      await backupManager.performBackup();
      await backupManager.cleanupOldBackups();
    } catch (error) {
      console.error('Scheduled backup run failed:', error);
    } finally {
      // Always schedule the next run, even after a failure.
      scheduleBackup();
    }
  }, timeUntilBackup);
};

// Start scheduled backups
scheduleBackup();

Storage management

1. File storage monitoring

Storage usage analysis:

// scripts/storage-analyzer.js
// NOTE(review): `cloud` must be required from '@cloudbase/node-sdk' by this
// module, as in the earlier examples.

/**
 * Scans cloud file storage and reports usage: totals, per-extension
 * breakdown, large files, stale files, and cleanup recommendations.
 */
class StorageAnalyzer {
  constructor() {
    this.app = cloud.init({
      env: process.env.TCB_ENV
    });
    this.storage = this.app.storage();
    this.db = this.app.database();
  }

  /**
   * Walk every stored file and build a usage summary.
   * NOTE: the `largeFIles` key (sic) is kept as-is for compatibility with
   * existing consumers of this report (e.g. getCleanupRecommendations).
   * @returns {Promise<object>} analysis summary
   */
  async analyzeStorageUsage() {
    console.log('Starting storage usage analysis...');

    const analysis = {
      totalFiles: 0,
      totalSize: 0,
      fileTypes: {},
      largeFIles: [],
      oldFiles: [],
      duplicateFiles: []
    };

    // Get all files (example implementation)
    const files = await this.getAllFiles();

    // Hoisted out of the loop (it was recomputed per file in the original).
    const oneYearAgo = new Date();
    oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);

    files.forEach(file => {
      analysis.totalFiles++;
      analysis.totalSize += file.size;

      // Classify by file extension
      const extension = this.getFileExtension(file.name);
      if (!analysis.fileTypes[extension]) {
        analysis.fileTypes[extension] = { count: 0, size: 0 };
      }
      analysis.fileTypes[extension].count++;
      analysis.fileTypes[extension].size += file.size;

      // Identify large files (>10MB)
      if (file.size > 10 * 1024 * 1024) {
        analysis.largeFIles.push({
          name: file.name,
          size: file.size,
          sizeFormatted: this.formatFileSize(file.size),
          lastModified: file.lastModified
        });
      }

      // Identify old files (last modified more than a year ago)
      if (file.lastModified < oneYearAgo) {
        analysis.oldFiles.push({
          name: file.name,
          size: file.size,
          lastModified: file.lastModified
        });
      }
    });

    // Largest files first
    analysis.largeFIles.sort((a, b) => b.size - a.size);

    // Oldest files first
    analysis.oldFiles.sort((a, b) => a.lastModified - b.lastModified);

    return analysis;
  }

  /**
   * List every file in storage via marker-based pagination.
   * NOTE(review): example implementation — field names (`files`,
   * `nextMarker`) must match the actual cloud storage list API.
   */
  async getAllFiles() {
    const files = [];
    let marker = null;

    do {
      const result = await this.storage.listFiles({
        prefix: '',
        marker,
        maxKeys: 1000
      });

      files.push(...result.files);
      marker = result.nextMarker;
    } while (marker);

    return files;
  }

  /**
   * Lower-cased extension of a filename, or 'unknown' when it has none.
   * @param {string} filename
   * @returns {string}
   */
  getFileExtension(filename) {
    const parts = filename.split('.');
    return parts.length > 1 ? parts.pop().toLowerCase() : 'unknown';
  }

  /**
   * Human-readable file size (at most two decimal places).
   * Bug fix: the original used the Python keyword `and` in the loop
   * condition, which is a JavaScript syntax error; it is now `&&`.
   * @param {number} bytes
   * @returns {string} e.g. "1.5 KB"
   */
  formatFileSize(bytes) {
    const units = ['B', 'KB', 'MB', 'GB', 'TB'];
    let size = bytes;
    let unitIndex = 0;

    while (size >= 1024 && unitIndex < units.length - 1) {
      size /= 1024;
      unitIndex++;
    }

    return `${Math.round(size * 100) / 100} ${units[unitIndex]}`;
  }

  /**
   * Build cleanup recommendations from an analysis produced by
   * analyzeStorageUsage().
   * @param {object} analysis
   * @returns {Promise<Array>} list of recommendation records
   */
  async getCleanupRecommendations(analysis) {
    const recommendations = [];

    // Large files suggestion
    if (analysis.largeFIles.length > 0) {
      const totalLargeFileSize = analysis.largeFIles.reduce((sum, file) => sum + file.size, 0);
      recommendations.push({
        type: 'large_files',
        count: analysis.largeFIles.length,
        totalSize: this.formatFileSize(totalLargeFileSize),
        suggestion: 'Consider compressing or deleting large files',
        files: analysis.largeFIles.slice(0, 10) // show top 10
      });
    }

    // Old files suggestion
    if (analysis.oldFiles.length > 0) {
      const totalOldFileSize = analysis.oldFiles.reduce((sum, file) => sum + file.size, 0);
      recommendations.push({
        type: 'old_files',
        count: analysis.oldFiles.length,
        totalSize: this.formatFileSize(totalOldFileSize),
        suggestion: 'Consider archiving or deleting long-unused files',
        files: analysis.oldFiles.slice(0, 10)
      });
    }

    // Image-heavy storage suggestion
    const imageTypes = ['jpg', 'jpeg', 'png', 'gif', 'bmp'];
    const totalImageSize = imageTypes.reduce((sum, type) => {
      return sum + (analysis.fileTypes[type]?.size || 0);
    }, 0);

    if (totalImageSize > 100 * 1024 * 1024) { // >100MB
      recommendations.push({
        type: 'image_optimization',
        totalSize: this.formatFileSize(totalImageSize),
        suggestion: 'Consider compressing images to save storage'
      });
    }

    return recommendations;
  }
}

// Example usage: print a storage report, then cleanup suggestions.
// A .catch was added — the original chain had no rejection handler.
const analyzer = new StorageAnalyzer();
analyzer.analyzeStorageUsage().then(async analysis => {
  console.log('Storage analysis result:');
  console.log(`Total files: ${analysis.totalFiles}`);
  console.log(`Total size: ${analyzer.formatFileSize(analysis.totalSize)}`);

  console.log('\nFile type distribution:');
  Object.entries(analysis.fileTypes)
    .sort((a, b) => b[1].size - a[1].size)
    .forEach(([type, info]) => {
      console.log(` ${type}: ${info.count} files, ${analyzer.formatFileSize(info.size)}`);
    });

  const recommendations = await analyzer.getCleanupRecommendations(analysis);
  if (recommendations.length > 0) {
    console.log('\nCleanup suggestions:');
    recommendations.forEach((rec, index) => {
      console.log(`${index + 1}. ${rec.suggestion}`);
      console.log(` Impact: ${rec.count} files, ${rec.totalSize}`);
    });
  }
}).catch(error => {
  console.error('Storage analysis failed:', error);
});

Cost optimization

1. Cost analysis

Resource cost calculation:

// scripts/cost-analyzer.js
// Estimates spend from resource-usage metrics using the unit prices below.
class CostAnalyzer {
constructor() {
// Unit prices per resource category.
// NOTE(review): these rates are illustrative — confirm them against the
// provider's current pricing page before trusting the report.
this.pricing = {
functions: {
invocations: 0.0000002, // per invocation
gbSeconds: 0.000016667, // per GB-second
outboundTraffic: 0.12 // per GB outbound
},
database: {
storage: 0.07, // per GB per month
reads: 0.0000015, // per read
writes: 0.0000015 // per write
},
storage: {
storage: 0.043, // per GB per month
downloads: 0.12, // per GB download
requests: 0.0004 // per 1000 requests
},
cdn: {
traffic: 0.18 // per GB
}
};
}

// Calculate function costs
calculateFunctionCosts(metrics) {
const invocationCost = metrics.totalInvocations * this.pricing.functions.invocations;
const computeCost = (metrics.totalDuration / 1000) * (metrics.averageMemoryUsage / 1024) * this.pricing.functions.gbSeconds;
const trafficCost = metrics.outboundTraffic * this.pricing.functions.outboundTraffic;

return {
invocations: invocationCost,
compute: computeCost,
traffic: trafficCost,
total: invocationCost + computeCost + trafficCost
};
}

// Calculate database costs
calculateDatabaseCosts(metrics) {
const storageCost = metrics.storageUsed * this.pricing.database.storage;
const readCost = metrics.readOperations * this.pricing.database.reads;
const writeCost = metrics.writeOperations * this.pricing.database.writes;

return {
storage: storageCost,
reads: readCost,
writes: writeCost,
total: storageCost + readCost + writeCost
};
}

// Calculate storage costs
calculateStorageCosts(metrics) {
const storageCost = metrics.storageUsed * this.pricing.storage.storage;
const downloadCost = metrics.downloadTraffic * this.pricing.storage.downloads;
const requestCost = (metrics.totalRequests / 1000) * this.pricing.storage.requests;

return {
storage: storageCost,
downloads: downloadCost,
requests: requestCost,
total: storageCost + downloadCost + requestCost
};
}

// Calculate CDN costs
calculateCDNCosts(metrics) {
const trafficCost = metrics.bandwidth * this.pricing.cdn.traffic;

return {
traffic: trafficCost,
total: trafficCost
};
}

// Generate cost report
generateCostReport(resourceMetrics) {
const functionCosts = this.calculateFunctionCosts(resourceMetrics.functions);
const databaseCosts = this.calculateDatabaseCosts(resourceMetrics.database);
const storageCosts = this.calculateStorageCosts(resourceMetrics.storage);
const cdnCosts = this.calculateCDNCosts(resourceMetrics.cdn);

const totalCost = functionCosts.total + databaseCosts.total + storageCosts.total + cdnCosts.total;

return {
functions: functionCosts,
database: databaseCosts,
storage: storageCosts,
cdn: cdnCosts,
total: totalCost,
breakdown: {
functions: (functionCosts.total / totalCost * 100).toFixed(1),
database: (databaseCosts.total / totalCost * 100).toFixed(1),
storage: (storageCosts.total / totalCost * 100).toFixed(1),
cdn: (cdnCosts.total / totalCost * 100).toFixed(1)
}
};
}

// Generate optimization suggestions
generateOptimizationSuggestions(costReport, resourceMetrics) {
const suggestions = [];

// Functions optimization
if (costReport.functions.total > costReport.total * 0.4) {
suggestions.push({
category: 'functions',
priority: 'high',
suggestion: 'Function costs account for a large portion; optimize execution time and memory settings',
potentialSavings: costReport.functions.total * 0.2,
actions: [
'Optimize code logic to reduce execution time',
'Adjust memory configuration to avoid over-provisioning',
'Use connection pooling to reduce DB connection overhead',
'Enable function warm-up to reduce cold starts'
]
});
}

... (file truncated due to length) ...