Cloud Storage
Overview
Cloud Storage provides you with highly available, stable, and secure cloud storage services, supporting the storage of any amount and format of unstructured data such as videos and images. Within cloud functions, you can access Cloud Storage via either the Node.js SDK or HTTP API to upload, download, delete, and manage files.
Features of Cloud Storage
- Secure and Reliable: Provides multiple data backups to ensure data security.
- Elastic Scaling: Supports massive file storage with pay-as-you-go pricing.
- High Performance: Global CDN acceleration for fast access.
- Permission Control: Flexible access permission management.
- Multi-format Support: Supports various file formats including images, videos, documents, and more.
📄️ Node.js SDK Access
Operating Cloud storage using the Node.js SDK within a cloud function
📄️ HTTP API Access
Accessing Cloud storage services via the HTTP API interface
📄️ File Operations
Basic operations including file upload, download, deletion, and more
📄️ Advanced Features
Batch operations, permission management, CDN acceleration, and other advanced features
Node.js SDK Access
Initialization Configuration
Before using Cloud storage in a cloud function, you need to initialize the SDK:
// Bring in the CloudBase (TCB) Node.js SDK.
const tcb = require('@cloudbase/node-sdk');
// Initialize TCB
// NOTE(review): secretId/secretKey are placeholders — in production load
// credentials from environment variables rather than hard-coding them.
const app = tcb.init({
env: 'your-env-id',
secretId: 'your-secret-id',
secretKey: 'your-secret-key'
});
// Obtain the Cloud storage instance
// `storage` is reused by all of the example handlers below.
const storage = app.storage();
File Upload
- Buffer Upload
- Streaming Upload
- Batch Upload
exports.main = async (event, context) => {
try {
// Obtain the file data (Base64-encoded) from the event
const { fileContent, fileName, contentType } = event;
// Convert Base64 to Buffer
const buffer = Buffer.from(fileContent, 'base64');
// Upload documents.
const result = await storage.uploadFile({
cloudPath: `uploads/${Date.now()}_${fileName}`, // Cloud path
fileContent: buffer, // File content
contentType: contentType || 'application/octet-stream' // File type
});
return {
success: true,
fileID: result.fileID,
downloadURL: result.download_url,
message: 'File uploaded successfully'
};
} catch (error) {
console.error('File upload failed:', error);
return {
success: false,
error: error.message
};
}
};
const fs = require('fs');
const path = require('path');
exports.main = async (event, context) => {
try {
// Create a temporary file
const tempFilePath = `/tmp/${Date.now()}_temp_file`;
const { fileContent, fileName } = event;
// Write to temporary file
fs.writeFileSync(tempFilePath, Buffer.from(fileContent, 'base64'));
// Create a read stream
const fileStream = fs.createReadStream(tempFilePath);
// Upload file stream
const result = await storage.uploadFile({
cloudPath: `streams/${fileName}`,
fileContent: fileStream
});
// Clean up temporary files
fs.unlinkSync(tempFilePath);
return {
success: true,
fileID: result.fileID,
downloadURL: result.download_url
};
} catch (error) {
console.error('Streaming upload failed:', error);
return {
success: false,
error: error.message
};
}
};
exports.main = async (event, context) => {
try {
const { files } = event; // files is an array of files
// Upload multiple files concurrently
const uploadPromises = files.map(async (file, index) => {
const buffer = Buffer.from(file.content, 'base64');
return await storage.uploadFile({
cloudPath: `batch/${Date.now()}_${index}_${file.name}`,
fileContent: buffer,
contentType: file.type
});
});
const results = await Promise.all(uploadPromises);
return {
success: true,
files: results.map(result => ({
fileID: result.fileID,
downloadURL: result.download_url
})),
message: `Successfully uploaded ${results.length} files`
};
} catch (error) {
console.error('Batch upload failed:', error);
return {
success: false,
error: error.message
};
}
};
File Download
- Get Download Link
- Download File Content
- Batch Download
exports.main = async (event, context) => {
try {
const { fileID } = event;
// Obtain file download link
const result = await storage.getFileDownloadURL({
fileList: [fileID]
});
if (result.fileList && result.fileList.length > 0) {
const fileInfo = result.fileList[0];
return {
success: true,
downloadURL: fileInfo.download_url,
tempFileURL: fileInfo.tempFileURL,
maxAge: fileInfo.maxAge
};
} else {
throw new Error('File does not exist');
}
} catch (error) {
console.error('Failed to obtain download link:', error);
return {
success: false,
error: error.message
};
}
};
const axios = require('axios');
exports.main = async (event, context) => {
try {
const { fileID } = event;
// Obtain download link
const urlResult = await storage.getFileDownloadURL({
fileList: [fileID]
});
const downloadURL = urlResult.fileList[0].download_url;
// Download file content
const response = await axios({
method: 'GET',
url: downloadURL,
responseType: 'arraybuffer'
});
// Convert to Base64
const base64Content = Buffer.from(response.data).toString('base64');
return {
success: true,
content: base64Content,
contentType: response.headers['content-type'],
size: response.data.length
};
} catch (error) {
console.error('Download file failed:', error);
return {
success: false,
error: error.message
};
}
};
// Batch download: resolves download URLs for many file IDs, then fetches
// each file in parallel, returning per-file success/failure plus a summary.
// NOTE(review): relies on `axios` and `storage` being in scope from the
// surrounding example snippets.
exports.main = async (event, context) => {
try {
const { fileIDs } = event; // array of file IDs
// Batch obtain download links
const result = await storage.getFileDownloadURL({
fileList: fileIDs
});
// Download file content in parallel
const downloadPromises = result.fileList.map(async (fileInfo) => {
try {
const response = await axios({
method: 'GET',
url: fileInfo.download_url,
responseType: 'arraybuffer',
timeout: 30000
});
// Base64-encode the raw bytes so they survive JSON serialization.
return {
fileID: fileInfo.fileID,
success: true,
content: Buffer.from(response.data).toString('base64'),
size: response.data.length
};
} catch (error) {
// A single failed download does not abort the batch — it is
// reported as a per-file failure instead.
return {
fileID: fileInfo.fileID,
success: false,
error: error.message
};
}
});
const downloads = await Promise.all(downloadPromises);
return {
success: true,
files: downloads,
summary: {
total: downloads.length,
success: downloads.filter(d => d.success).length,
failed: downloads.filter(d => !d.success).length
}
};
} catch (error) {
console.error('Batch download failed:', error);
return {
success: false,
error: error.message
};
}
};
File Deletion
- Delete Single File
- Batch Delete
exports.main = async (event, context) => {
try {
const { fileID } = event;
// Delete the document.
const result = await storage.deleteFile({
fileList: [fileID]
});
if (result.fileList && result.fileList.length > 0) {
const deleteResult = result.fileList[0];
if (deleteResult.code === 'SUCCESS') {
return {
success: true,
message: 'File deleted successfully'
};
} else {
throw new Error(deleteResult.message || 'Deletion failed');
}
}
} catch (error) {
console.error('Failed to delete file:', error);
return {
success: false,
error: error.message
};
}
};
exports.main = async (event, context) => {
try {
const { fileIDs } = event; // array of file IDs
// Batch deleting files
const result = await storage.deleteFile({
fileList: fileIDs
});
const deleteResults = result.fileList || [];
const successCount = deleteResults.filter(item => item.code === 'SUCCESS').length;
const failedCount = deleteResults.length - successCount;
return {
success: failedCount === 0,
summary: {
total: deleteResults.length,
success: successCount,
failed: failedCount
},
details: deleteResults.map(item => ({
fileID: item.fileID,
success: item.code === 'SUCCESS',
message: item.message
}))
};
} catch (error) {
console.error('Batch deletion failed:', error);
return {
success: false,
error: error.message
};
}
};
HTTP API Access
Basic Configuration
Using HTTP API to access Cloud storage requires prior authentication configuration:
const axios = require('axios');
// HTTP API Basic Configuration
// NOTE(review): baseURL/accessToken are placeholders; the access token is
// presumably obtained via a separate authentication flow — confirm against
// the platform's auth documentation.
const config = {
baseURL: 'https://your-env-id.api.tcloudbasegateway.com',
accessToken: 'your-access-token' // access token obtained through authentication
};
// General method for sending HTTP requests.
// endpoint: path appended to config.baseURL; method defaults to 'POST';
// data: JSON request body (or null for body-less requests).
// Returns the parsed response body; logs and rethrows any axios error.
async function callStorageAPI(endpoint, method = 'POST', data = null) {
try {
const response = await axios({
method: method,
url: `${config.baseURL}${endpoint}`,
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${config.accessToken}`
},
data: data
});
return response.data;
} catch (error) {
console.error('HTTP API invocation failed:', error);
throw error;
}
}
Obtain Object Upload Information
- Obtain Upload Information
- Direct File Upload
- Batch Upload
exports.main = async (event, context) => {
try {
const { objectIds } = event; // array of object IDs
// Construct the request body
const requestBody = objectIds.map(objectId => ({
objectId: objectId
}));
// Call the Obtain Object Upload Information API
const result = await callStorageAPI('/v1/storages/get-objects-upload-info', 'POST', requestBody);
// Handle the response result
const uploadInfos = result.map(item => {
if (item.code) {
// Failure case
return {
objectId: item.objectId,
success: false,
error: {
code: item.code,
message: item.message
}
};
} else {
// Success case
return {
objectId: item.objectId,
success: true,
uploadUrl: item.uploadUrl,
downloadUrl: item.downloadUrl,
downloadUrlEncoded: item.downloadUrlEncoded,
token: item.token,
authorization: item.authorization,
cloudObjectMeta: item.cloudObjectMeta,
cloudObjectId: item.cloudObjectId
};
}
});
return {
success: true,
uploadInfos: uploadInfos
};
} catch (error) {
console.error('Failed to obtain upload information:', error);
return {
success: false,
error: error.message
};
}
};
// Direct upload via HTTP API: first fetch pre-signed upload info for the
// objectId, then PUT the raw bytes to the returned uploadUrl with the
// credential headers the API handed back.
exports.main = async (event, context) => {
try {
const { objectId, fileContent } = event; // objectId: file path, fileContent: Base64-encoded file content
// Step 1: Obtain upload information
const uploadInfoResult = await callStorageAPI('/v1/storages/get-objects-upload-info', 'POST', [
{ objectId: objectId }
]);
// A `code` field on the entry signals a per-object failure.
if (uploadInfoResult.length === 0 || uploadInfoResult[0].code) {
throw new Error(uploadInfoResult[0]?.message || 'Failed to obtain upload information');
}
const uploadInfo = uploadInfoResult[0];
// Step 2: Use the returned information to directly upload the file
const fileBuffer = Buffer.from(fileContent, 'base64');
const uploadResponse = await axios({
method: 'PUT',
url: uploadInfo.uploadUrl,
headers: {
'Authorization': uploadInfo.authorization,
'X-Cos-Security-Token': uploadInfo.token,
'X-Cos-Meta-Fileid': uploadInfo.cloudObjectMeta,
'Content-Type': 'application/octet-stream'
},
data: fileBuffer
});
if (uploadResponse.status === 200) {
return {
success: true,
objectId: objectId,
cloudObjectId: uploadInfo.cloudObjectId,
downloadUrl: uploadInfo.downloadUrl,
downloadUrlEncoded: uploadInfo.downloadUrlEncoded,
message: 'File uploaded successfully'
};
} else {
throw new Error('File upload failed');
}
} catch (error) {
console.error('Direct File Upload failed:', error);
return {
success: false,
error: error.message
};
}
};
// Batch HTTP-API upload: fetch upload info for all objectIds in one call,
// then PUT each file's bytes concurrently with a per-file 30s timeout.
// Per-file failures are reported in `results` without aborting the batch.
exports.main = async (event, context) => {
try {
const { files } = event; // files: [{ objectId, fileContent }]
// Step 1: Batch obtain upload information
const objectIds = files.map(file => ({ objectId: file.objectId }));
const uploadInfoResults = await callStorageAPI('/v1/storages/get-objects-upload-info', 'POST', objectIds);
// Step 2: Upload files concurrently
// NOTE(review): assumes the API returns entries in the same order as the
// request — results are matched to files by index.
const uploadPromises = files.map(async (file, index) => {
const uploadInfo = uploadInfoResults[index];
if (uploadInfo.code) {
return {
objectId: file.objectId,
success: false,
error: uploadInfo.message
};
}
try {
const fileBuffer = Buffer.from(file.fileContent, 'base64');
const uploadResponse = await axios({
method: 'PUT',
url: uploadInfo.uploadUrl,
headers: {
'Authorization': uploadInfo.authorization,
'X-Cos-Security-Token': uploadInfo.token,
'X-Cos-Meta-Fileid': uploadInfo.cloudObjectMeta,
'Content-Type': 'application/octet-stream'
},
data: fileBuffer,
timeout: 30000 // 30 seconds timeout
});
return {
objectId: file.objectId,
success: true,
cloudObjectId: uploadInfo.cloudObjectId,
downloadUrl: uploadInfo.downloadUrl
};
} catch (error) {
// Individual upload failure — recorded, batch continues.
return {
objectId: file.objectId,
success: false,
error: error.message
};
}
});
const results = await Promise.all(uploadPromises);
const successCount = results.filter(r => r.success).length;
const failedCount = results.length - successCount;
return {
success: failedCount === 0,
summary: {
total: results.length,
success: successCount,
failed: failedCount
},
results: results
};
} catch (error) {
console.error('Batch upload failed:', error);
return {
success: false,
error: error.message
};
}
};
File Download API
exports.main = async (event, context) => {
try {
const { objectId } = event;
// Obtain upload information (including download links)
const result = await callStorageAPI('/v1/storages/get-objects-upload-info', 'POST', [
{ objectId: objectId }
]);
if (result.length === 0 || result[0].code) {
throw new Error(result[0]?.message || 'Failed to obtain file information');
}
const fileInfo = result[0];
return {
success: true,
objectId: objectId,
downloadUrl: fileInfo.downloadUrl,
downloadUrlEncoded: fileInfo.downloadUrlEncoded,
cloudObjectId: fileInfo.cloudObjectId
};
} catch (error) {
console.error('Failed to obtain download link:', error);
return {
success: false,
error: error.message
};
}
};
File Deletion API
// Note: The Obtain Upload Information API in the official documentation is primarily used for uploads; deletion operations require the use of other APIs.
// Below is a generic deletion method example.
// NOTE(review): '/v1/storages/delete-files' is a placeholder endpoint —
// verify the actual path and request shape against the HTTP API docs.
exports.main = async (event, context) => {
try {
const { fileIds } = event; // Cloud storage file ID array
// Call the file deletion API (adjust the specific endpoint according to the actual API documentation)
const result = await callStorageAPI('/v1/storages/delete-files', 'POST', {
fileIds: fileIds
});
return {
success: true,
result: result,
message: 'File deletion request sent'
};
} catch (error) {
console.error('Failed to delete file:', error);
return {
success: false,
error: error.message
};
}
};
File Operations
Image Processing
- Image Resizing
- Image Watermark
exports.main = async (event, context) => {
try {
const { fileID, width, height, quality } = event;
// Obtain raw download link
const urlResult = await storage.getFileDownloadURL({
fileList: [fileID]
});
const originalURL = urlResult.fileList[0].download_url;
// Construct image processing parameters
const imageParams = [];
if (width) imageParams.push(`w_${width}`);
if (height) imageParams.push(`h_${height}`);
if (quality) imageParams.push(`q_${quality}`);
// Generate processed image URL
const processedURL = `${originalURL}?imageView2/2/${imageParams.join('/')}`;
return {
success: true,
originalURL: originalURL,
processedURL: processedURL,
params: imageParams.join('/')
};
} catch (error) {
console.error('Image processing failed:', error);
return {
success: false,
error: error.message
};
}
};
exports.main = async (event, context) => {
try {
const { fileID, watermarkText, position, opacity } = event;
// Obtain raw download link
const urlResult = await storage.getFileDownloadURL({
fileList: [fileID]
});
const originalURL = urlResult.fileList[0].download_url;
// Construct watermark parameters
const watermarkParams = [
'watermark/2', // text watermark
`text/${Buffer.from(watermarkText).toString('base64')}`, // watermark text
`gravity/${position || 'southeast'}`, // position
`dissolve/${opacity || 80}` // opacity
];
const watermarkedURL = `${originalURL}?${watermarkParams.join('/')}`;
return {
success: true,
originalURL: originalURL,
watermarkedURL: watermarkedURL
};
} catch (error) {
console.error('Failed to add watermark:', error);
return {
success: false,
error: error.message
};
}
};
File Information Query
exports.main = async (event, context) => {
try {
const { fileID } = event;
// Obtain file information
const urlResult = await storage.getFileDownloadURL({
fileList: [fileID]
});
if (urlResult.fileList && urlResult.fileList.length > 0) {
const fileInfo = urlResult.fileList[0];
// Obtain detailed file information
const response = await axios.head(fileInfo.download_url);
return {
success: true,
fileID: fileID,
downloadURL: fileInfo.download_url,
size: response.headers['content-length'],
contentType: response.headers['content-type'],
lastModified: response.headers['last-modified'],
etag: response.headers['etag']
};
} else {
throw new Error('File does not exist');
}
} catch (error) {
console.error('Failed to obtain file information:', error);
return {
success: false,
error: error.message
};
}
};
File List Query
// File list query backed by a database index: the storage SDK shown here
// exposes no list API, so uploads are assumed to be indexed in the
// 'file_index' collection (cloudPath, uploadTime) — see the note below.
exports.main = async (event, context) => {
try {
const { prefix, limit, offset } = event;
// Note: The Cloud storage SDK does not directly support file list queries.
// Requires implementation via the COS SDK or maintaining a file index
// Here we provide an example of maintaining a file index via a database.
const db = app.database();
const collection = db.collection('file_index');
let query = collection;
// Filter by prefix (case-insensitive anchored regex)
if (prefix) {
query = query.where({
cloudPath: db.RegExp({
regexp: `^${prefix}`,
options: 'i'
})
});
}
// Pagination query, newest uploads first; defaults: offset 0, page size 20
const result = await query
.skip(offset || 0)
.limit(limit || 20)
.orderBy('uploadTime', 'desc')
.get();
// hasMore is a heuristic: a full page suggests more records may exist.
return {
success: true,
files: result.data,
total: result.data.length,
hasMore: result.data.length === (limit || 20)
};
} catch (error) {
console.error('File list query failed:', error);
return {
success: false,
error: error.message
};
}
};
Advanced Features
Permission Management
- Set Public Read
- Private Access Control
exports.main = async (event, context) => {
try {
const { fileID } = event;
// Obtain the download link (permanently valid)
const result = await storage.getFileDownloadURL({
fileList: [{
fileID: fileID,
maxAge: 7200 // 2-hour valid period
}]
});
if (result.fileList && result.fileList.length > 0) {
const fileInfo = result.fileList[0];
return {
success: true,
fileID: fileID,
publicURL: fileInfo.download_url,
maxAge: fileInfo.maxAge
};
}
} catch (error) {
console.error('Failed to set public read permission:', error);
return {
success: false,
error: error.message
};
}
};
exports.main = async (event, context) => {
try {
const { fileID, userId, permission } = event;
// Record file permissions in the database
const db = app.database();
const collection = db.collection('file_permissions');
await collection.add({
fileID: fileID,
userId: userId,
permission: permission, // 'read', 'write', 'delete'
createdAt: new Date(),
createdBy: context.requestId
});
return {
success: true,
message: 'Permission settings successful'
};
} catch (error) {
console.error('Failed to set file permissions:', error);
return {
success: false,
error: error.message
};
}
};
// Check file access permissions
exports.checkPermission = async (event, context) => {
try {
const { fileID, userId, action } = event;
const db = app.database();
const collection = db.collection('file_permissions');
const result = await collection
.where({
fileID: fileID,
userId: userId,
permission: action
})
.get();
return {
success: true,
hasPermission: result.data.length > 0
};
} catch (error) {
console.error('Failed to check permissions:', error);
return {
success: false,
error: error.message
};
}
};
CDN Acceleration
exports.main = async (event, context) => {
try {
const { fileID, enableCDN } = event;
// Obtain file download link
const result = await storage.getFileDownloadURL({
fileList: [fileID]
});
let downloadURL = result.fileList[0].download_url;
if (enableCDN) {
// Replace with CDN domain name (needs to be configured in the TCB console)
downloadURL = downloadURL.replace(
/https:\/\/[^.]+\.tcb\.qcloud\.la/,
'https://your-cdn-domain.com'
);
}
return {
success: true,
originalURL: result.fileList[0].download_url,
cdnURL: downloadURL,
cdnEnabled: enableCDN
};
} catch (error) {
console.error('CDN configuration failed:', error);
return {
success: false,
error: error.message
};
}
};
File Sync
// Cross-environment file sync: download the source file's bytes, then
// upload them into a second TCB environment.
// NOTE(review): depends on `storage`, `axios`, and `tcb` from earlier
// snippets, and on SECRET_ID/SECRET_KEY environment variables.
exports.main = async (event, context) => {
try {
const { sourceFileID, targetPath, targetEnv } = event;
// Download source file
const downloadResult = await storage.getFileDownloadURL({
fileList: [sourceFileID]
});
const downloadURL = downloadResult.fileList[0].download_url;
// Download file content (arraybuffer preserves binary data)
const response = await axios({
method: 'GET',
url: downloadURL,
responseType: 'arraybuffer'
});
// Initialize the target environment with its own credentials
const targetApp = tcb.init({
env: targetEnv,
secretId: process.env.SECRET_ID,
secretKey: process.env.SECRET_KEY
});
const targetStorage = targetApp.storage();
// Upload to the target environment
const uploadResult = await targetStorage.uploadFile({
cloudPath: targetPath,
fileContent: Buffer.from(response.data)
});
return {
success: true,
sourceFileID: sourceFileID,
targetFileID: uploadResult.fileID,
targetURL: uploadResult.download_url
};
} catch (error) {
console.error('File synchronization failed:', error);
return {
success: false,
error: error.message
};
}
};
Scheduled Cleanup
exports.main = async (event, context) => {
try {
const { daysToKeep = 30, pathPrefix = 'temp/' } = event;
// Obtain the list of files to be cleaned (queried from the database index)
const db = app.database();
const collection = db.collection('file_index');
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - daysToKeep);
const result = await collection
.where({
cloudPath: db.RegExp({
regexp: `^${pathPrefix}`,
options: 'i'
}),
uploadTime: db.command.lt(cutoffDate)
})
.get();
const filesToDelete = result.data;
if (filesToDelete.length === 0) {
return {
success: true,
message: 'No files to clean',
deletedCount: 0
};
}
// Batch deleting files
const fileIDs = filesToDelete.map(file => file.fileID);
const deleteResult = await storage.deleteFile({
fileList: fileIDs
});
// Delete records from the database
const deletePromises = filesToDelete.map(file =>
collection.doc(file._id).remove()
);
await Promise.all(deletePromises);
return {
success: true,
deletedCount: fileIDs.length,
message: `Successfully cleaned up ${fileIDs.length} expired files`
};
} catch (error) {
console.error('Scheduled cleanup failed:', error);
return {
success: false,
error: error.message
};
}
};
Best Practices
File Naming Convention
// Generate a canonical, collision-resistant cloud storage path:
// <category>/<userId>/<timestamp>_<random8>_<sanitizedName><ext>
function generateFilePath(category, userId, originalName) {
  const timestamp = Date.now();
  // Fix: String.prototype.substr is deprecated — use slice; pad so the
  // random segment is always exactly 8 characters.
  const randomStr = Math.random().toString(36).slice(2, 10).padEnd(8, '0');
  const ext = path.extname(originalName);
  const baseName = path.basename(originalName, ext);
  // Sanitize special characters in filenames (keeps ASCII alphanumerics and CJK).
  const cleanName = baseName.replace(/[^a-zA-Z0-9\u4e00-\u9fa5]/g, '_');
  return `${category}/${userId}/${timestamp}_${randomStr}_${cleanName}${ext}`;
}
// Usage example
const filePath = generateFilePath('avatars', 'user123', 'User Avatar.jpg');
// Output: avatars/user123/1640995200000_abc12345_User_Avatar.jpg
// (fix: the space in the original name is sanitized to '_')
File Type Validation
// File Type Validation: returns the MIME type for an allowed extension,
// or false when the extension is not in allowedTypes.
function validateFileType(fileName, allowedTypes) {
  const extensionToMime = {
    '.jpg': 'image/jpeg',
    '.jpeg': 'image/jpeg',
    '.png': 'image/png',
    '.gif': 'image/gif',
    '.pdf': 'application/pdf',
    '.doc': 'application/msword',
    '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
  };
  const extension = path.extname(fileName).toLowerCase();
  if (!allowedTypes.includes(extension)) {
    return false;
  }
  return extensionToMime[extension];
}
// File Size Validation: true when the decoded payload fits within maxSizeMB.
function validateFileSize(fileContent, maxSizeMB) {
  const decoded = Buffer.from(fileContent, 'base64');
  return decoded.length / (1024 * 1024) <= maxSizeMB;
}
// Usage example
// Validates type and size before uploading; returns an error result early
// when either check fails. Relies on validateFileType/validateFileSize above.
exports.main = async (event, context) => {
const { fileName, fileContent } = event;
// Validate file type
const allowedTypes = ['.jpg', '.jpeg', '.png', '.gif'];
const contentType = validateFileType(fileName, allowedTypes);
if (!contentType) {
return {
success: false,
error: 'Unsupported file type'
};
}
// Validate file size (Limit: 5MB)
if (!validateFileSize(fileContent, 5)) {
return {
success: false,
error: 'File size exceeds the limit (5MB)'
};
}
// Continue upload logic...
};
Error Handling and Retry
// File operations with retry mechanism
/**
 * Run `operation` up to `maxRetries` times, retrying only errors that
 * look transient (see isRetryableError) with exponential backoff
 * (1s, 2s, 4s, ...). Non-retryable errors are rethrown immediately.
 */
async function executeWithRetry(operation, maxRetries = 3) {
  let lastError;
  for (let attempt = 0; attempt < maxRetries; attempt++) {
    try {
      return await operation();
    } catch (error) {
      lastError = error;
      // Retry only transient failures, and only while attempts remain.
      if (isRetryableError(error) && attempt < maxRetries - 1) {
        const delay = Math.pow(2, attempt) * 1000; // Exponential backoff
        await new Promise(resolve => setTimeout(resolve, delay));
        continue;
      }
      throw error;
    }
  }
  throw lastError;
}

/**
 * Heuristic for transient errors: known network error codes, or a message
 * mentioning timeout/network.
 * Fix: the original threw a TypeError when `error.message` was undefined
 * (e.g. bare { code } objects or non-Error throw values).
 */
function isRetryableError(error) {
  const retryableCodes = [
    'ECONNRESET',
    'ETIMEDOUT',
    'ENOTFOUND',
    'NETWORK_ERROR'
  ];
  if (retryableCodes.includes(error.code)) {
    return true;
  }
  const message = error.message || '';
  return message.includes('timeout') || message.includes('network');
}
// Usage example
exports.main = async (event, context) => {
try {
const result = await executeWithRetry(async () => {
return await storage.uploadFile({
cloudPath: event.path,
fileContent: Buffer.from(event.content, 'base64')
});
});
return { success: true, result };
} catch (error) {
console.error('File operation finally failed:', error);
return { success: false, error: error.message };
}
};
Performance Optimization
// File upload performance optimization: a bounded-concurrency task queue
// that drains pending uploads in batches of `maxConcurrent`.
class FileUploadOptimizer {
  constructor() {
    // Pending entries, each pairing a task thunk with its promise callbacks.
    this.uploadQueue = [];
    this.processing = false;
    this.maxConcurrent = 3;
  }

  // Enqueue an async task; the returned promise settles with the task's outcome.
  async addUpload(uploadTask) {
    return new Promise((resolve, reject) => {
      this.uploadQueue.push({ task: uploadTask, resolve, reject });
      this.processQueue();
    });
  }

  // Drain the queue; re-entrant calls are no-ops while a drain is running.
  async processQueue() {
    if (this.processing || this.uploadQueue.length === 0) {
      return;
    }
    this.processing = true;
    while (this.uploadQueue.length > 0) {
      const batch = this.uploadQueue.splice(0, this.maxConcurrent);
      // Promise.resolve().then(...) also routes synchronous throws into reject.
      const settled = batch.map((entry) =>
        Promise.resolve()
          .then(entry.task)
          .then(entry.resolve, entry.reject)
      );
      await Promise.all(settled);
    }
    this.processing = false;
  }
}
// Global Upload Optimizer
// Module-level instance, so concurrent invocations on the same function
// instance share one throttled queue (maxConcurrent = 3, see the class above).
const uploadOptimizer = new FileUploadOptimizer();
exports.main = async (event, context) => {
const { files } = event; // Multiple files
try {
// Each task is a thunk so the optimizer decides when each upload starts.
const uploadTasks = files.map(file => () =>
storage.uploadFile({
cloudPath: generateFilePath('uploads', context.requestId, file.name),
fileContent: Buffer.from(file.content, 'base64')
})
);
const results = await Promise.all(
uploadTasks.map(task => uploadOptimizer.addUpload(task))
);
return {
success: true,
files: results.map(result => ({
fileID: result.fileID,
downloadURL: result.download_url
}))
};
} catch (error) {
console.error('Batch upload failed:', error);
return {
success: false,
error: error.message
};
}
};
Related Documentation
📄️ Node.js SDK Storage API
Detailed API documentation for Node.js SDK Cloud storage
📄️ Cloud storage HTTP API
Cloud storage HTTP API interface documentation
- It is recommended to set different storage paths for different types of files
- Using CDN acceleration can significantly improve file access speed
- Regularly clean up temporary and expired files to save storage costs
- It is recommended to set up backup and version control for important files
- Be sure to validate the type and size before uploading files
- Sensitive files should have appropriate access permissions configured
- Avoid using special characters in filenames
- It is recommended to configure file access monitoring and alarms in the production environment