
Cloud Storage

Overview

Cloud Storage provides highly available, stable, and secure storage for any amount of unstructured data in any format, such as videos and images. Within cloud functions, you can access Cloud Storage via either the Node.js SDK or the HTTP API to upload, download, delete, and manage files.

Features of Cloud Storage

  • Secure and Reliable: Provides multiple data backups to ensure data security.
  • Elastic Scaling: Supports massive file storage with pay-as-you-go pricing.
  • High Performance: Global CDN acceleration for fast access.
  • Permission Control: Flexible access permission management.
  • Multi-format Support: Supports various file formats including images, videos, documents, and more.

Node.js SDK Access

Initialization Configuration

Before using Cloud Storage in a cloud function, you need to initialize the SDK:

const tcb = require('@cloudbase/node-sdk');

// Initialize TCB
const app = tcb.init({
  env: 'your-env-id',
  secretId: 'your-secret-id',
  secretKey: 'your-secret-key'
});

// Obtain the Cloud Storage instance
const storage = app.storage();
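
When the function runs inside the same CloudBase environment, you can usually avoid hard-coding keys and let the SDK pick up credentials from the runtime. A minimal sketch, assuming your SDK version exposes tcb.SYMBOL_CURRENT_ENV (verify against the SDK you have installed):

// Minimal sketch: initialize against the current environment without hard-coded keys.
// Assumes tcb.SYMBOL_CURRENT_ENV is available in your @cloudbase/node-sdk version.
const tcbAuto = require('@cloudbase/node-sdk');

const autoApp = tcbAuto.init({
  env: tcbAuto.SYMBOL_CURRENT_ENV // use the environment the function is deployed in
});

const autoStorage = autoApp.storage();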

File Upload

exports.main = async (event, context) => {
  try {
    // Obtain the file data (Base64-encoded) from the event
    const { fileContent, fileName, contentType } = event;

    // Convert Base64 to Buffer
    const buffer = Buffer.from(fileContent, 'base64');

    // Upload the file
    const result = await storage.uploadFile({
      cloudPath: `uploads/${Date.now()}_${fileName}`, // Cloud path
      fileContent: buffer, // File content
      contentType: contentType || 'application/octet-stream' // File type
    });

    return {
      success: true,
      fileID: result.fileID,
      downloadURL: result.download_url,
      message: 'File uploaded successfully'
    };

  } catch (error) {
    console.error('File upload failed:', error);
    return {
      success: false,
      error: error.message
    };
  }
};
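
The handler above expects the file content to arrive Base64-encoded in the event. A minimal caller sketch, assuming the handler is deployed under the hypothetical name upload-file and invoked through the SDK's callFunction:

// Minimal sketch: read a local file, Base64-encode it, and invoke the upload
// handler above. The function name 'upload-file' is a placeholder.
const fs = require('fs');

async function uploadLocalFile(localPath, fileName) {
  const fileContent = fs.readFileSync(localPath).toString('base64');

  const res = await app.callFunction({
    name: 'upload-file', // hypothetical deployed name of the handler above
    data: {
      fileName: fileName,
      fileContent: fileContent,
      contentType: 'application/octet-stream'
    }
  });

  return res.result; // { success, fileID, downloadURL, message }
}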

File Download

exports.main = async (event, context) => {
  try {
    const { fileID } = event;

    // Obtain file download link
    const result = await storage.getFileDownloadURL({
      fileList: [fileID]
    });

    if (result.fileList && result.fileList.length > 0) {
      const fileInfo = result.fileList[0];

      return {
        success: true,
        downloadURL: fileInfo.download_url,
        tempFileURL: fileInfo.tempFileURL,
        maxAge: fileInfo.maxAge
      };
    } else {
      throw new Error('File does not exist');
    }

  } catch (error) {
    console.error('Failed to obtain download link:', error);
    return {
      success: false,
      error: error.message
    };
  }
};

File Deletion

exports.main = async (event, context) => {
  try {
    const { fileID } = event;

    // Delete the file
    const result = await storage.deleteFile({
      fileList: [fileID]
    });

    if (result.fileList && result.fileList.length > 0) {
      const deleteResult = result.fileList[0];

      if (deleteResult.code === 'SUCCESS') {
        return {
          success: true,
          message: 'File deleted successfully'
        };
      } else {
        throw new Error(deleteResult.message || 'Deletion failed');
      }
    }

    throw new Error('Empty deletion result');

  } catch (error) {
    console.error('Failed to delete file:', error);
    return {
      success: false,
      error: error.message
    };
  }
};

HTTP API Access

Basic Configuration

Using the HTTP API to access Cloud Storage requires authentication to be configured in advance:

const axios = require('axios');

// HTTP API Basic Configuration
const config = {
  baseURL: 'https://your-env-id.api.tcloudbasegateway.com',
  accessToken: 'your-access-token' // access token obtained through authentication
};

// General method for sending HTTP requests
async function callStorageAPI(endpoint, method = 'POST', data = null) {
  try {
    const response = await axios({
      method: method,
      url: `${config.baseURL}${endpoint}`,
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${config.accessToken}`
      },
      data: data
    });

    return response.data;
  } catch (error) {
    console.error('HTTP API invocation failed:', error);
    throw error;
  }
}

Obtain Object Upload Information

exports.main = async (event, context) => {
  try {
    const { objectIds } = event; // array of object IDs

    // Construct the request body
    const requestBody = objectIds.map(objectId => ({
      objectId: objectId
    }));

    // Call the Obtain Object Upload Information API
    const result = await callStorageAPI('/v1/storages/get-objects-upload-info', 'POST', requestBody);

    // Handle the response result
    const uploadInfos = result.map(item => {
      if (item.code) {
        // Failure case
        return {
          objectId: item.objectId,
          success: false,
          error: {
            code: item.code,
            message: item.message
          }
        };
      } else {
        // Success case
        return {
          objectId: item.objectId,
          success: true,
          uploadUrl: item.uploadUrl,
          downloadUrl: item.downloadUrl,
          downloadUrlEncoded: item.downloadUrlEncoded,
          token: item.token,
          authorization: item.authorization,
          cloudObjectMeta: item.cloudObjectMeta,
          cloudObjectId: item.cloudObjectId
        };
      }
    });

    return {
      success: true,
      uploadInfos: uploadInfos
    };

  } catch (error) {
    console.error('Failed to obtain upload information:', error);
    return {
      success: false,
      error: error.message
    };
  }
};
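
The upload information only grants credentials; the file body still has to be sent to the returned uploadUrl. A minimal sketch of that second step, assuming the object store accepts an HTTP PUT with the returned authorization and token passed as headers (the header names below are assumptions — confirm them against the official API reference):

// Minimal sketch: PUT the file body using the credentials returned above.
// Header names (Authorization, x-cos-security-token, x-cos-meta-fileid) are
// assumptions — check the API reference for the exact ones.
const axios = require('axios');

async function putObject(uploadInfo, buffer, contentType) {
  await axios.put(uploadInfo.uploadUrl, buffer, {
    headers: {
      'Content-Type': contentType || 'application/octet-stream',
      'Authorization': uploadInfo.authorization,      // signature from get-objects-upload-info
      'x-cos-security-token': uploadInfo.token,       // temporary credential token
      'x-cos-meta-fileid': uploadInfo.cloudObjectMeta // object metadata returned by the API
    }
  });

  // After a successful PUT, the file should be reachable via uploadInfo.downloadUrl
  return uploadInfo.downloadUrl;
}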

File Download API

exports.main = async (event, context) => {
  try {
    const { objectId } = event;

    // Obtain upload information (including download links)
    const result = await callStorageAPI('/v1/storages/get-objects-upload-info', 'POST', [
      { objectId: objectId }
    ]);

    if (result.length === 0 || result[0].code) {
      throw new Error(result[0]?.message || 'Failed to obtain file information');
    }

    const fileInfo = result[0];

    return {
      success: true,
      objectId: objectId,
      downloadUrl: fileInfo.downloadUrl,
      downloadUrlEncoded: fileInfo.downloadUrlEncoded,
      cloudObjectId: fileInfo.cloudObjectId
    };

  } catch (error) {
    console.error('Failed to obtain download link:', error);
    return {
      success: false,
      error: error.message
    };
  }
};

File Deletion API

// Note: The Obtain Upload Information API in the official documentation is primarily intended for uploads; deletion requires a different API.
// The following is a generic deletion example.

exports.main = async (event, context) => {
  try {
    const { fileIds } = event; // Cloud Storage file ID array

    // Call the file deletion API (adjust the endpoint according to the actual API documentation)
    const result = await callStorageAPI('/v1/storages/delete-files', 'POST', {
      fileIds: fileIds
    });

    return {
      success: true,
      result: result,
      message: 'File deletion request sent'
    };

  } catch (error) {
    console.error('Failed to delete file:', error);
    return {
      success: false,
      error: error.message
    };
  }
};

File Operations

Image Processing

exports.main = async (event, context) => {
  try {
    const { fileID, width, height, quality } = event;

    // Obtain raw download link
    const urlResult = await storage.getFileDownloadURL({
      fileList: [fileID]
    });

    const originalURL = urlResult.fileList[0].download_url;

    // Construct image processing parameters
    const imageParams = [];
    if (width) imageParams.push(`w_${width}`);
    if (height) imageParams.push(`h_${height}`);
    if (quality) imageParams.push(`q_${quality}`);

    // Generate processed image URL
    const processedURL = `${originalURL}?imageView2/2/${imageParams.join('/')}`;

    return {
      success: true,
      originalURL: originalURL,
      processedURL: processedURL,
      params: imageParams.join('/')
    };

  } catch (error) {
    console.error('Image processing failed:', error);
    return {
      success: false,
      error: error.message
    };
  }
};

File Information Query

// axios is required for the HEAD request below
const axios = require('axios');

exports.main = async (event, context) => {
  try {
    const { fileID } = event;

    // Obtain file information
    const urlResult = await storage.getFileDownloadURL({
      fileList: [fileID]
    });

    if (urlResult.fileList && urlResult.fileList.length > 0) {
      const fileInfo = urlResult.fileList[0];

      // Obtain detailed file information via a HEAD request
      const response = await axios.head(fileInfo.download_url);

      return {
        success: true,
        fileID: fileID,
        downloadURL: fileInfo.download_url,
        size: response.headers['content-length'],
        contentType: response.headers['content-type'],
        lastModified: response.headers['last-modified'],
        etag: response.headers['etag']
      };
    } else {
      throw new Error('File does not exist');
    }

  } catch (error) {
    console.error('Failed to obtain file information:', error);
    return {
      success: false,
      error: error.message
    };
  }
};

File List Query

exports.main = async (event, context) => {
  try {
    const { prefix, limit, offset } = event;

    // Note: The Cloud Storage SDK does not directly support file list queries;
    // use the COS SDK or maintain your own file index instead.

    // Here we provide an example of maintaining a file index via a database.
    const db = app.database();
    const collection = db.collection('file_index');

    let query = collection;

    // Filter by prefix
    if (prefix) {
      query = query.where({
        cloudPath: db.RegExp({
          regexp: `^${prefix}`,
          options: 'i'
        })
      });
    }

    // Paginated query
    const result = await query
      .skip(offset || 0)
      .limit(limit || 20)
      .orderBy('uploadTime', 'desc')
      .get();

    return {
      success: true,
      files: result.data,
      total: result.data.length,
      hasMore: result.data.length === (limit || 20)
    };

  } catch (error) {
    console.error('File list query failed:', error);
    return {
      success: false,
      error: error.message
    };
  }
};
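
For the index-based listing above to work, every upload has to write a matching record into the file_index collection. A minimal sketch, reusing the collection name and field names from the example above (adjust them to your own schema):

// Minimal sketch: record each upload in the file_index collection so that the
// list query above has something to read. Field names mirror the example schema.
async function uploadAndIndex(cloudPath, buffer, contentType) {
  const uploadResult = await storage.uploadFile({
    cloudPath: cloudPath,
    fileContent: buffer,
    contentType: contentType || 'application/octet-stream'
  });

  const db = app.database();
  await db.collection('file_index').add({
    fileID: uploadResult.fileID, // used later by deleteFile / getFileDownloadURL
    cloudPath: cloudPath,        // used by the prefix filter
    uploadTime: new Date()       // used by the scheduled cleanup example below
  });

  return uploadResult;
}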

Advanced Features

Permission Management

exports.main = async (event, context) => {
  try {
    const { fileID } = event;

    // Obtain a temporary download link with a custom validity period
    const result = await storage.getFileDownloadURL({
      fileList: [{
        fileID: fileID,
        maxAge: 7200 // valid for 2 hours
      }]
    });

    if (result.fileList && result.fileList.length > 0) {
      const fileInfo = result.fileList[0];

      return {
        success: true,
        fileID: fileID,
        downloadURL: fileInfo.download_url,
        maxAge: fileInfo.maxAge
      };
    }

    throw new Error('File does not exist');

  } catch (error) {
    console.error('Failed to obtain temporary download link:', error);
    return {
      success: false,
      error: error.message
    };
  }
};

CDN Acceleration

exports.main = async (event, context) => {
  try {
    const { fileID, enableCDN } = event;

    // Obtain file download link
    const result = await storage.getFileDownloadURL({
      fileList: [fileID]
    });

    let downloadURL = result.fileList[0].download_url;

    if (enableCDN) {
      // Replace with CDN domain name (needs to be configured in the TCB console)
      downloadURL = downloadURL.replace(
        /https:\/\/[^.]+\.tcb\.qcloud\.la/,
        'https://your-cdn-domain.com'
      );
    }

    return {
      success: true,
      originalURL: result.fileList[0].download_url,
      cdnURL: downloadURL,
      cdnEnabled: enableCDN
    };

  } catch (error) {
    console.error('CDN configuration failed:', error);
    return {
      success: false,
      error: error.message
    };
  }
};

File Sync

// axios is used to download the source file
const axios = require('axios');

exports.main = async (event, context) => {
  try {
    const { sourceFileID, targetPath, targetEnv } = event;

    // Obtain the source file download link
    const downloadResult = await storage.getFileDownloadURL({
      fileList: [sourceFileID]
    });

    const downloadURL = downloadResult.fileList[0].download_url;

    // Download the file content
    const response = await axios({
      method: 'GET',
      url: downloadURL,
      responseType: 'arraybuffer'
    });

    // Initialize the target environment
    const targetApp = tcb.init({
      env: targetEnv,
      secretId: process.env.SECRET_ID,
      secretKey: process.env.SECRET_KEY
    });

    const targetStorage = targetApp.storage();

    // Upload to the target environment
    const uploadResult = await targetStorage.uploadFile({
      cloudPath: targetPath,
      fileContent: Buffer.from(response.data)
    });

    return {
      success: true,
      sourceFileID: sourceFileID,
      targetFileID: uploadResult.fileID,
      targetURL: uploadResult.download_url
    };

  } catch (error) {
    console.error('File synchronization failed:', error);
    return {
      success: false,
      error: error.message
    };
  }
};

Scheduled Cleanup

exports.main = async (event, context) => {
  try {
    const { daysToKeep = 30, pathPrefix = 'temp/' } = event;

    // Obtain the list of files to be cleaned (queried from the database index)
    const db = app.database();
    const collection = db.collection('file_index');

    const cutoffDate = new Date();
    cutoffDate.setDate(cutoffDate.getDate() - daysToKeep);

    const result = await collection
      .where({
        cloudPath: db.RegExp({
          regexp: `^${pathPrefix}`,
          options: 'i'
        }),
        uploadTime: db.command.lt(cutoffDate)
      })
      .get();

    const filesToDelete = result.data;

    if (filesToDelete.length === 0) {
      return {
        success: true,
        message: 'No files to clean',
        deletedCount: 0
      };
    }

    // Batch delete files
    const fileIDs = filesToDelete.map(file => file.fileID);
    const deleteResult = await storage.deleteFile({
      fileList: fileIDs
    });

    // Delete records from the database
    const deletePromises = filesToDelete.map(file =>
      collection.doc(file._id).remove()
    );

    await Promise.all(deletePromises);

    return {
      success: true,
      deletedCount: fileIDs.length,
      message: `Successfully cleaned up ${fileIDs.length} expired files`
    };

  } catch (error) {
    console.error('Scheduled cleanup failed:', error);
    return {
      success: false,
      error: error.message
    };
  }
};

Best Practices

File Naming Convention

const path = require('path');

// Generate a canonical file path
function generateFilePath(category, userId, originalName) {
  const timestamp = Date.now();
  const randomStr = Math.random().toString(36).slice(2, 10);
  const ext = path.extname(originalName);
  const baseName = path.basename(originalName, ext);

  // Sanitize special characters in filenames
  const cleanName = baseName.replace(/[^a-zA-Z0-9\u4e00-\u9fa5]/g, '_');

  return `${category}/${userId}/${timestamp}_${randomStr}_${cleanName}${ext}`;
}

// Usage example
const filePath = generateFilePath('avatars', 'user123', 'User Avatar.jpg');
// Output: avatars/user123/1640995200000_abc12345_User_Avatar.jpg

File Type Validation

const path = require('path');

// File type validation: returns the MIME type if the extension is allowed, otherwise a falsy value
function validateFileType(fileName, allowedTypes) {
  const ext = path.extname(fileName).toLowerCase();
  const mimeTypes = {
    '.jpg': 'image/jpeg',
    '.jpeg': 'image/jpeg',
    '.png': 'image/png',
    '.gif': 'image/gif',
    '.pdf': 'application/pdf',
    '.doc': 'application/msword',
    '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
  };

  return allowedTypes.includes(ext) && mimeTypes[ext];
}

// File size validation
function validateFileSize(fileContent, maxSizeMB) {
  const buffer = Buffer.from(fileContent, 'base64');
  const sizeMB = buffer.length / (1024 * 1024);

  return sizeMB <= maxSizeMB;
}

// Usage example
exports.main = async (event, context) => {
  const { fileName, fileContent } = event;

  // Validate file type
  const allowedTypes = ['.jpg', '.jpeg', '.png', '.gif'];
  const contentType = validateFileType(fileName, allowedTypes);

  if (!contentType) {
    return {
      success: false,
      error: 'Unsupported file type'
    };
  }

  // Validate file size (Limit: 5MB)
  if (!validateFileSize(fileContent, 5)) {
    return {
      success: false,
      error: 'File size exceeds the limit (5MB)'
    };
  }

  // Continue upload logic...
};

Error Handling and Retry

// File operations with retry mechanism
async function executeWithRetry(operation, maxRetries = 3) {
  let lastError;

  for (let i = 0; i < maxRetries; i++) {
    try {
      return await operation();
    } catch (error) {
      lastError = error;

      // Determine whether the error is retryable
      if (isRetryableError(error) && i < maxRetries - 1) {
        const delay = Math.pow(2, i) * 1000; // Exponential backoff
        await new Promise(resolve => setTimeout(resolve, delay));
        continue;
      }

      throw error;
    }
  }

  throw lastError;
}

function isRetryableError(error) {
  const retryableCodes = [
    'ECONNRESET',
    'ETIMEDOUT',
    'ENOTFOUND',
    'NETWORK_ERROR'
  ];

  return retryableCodes.includes(error.code) ||
    error.message.includes('timeout') ||
    error.message.includes('network');
}

// Usage example
exports.main = async (event, context) => {
  try {
    const result = await executeWithRetry(async () => {
      return await storage.uploadFile({
        cloudPath: event.path,
        fileContent: Buffer.from(event.content, 'base64')
      });
    });

    return { success: true, result };
  } catch (error) {
    console.error('File operation failed after retries:', error);
    return { success: false, error: error.message };
  }
};

Performance Optimization

// File upload performance optimization
class FileUploadOptimizer {
  constructor() {
    this.uploadQueue = [];
    this.processing = false;
    this.maxConcurrent = 3;
  }

  async addUpload(uploadTask) {
    return new Promise((resolve, reject) => {
      this.uploadQueue.push({ task: uploadTask, resolve, reject });
      this.processQueue();
    });
  }

  async processQueue() {
    if (this.processing || this.uploadQueue.length === 0) {
      return;
    }

    this.processing = true;

    while (this.uploadQueue.length > 0) {
      const batch = this.uploadQueue.splice(0, this.maxConcurrent);

      const promises = batch.map(async ({ task, resolve, reject }) => {
        try {
          const result = await task();
          resolve(result);
        } catch (error) {
          reject(error);
        }
      });

      await Promise.all(promises);
    }

    this.processing = false;
  }
}

// Global Upload Optimizer
const uploadOptimizer = new FileUploadOptimizer();

exports.main = async (event, context) => {
  const { files } = event; // Multiple files

  try {
    const uploadTasks = files.map(file => () =>
      storage.uploadFile({
        cloudPath: generateFilePath('uploads', context.requestId, file.name),
        fileContent: Buffer.from(file.content, 'base64')
      })
    );

    const results = await Promise.all(
      uploadTasks.map(task => uploadOptimizer.addUpload(task))
    );

    return {
      success: true,
      files: results.map(result => ({
        fileID: result.fileID,
        downloadURL: result.download_url
      }))
    };

  } catch (error) {
    console.error('Batch upload failed:', error);
    return {
      success: false,
      error: error.message
    };
  }
};
Tip
  • It is recommended to set different storage paths for different types of files
  • Using CDN acceleration can significantly improve file access speed
  • Regularly clean up temporary and expired files to save storage costs
  • It is recommended to set up backup and version control for important files
Warning
  • Be sure to validate the type and size before uploading files
  • Sensitive files should have appropriate access permissions configured
  • Avoid using special characters in filenames
  • It is recommended to configure file access monitoring and alerts in the production environment