Skip to main content

Archive old documents from a room to an archive folder

This example scans a DocSpace room (or folder), identifies files older than a defined threshold, and moves them to an archive folder in batches.

Before you start

  1. Set the DOCSPACE_API_HOST and DOCSPACE_API_KEY environment variables to your actual DocSpace portal URL and API key — the example reads them via process.env. Ensure you have the necessary data and permissions to perform these operations.
  2. Before you can make requests to the API, you need to authenticate. Check out the Personal access tokens page to learn how to obtain and use access tokens.
Full example
// Set API base URL
const API_HOST = process.env.DOCSPACE_API_HOST; // Set DOCSPACE_API_HOST in env (recommended). For quick tests you can temporarily paste your portal URL here.
const API_KEY = process.env.DOCSPACE_API_KEY; // Set DOCSPACE_API_KEY in env (recommended). For quick tests you can temporarily paste token here.

// Headers with API key for authentication.
// NOTE(review): if DOCSPACE_API_KEY is unset, process.env yields undefined and
// the header becomes "Bearer undefined" — requests will fail with 401.
const HEADERS = {
Accept: 'application/json',
Authorization: `Bearer ${API_KEY}`,
'Content-Type': 'application/json',
};

// IDs and thresholds
const ROOM_ROOT_FOLDER_ID = 539564; // replace with a real folder/room root ID
const ARCHIVE_FOLDER_ID = 341029; // replace with a real archive folder ID
const MAX_FILE_AGE_MONTHS = 1; // files at least this old (in months) are archived

// Convert months to days (simple approximation: every month counted as 30 days)
const MAX_FILE_AGE_DAYS = MAX_FILE_AGE_MONTHS * 30;

/**
 * Coerces an arbitrary value to a finite number.
 * Returns null for NaN, Infinity, or anything not convertible to a number.
 */
function toNumber(value: any) {
  const parsed = Number(value);
  if (!Number.isFinite(parsed)) {
    return null;
  }
  return parsed;
}

// Step 1: Retrieve folder or room contents
/**
 * Retrieves the files and subfolders of one DocSpace folder (or room).
 *
 * @param folderId - Folder or room root ID.
 * @returns `{ files, folders }` (arrays, possibly empty), or null when the
 *          request fails or the response payload has an unexpected shape.
 */
async function getFolderContents(folderId: string) {
  const response = await fetch(`${API_HOST}/api/2.0/files/folder/${folderId}`, {
    method: 'GET',
    headers: HEADERS,
  });

  if (!response.ok) {
    const message = await response.text();
    console.log(
      `Folder contents retrieval failed for ID ${folderId}. Status: ${response.status}, Message: ${message}`
    );
    return null;
  }

  // DocSpace wraps the payload in a top-level "response" property.
  const body = await response.json();
  const payload = body?.response ?? null;
  if (!payload || typeof payload !== 'object') {
    console.log('Unexpected folder contents format.');
    return null;
  }

  return {
    files: Array.isArray(payload.files) ? payload.files : [],
    folders: Array.isArray(payload.folders) ? payload.folders : [],
  };
}

/**
 * Picks the best available timestamp from a file entry, preferring
 * modification fields over creation fields. Returns the first truthy
 * candidate, or null when none is present.
 */
function pickFileDateString(fileEntry: any) {
  const candidateFields = ['modified', 'modifiedOn', 'updated', 'created', 'createdOn'];
  for (const field of candidateFields) {
    if (fileEntry[field]) {
      return fileEntry[field];
    }
  }
  return null;
}

/**
 * Parses a date value into a Date object.
 *
 * Fix: the previous implementation truncated the input to its first 10
 * characters before parsing, which silently corrupted non-ISO formats
 * (e.g. 'Jan 5, 2024' became 'Jan 5, 202' — parsed as year 202) and
 * discarded the time of day. We now parse the full string first and only
 * fall back to the 10-character ISO date prefix when the full parse fails
 * (e.g. an ISO date followed by non-date noise).
 *
 * @param dateStr - A date string (or any value convertible to one).
 * @returns A valid Date, or null when the value cannot be parsed.
 */
function parseDate(dateStr: any) {
  if (!dateStr) return null;

  const raw = String(dateStr);

  // Prefer the full string: keeps time-of-day and handles locale formats.
  const full = new Date(raw);
  if (!Number.isNaN(full.getTime())) return full;

  // Fallback: salvage an ISO "YYYY-MM-DD" prefix if one exists.
  const prefix = new Date(raw.slice(0, 10));
  if (!Number.isNaN(prefix.getTime())) return prefix;

  return null;
}

function getFileAgeDays(fileEntry: any) {
const dateStr = pickFileDateString(fileEntry);
const d = parseDate(dateStr);
if (!d) return null;

const today = new Date();
const diffMs = today.getTime() - d.getTime();
return Math.floor(diffMs / (1000 * 60 * 60 * 24));
}

// Step 2: Recursively scan folder tree and collect files
/**
 * Recursively walks a folder tree and collects one descriptor per file:
 * { id, title, path, ageDays }.
 *
 * Improvement: subfolders are now scanned concurrently with Promise.all
 * instead of one sequential await per subfolder. Promise.all preserves
 * array order, so the flattened result matches the sequential traversal.
 * NOTE(review): concurrency is unbounded (one request per subfolder level);
 * for very wide trees consider a concurrency limit to avoid rate limits.
 *
 * @param folderId - Folder ID to start from.
 * @param pathPrefix - Path accumulated so far (defaults to '/').
 * @returns Flat array of file descriptors; empty when the folder is
 *          unreadable (getFolderContents returned null).
 */
async function walkFolderTree(folderId: string, pathPrefix = '/') {
  const contents = await getFolderContents(folderId);
  if (!contents) return [];

  const currentPath = pathPrefix || '/';

  // Describe every file in the current folder.
  const result: any[] = contents.files.map((fileEntry: any) => {
    const title = fileEntry.title || 'Untitled';
    return {
      id: fileEntry.id,
      title,
      path: `${currentPath}${title}`,
      ageDays: getFileAgeDays(fileEntry),
    };
  });

  // Recurse into subfolders in parallel.
  const subTrees = await Promise.all(
    contents.folders.map((folderEntry: any) => {
      const folderTitle = folderEntry.title || 'Folder';
      return walkFolderTree(String(folderEntry.id), `${currentPath}${folderTitle}/`);
    })
  );
  for (const subFiles of subTrees) {
    result.push(...subFiles);
  }

  return result;
}

// Step 3: Filter old files
/**
 * Selects the files whose age (in days) meets or exceeds the archiving
 * threshold MAX_FILE_AGE_DAYS. Entries without a numeric age are skipped.
 */
function filterOldFiles(files: any[]) {
  return files.filter((fileInfo) => {
    const ageDays = toNumber(fileInfo.ageDays);
    return ageDays != null && ageDays >= MAX_FILE_AGE_DAYS;
  });
}

// Step 4: Move files to archive (in batches)
/**
 * Moves the given file IDs into ARCHIVE_FOLDER_ID via the DocSpace
 * fileops/move endpoint, 50 IDs per request.
 *
 * Stops at the first failing batch and returns false; returns true when
 * every batch succeeds (or there was nothing to move).
 */
async function moveFilesToArchive(fileIds: string[]) {
  if (!fileIds || fileIds.length === 0) {
    console.log('No files to move to archive.');
    return true;
  }

  const url = `${API_HOST}/api/2.0/files/fileops/move`;
  const BATCH_SIZE = 50;

  for (let start = 0; start < fileIds.length; start += BATCH_SIZE) {
    const batch = fileIds.slice(start, start + BATCH_SIZE);

    const res = await fetch(url, {
      method: 'PUT',
      headers: HEADERS,
      body: JSON.stringify({
        fileIds: batch,
        destFolderId: ARCHIVE_FOLDER_ID,
        deleteAfter: true,
        content: true,
        toFillOut: false,
      }),
    });

    if (!res.ok) {
      const t = await res.text();
      console.log(`Failed to move files batch. Status: ${res.status}, Message: ${t}`);
      return false;
    }

    console.log(`Moved files to archive: ${batch.join(', ')}`);
  }

  return true;
}

// Optional: send report to admin (placeholder)
/**
 * Placeholder delivery channel: prints the report to the console.
 * Replace with an email/webhook integration for real notifications.
 */
function sendReportToAdmin(reportText: string) {
  ['\n[ADMIN REPORT]', reportText].forEach((line) => console.log(line));
}

/**
 * Builds a plain-text summary of the archiving run: scan totals plus a
 * sample of up to 50 archived file paths with their ages.
 */
function buildReport(allFiles: any[], oldFiles: any[]) {
  const parts = [
    'Old documents archive report\n',
    `Total files scanned: ${allFiles.length}\n`,
    `Old files (>= ${MAX_FILE_AGE_MONTHS} months): ${oldFiles.length}\n`,
    '\nList of archived files (first 50):\n',
  ];

  const sample = oldFiles.slice(0, 50);
  if (sample.length === 0) {
    parts.push('No files were archived.\n');
  } else {
    for (const f of sample) {
      const age = f.ageDays != null ? `${f.ageDays} days` : 'unknown age';
      parts.push(`- ${f.path} (age: ${age})\n`);
    }
  }

  return parts.join('');
}

// Main workflow
// Main workflow: scan -> filter -> move -> report.
(async () => {
  try {
    console.log('Scanning room/folder tree for old documents...');
    const allFiles = await walkFolderTree(String(ROOM_ROOT_FOLDER_ID));
    console.log(`Collected ${allFiles.length} files in total.`);

    const oldFiles = filterOldFiles(allFiles);
    console.log(`Found ${oldFiles.length} files older than ${MAX_FILE_AGE_MONTHS} months.`);

    if (oldFiles.length === 0) {
      console.log('Nothing to archive. Exiting.');
      return;
    }

    // Normalize IDs to strings and drop entries without an ID.
    const fileIds = oldFiles
      .map((f) => f.id)
      .filter((id) => id != null)
      .map((id) => String(id));

    console.log('Moving old files to archive folder...');
    const moved = await moveFilesToArchive(fileIds);
    if (!moved) {
      console.log('Archiving failed. Some batches could not be moved.');
      process.exitCode = 1;
      return;
    }

    const reportText = buildReport(allFiles, oldFiles);
    sendReportToAdmin(reportText);

    console.log('\nArchiving completed.');
  } catch (err: unknown) {
    // Fix: previously an unexpected error was logged but the process still
    // exited with code 0, hiding failures from cron/CI. Signal failure.
    console.error(err instanceof Error ? err.message : err);
    process.exitCode = 1;
  }
})();

Step 1: Retrieve folder contents

A GET request is sent to /api/2.0/files/folder/:folderId to retrieve folder contents.

The response includes:

  • files: list of files in the folder
  • folders: list of subfolders to scan recursively
/**
 * Retrieves the files and subfolders of one DocSpace folder (or room).
 *
 * @param folderId - Folder or room root ID.
 * @returns `{ files, folders }` (arrays, possibly empty), or null when the
 *          request fails or the response payload has an unexpected shape.
 */
async function getFolderContents(folderId: string) {
  const response = await fetch(`${API_HOST}/api/2.0/files/folder/${folderId}`, {
    method: 'GET',
    headers: HEADERS,
  });

  if (!response.ok) {
    const message = await response.text();
    console.log(
      `Folder contents retrieval failed for ID ${folderId}. Status: ${response.status}, Message: ${message}`
    );
    return null;
  }

  // DocSpace wraps the payload in a top-level "response" property.
  const body = await response.json();
  const payload = body?.response ?? null;
  if (!payload || typeof payload !== 'object') {
    console.log('Unexpected folder contents format.');
    return null;
  }

  return {
    files: Array.isArray(payload.files) ? payload.files : [],
    folders: Array.isArray(payload.folders) ? payload.folders : [],
  };
}

Step 2: Scan folders recursively

The script walks through the folder tree starting from ROOM_ROOT_FOLDER_ID and collects all files with:

  • file ID — used later to identify the file in DocSpace API operations
  • title — the file name as displayed in the DocSpace interface
  • full path — the file location inside the room, used for reporting
  • age in days — calculated from available date fields such as creation or last update time
/**
 * Recursively walks a folder tree and collects one descriptor per file:
 * { id, title, path, ageDays }.
 *
 * Improvement: subfolders are now scanned concurrently with Promise.all
 * instead of one sequential await per subfolder. Promise.all preserves
 * array order, so the flattened result matches the sequential traversal.
 * NOTE(review): concurrency is unbounded (one request per subfolder level);
 * for very wide trees consider a concurrency limit to avoid rate limits.
 *
 * @param folderId - Folder ID to start from.
 * @param pathPrefix - Path accumulated so far (defaults to '/').
 * @returns Flat array of file descriptors; empty when the folder is
 *          unreadable (getFolderContents returned null).
 */
async function walkFolderTree(folderId: string, pathPrefix = '/') {
  const contents = await getFolderContents(folderId);
  if (!contents) return [];

  const currentPath = pathPrefix || '/';

  // Describe every file in the current folder.
  const result: any[] = contents.files.map((fileEntry: any) => {
    const title = fileEntry.title || 'Untitled';
    return {
      id: fileEntry.id,
      title,
      path: `${currentPath}${title}`,
      ageDays: getFileAgeDays(fileEntry),
    };
  });

  // Recurse into subfolders in parallel.
  const subTrees = await Promise.all(
    contents.folders.map((folderEntry: any) => {
      const folderTitle = folderEntry.title || 'Folder';
      return walkFolderTree(String(folderEntry.id), `${currentPath}${folderTitle}/`);
    })
  );
  for (const subFiles of subTrees) {
    result.push(...subFiles);
  }

  return result;
}

Step 3: Filter old files

The script compares each file age with the threshold:

  • MAX_FILE_AGE_DAYS = MAX_FILE_AGE_MONTHS * 30

Files whose age is greater than or equal to the threshold are selected for archiving.

/**
 * Selects the files whose age (in days) meets or exceeds the archiving
 * threshold MAX_FILE_AGE_DAYS. Entries without a numeric age are skipped.
 */
function filterOldFiles(files: any[]) {
  return files.filter((fileInfo) => {
    const ageDays = toNumber(fileInfo.ageDays);
    return ageDays != null && ageDays >= MAX_FILE_AGE_DAYS;
  });
}

Step 4: Move old files to the archive folder

A PUT request is sent to /api/2.0/files/fileops/move. The script moves files in batches (50 IDs per request) to ARCHIVE_FOLDER_ID.

/**
 * Moves the given file IDs into ARCHIVE_FOLDER_ID via the DocSpace
 * fileops/move endpoint, 50 IDs per request.
 *
 * Stops at the first failing batch and returns false; returns true when
 * every batch succeeds (or there was nothing to move).
 */
async function moveFilesToArchive(fileIds: string[]) {
  if (!fileIds || fileIds.length === 0) {
    console.log('No files to move to archive.');
    return true;
  }

  const url = `${API_HOST}/api/2.0/files/fileops/move`;
  const BATCH_SIZE = 50;

  for (let start = 0; start < fileIds.length; start += BATCH_SIZE) {
    const batch = fileIds.slice(start, start + BATCH_SIZE);

    const res = await fetch(url, {
      method: 'PUT',
      headers: HEADERS,
      body: JSON.stringify({
        fileIds: batch,
        destFolderId: ARCHIVE_FOLDER_ID,
        deleteAfter: true,
        content: true,
        toFillOut: false,
      }),
    });

    if (!res.ok) {
      const t = await res.text();
      console.log(`Failed to move files batch. Status: ${res.status}, Message: ${t}`);
      return false;
    }

    console.log(`Moved files to archive: ${batch.join(', ')}`);
  }

  return true;
}