// torrents.csv/scripts/scan_torrent_files.js

// jq -r 'to_entries[] | {hash: .key, val: .value[]} | {hash: .hash, i: .val.i, p: .val.p, l: .val.l}' torrent_files.json
// jq -r 'to_entries[] | {hash: .key, val: .value[]} | [.hash, .val.i, .val.p, .val.l] | join(";")' torrent_files.json
const fs = require('fs');
const path = require('path');
const readTorrent = require('read-torrent');
const argv = require('minimist')(process.argv.slice(2));

// Mutable module state: map of infoHash -> reduced file list ({ i, p, l }).
let torrentFiles = {};
// Seed/output location for the persisted index.
const jsonFile = '../torrent_files.json';

// main() is async: without a .catch() a rejection (bad --dir, corrupt JSON,
// parse failure) would be an unhandled promise rejection. Surface it and
// signal failure via the exit code instead.
main().catch(err => {
    console.error(err);
    process.exitCode = 1;
});
// Entry point. Order matters: seed the index from a previous run, then
// scan the target directory for new torrents, then persist the result.
async function main() {
await fillTorrentFiles();
await scanFolder();
writeFile();
}
/**
 * Seed the in-memory index from a previous run's JSON file, if present.
 * Leaves torrentFiles untouched when the file does not exist.
 */
async function fillTorrentFiles() {
    if (!fs.existsSync(jsonFile)) return;
    const raw = await fs.promises.readFile(jsonFile, 'utf8');
    torrentFiles = JSON.parse(raw);
}
/**
 * Scan argv.dir for *.torrent files whose info-hash is not yet in the
 * index, parse each one, and merge the results into torrentFiles.
 * Individual parse failures are logged and skipped (best-effort).
 */
async function scanFolder() {
    console.log('Scanning dir: ' + argv.dir + '...');
    // Known hashes as a Set: O(1) membership test instead of rebuilding
    // Object.keys(...) and scanning it for every directory entry (O(n^2)).
    const known = new Set(Object.keys(torrentFiles));
    const files = fs.readdirSync(argv.dir).filter(name => {
        // path.extname/basename handle names containing extra dots, which
        // the old split('.') logic misparsed (it took f[1] as the extension
        // and also var-shadowed the callback parameter).
        const ext = path.extname(name);
        const hash = path.basename(name, ext);
        return ext === '.torrent' && !known.has(hash);
    });
    for (const file of files) {
        const fullPath = argv.dir + '/' + file;
        console.log(`Scanning File ${fullPath}`);
        try {
            // read() resolves to { [infoHash]: files }; merge in place
            // instead of re-spreading the whole accumulator per file.
            Object.assign(torrentFiles, await read(fullPath));
        } catch (e) {
            // Preserve the original best-effort behavior: log and continue.
            console.log('Read error');
        }
    }
    console.log('Done scanning.');
}
/**
 * Rebuild the index with keys in default (lexicographic) sorted order,
 * then persist it to jsonFile as compact JSON.
 */
function writeFile() {
    const sorted = {};
    for (const hash of Object.keys(torrentFiles).sort()) {
        sorted[hash] = torrentFiles[hash];
    }
    torrentFiles = sorted;
    fs.writeFileSync(jsonFile, JSON.stringify(torrentFiles));
    console.log(`${jsonFile} written.`);
}
/**
 * Promisified wrapper around the callback-based read-torrent API.
 * Resolves to { [infoHash]: files } where each file entry is reduced
 * in place to compact single-letter keys: i (index), p (path), l (length).
 * On failure, logs and rejects with the original error.
 */
function read(uri, options) {
    return new Promise((resolve, reject) => {
        readTorrent(uri, (err, info) => {
            if (err) {
                console.error('Error in read-torrent: ' + err.message + ' for torrent uri: ' + uri);
                reject(err);
                return;
            }
            if (info.files) {
                // Strip the verbose fields, keeping only the compact trio.
                info.files.forEach((entry, index) => {
                    entry.i = index;
                    entry.p = entry.path;
                    entry.l = entry.length;
                    delete entry.name;
                    delete entry.offset;
                    delete entry.path;
                    delete entry.length;
                });
            }
            resolve({ [info.infoHash]: info.files });
        });
    });
}