// Scans a directory of .torrent files (named `<infohash>.torrent`) and merges
// their parsed file lists into torrent_files.json, keeping only infohashes
// that also appear in torrents.csv.
//
// Usage: node scan_torrent_files.js --dir <directory-of-torrent-files>
const fs = require('fs');
const path = require('path');
const readTorrent = require('read-torrent');
const argv = require('minimist')(process.argv.slice(2));

// infohash -> array of compacted file entries ({i, p, l}); reassigned as scans merge in.
let torrentFiles = {};

// Every infohash present in torrents.csv (first `;`-separated column).
const torrentCsvHashes = new Set();

const jsonFile = '../torrent_files.json';
const torrentsCsvFile = '../torrents.csv';

console.log(`Scanning torrent files from ${argv.dir} into ${jsonFile} ...`);

main();
/**
 * Entry point: load the existing JSON cache and the CSV hash set, scan the
 * target directory for new torrent files, then write the merged JSON out.
 *
 * Catches any failure so a rejected step surfaces as a logged error and a
 * non-zero exit code instead of an unhandled promise rejection (this function
 * is invoked at top level without a .catch()).
 */
async function main() {
  try {
    await fillTorrentFiles();
    await fillTorrentCsvHashes();
    await scanFolder();
    writeFile();
  } catch (err) {
    console.error('Scan failed: ' + err.message);
    process.exitCode = 1;
  }
}
/**
 * Populates `torrentFiles` from the JSON cache on disk, when one exists.
 * When there is no cache yet, the default empty object is left untouched.
 */
async function fillTorrentFiles() {
  if (!fs.existsSync(jsonFile)) {
    return;
  }
  const raw = await fs.promises.readFile(jsonFile, 'utf8');
  torrentFiles = JSON.parse(raw);
}
/**
 * Collects every infohash (first `;`-separated column of each row) from
 * torrents.csv into the module-level `torrentCsvHashes` set.
 */
async function fillTorrentCsvHashes() {
  const csv = await fs.promises.readFile(torrentsCsvFile, 'utf8');
  csv.split('\n').forEach(row => torrentCsvHashes.add(row.split(';')[0]));
  // The header row contributes the literal column name; drop it.
  torrentCsvHashes.delete('infohash');
}
/**
 * Scans `argv.dir` and merges the file lists of new torrents into
 * `torrentFiles`.
 *
 * A file is scanned only when all of the following hold:
 *   - its extension is `.torrent` (filenames are `<infohash>.torrent`),
 *   - its hash is NOT already in torrent_files.json,
 *   - its hash IS listed in torrents.csv,
 *   - it is non-empty on disk.
 */
async function scanFolder() {
  console.log('Scanning dir: ' + argv.dir + '...');

  // Hashes already present in the JSON cache; Set gives O(1) membership checks.
  const fileHashes = new Set(Object.keys(torrentFiles));

  const files = fs.readdirSync(argv.dir).filter(f => {
    const [hash, ext] = f.split('.');
    const fullPath = argv.dir + '/' + f;
    return (ext === 'torrent' &&
      !fileHashes.has(hash) &&
      torrentCsvHashes.has(hash) &&
      getFilesizeInBytes(fullPath) > 0);
  });

  for (const file of files) {
    const fullPath = argv.dir + '/' + file;
    console.log(`Scanning File ${fullPath}`);
    // On a parse failure `torrent` is undefined, which makes the spread below
    // a no-op, so one bad file never aborts the whole scan.
    const torrent = await read(fullPath)
      .catch(e => console.log(`Read error for ${fullPath}: ${e.message}`));
    torrentFiles = { ...torrentFiles, ...torrent }; // merge under the new infohash
  }

  console.log('Done scanning.');
}
/**
 * Returns the size of `filename` in bytes.
 * Throws (via fs.statSync) when the file does not exist.
 */
function getFilesizeInBytes(filename) {
  return fs.statSync(filename).size;
}
/**
 * Prunes `torrentFiles` down to hashes still present in torrents.csv,
 * orders the keys lexicographically, and writes the result to
 * torrent_files.json, pretty-printed with 2-space indentation.
 */
function writeFile() {
  // Rebuild the map explicitly instead of a reduce() with a comma-operator
  // body; same result, clearer intent.
  const pruned = {};
  for (const hash of Object.keys(torrentFiles).sort()) {
    if (torrentCsvHashes.has(hash)) {
      pruned[hash] = torrentFiles[hash];
    }
  }
  torrentFiles = pruned;
  fs.writeFileSync(jsonFile, JSON.stringify(torrentFiles, null, 2));
  console.log(`${jsonFile} written.`);
}
/**
 * Promisified wrapper around the callback-based read-torrent API.
 *
 * Resolves to `{ [infohash]: files }` where each file entry is compacted to
 * short keys (i = index, p = path, l = length); rejects with the original
 * read-torrent error after logging it.
 */
function read(uri, options) {
  return new Promise((resolve, reject) => {
    readTorrent(uri, (err, info) => {
      if (err) {
        console.error('Error in read-torrent: ' + err.message + ' for torrent uri: ' + uri);
        reject(err);
        return;
      }
      // Compact each file entry in place, dropping fields we never use.
      if (info.files) {
        info.files.forEach((f, i) => {
          f.i = i;
          f.p = f.path;
          f.l = f.length;
          delete f.name;
          delete f.offset;
          delete f.path;
          delete f.length;
        });
      }
      resolve({ [info.infoHash]: info.files });
    });
  });
}