#!/bin/bash
|
|
|
|
|
|
|
|
# Checking arguments
|
|
|
|
# Help line
|
2019-02-06 18:02:13 +00:00
|
|
|
|
|
|
|
# Locations of the data files, resolved relative to the directory this
# script is run from (one level above the current working directory).
torrents_csv="$(pwd)/../torrents.csv"
scanned_out="$(pwd)/../infohashes_scanned.txt"
# Per-run scratch dir; $RANDOM keeps concurrent runs from colliding.
tmp_torrent_dir="$(pwd)/../tmp_torrents-$RANDOM"
torrent_files_csv="$(pwd)/../torrent_files.csv"

# Ensure the scanned list exists so the grep/cat below don't fail on a
# fresh checkout. Quoted: an unquoted path breaks if pwd contains spaces.
touch "$scanned_out"
|
|
|
|
|
2018-11-25 23:21:52 +00:00
|
|
|
# Usage text, shown for -h or when no torrents directory is supplied.
help="Run ./scan_torrents.sh [TORRENTS_DIR] \nor goto https://gitlab.com/dessalines/torrents.csv for more help"

if [ "$1" == "-h" ] || [ -z "$1" ]; then
  # Quoted so the message isn't word-split/globbed before echo sees it;
  # -e expands the \n sequences embedded in $help.
  echo -e "$help"
  exit 1
fi

# First (and only) positional argument: the directory to scan.
torrents_dir="$1"
echo "Torrents dir=$torrents_dir"
|
|
|
|
|
|
|
|
# Check dependencies

# 'command -v' is the portable way to test for an installed executable.
if command -v "torrent-tracker-health" >/dev/null 2>&1 ; then
  echo "torrent-tracker-health installed."
else
  echo -e "Installing torrent-tracker-health:\nnpm i -g dessalines/torrent-tracker-health \nhttps://github.com/dessalines/torrent-tracker-health\n"
  # Fixed: was 'npm i -g install dessalines/torrent-tracker-health' — the
  # stray 'install' token made npm also install the unrelated package
  # literally named 'install'.
  npm i -g dessalines/torrent-tracker-health
fi
|
|
|
|
|
|
|
|
# Loop over all torrents
# Bail out if the torrents dir can't be entered — otherwise the rest of
# the script would operate on whatever directory we happened to be in.
pushd "$torrents_dir" || exit 1

# Copy the unscanned torrent files to a temp dir.
# -p: don't fail if the dir already exists (e.g. a re-run with the same $RANDOM).
mkdir -p "$tmp_torrent_dir"

# TODO need to find a better way to do this for huge dirs
# Skip any torrent whose path contains a line from the already-scanned
# list (fixed-string match via grep -F). read -r keeps backslashes in
# filenames intact.
find "$(pwd)" -name "*.torrent" | grep -vFf "$scanned_out" | while read -r torrent_file ; do
  cp "$torrent_file" "$tmp_torrent_dir"
done
|
2018-11-25 23:21:52 +00:00
|
|
|
|
2019-01-28 23:01:03 +00:00
|
|
|
# Split these into many directories ( since torrent-tracker-health can't do too many full size torrents)
cd $tmp_torrent_dir

# Earlier pure-bash equivalent of the parallel one-liner, kept for reference:
# i=1;while read l;do mkdir $i;mv $l $((i++));done< <(ls|xargs -n100)

# GNU parallel: consume 100 filenames per job (-n100); for each batch,
# create a directory named after the job number ({#}) and move the
# batch's files ({}) into it. Result: numbered sub-dirs of <=100 torrents.
ls|parallel -n100 mkdir {#}\;mv {} {#}
|
2019-01-26 08:25:07 +00:00
|
|
|
|
2019-01-28 23:01:03 +00:00
|
|
|
# Scrape each batch sub-directory created by the parallel split above.
for tmp_torrent_dir_sub in *; do
  echo "sub dir:$tmp_torrent_dir/$tmp_torrent_dir_sub"

  # Record this batch's file basenames with the .torrent suffix stripped
  # (presumably the infohashes — verify against how infohashes_scanned.txt
  # is produced/consumed elsewhere).
  find $tmp_torrent_dir_sub -type f -exec basename {} .torrent \; > names.out

  # Delete null torrents from the temp dir (files under 2k are treated as
  # broken/empty and removed before scraping)
  find $tmp_torrent_dir_sub -name "*.torrent" -size -2k -delete

  if [ -z "$(ls -A $tmp_torrent_dir_sub)" ]; then
    # Everything in this batch was pruned above — nothing to scrape.
    echo "No new torrents."
  else
    # Scrape it: query the trackers for every torrent in the batch;
    # JSON output lands in health.out in the tmp dir.
    torrent-tracker-health --torrent "$tmp_torrent_dir_sub"/ > health.out

    # Convert the json results to csv format
    # Created is sometimes null, and a weird date — entries with a null
    # created date are dropped; the date is truncated to minute precision
    # (.[0:16]) and converted to a unix epoch. Output columns, joined by
    # ';': hash;name;length;created;seeders;leechers;completed;scraped-at.
    results=$(jq -r '.results[] | select (.created != null ) | [.hash, .name, .length, (.created | .[0:16] | strptime("%Y-%m-%dT%H:%M") | mktime), .seeders, .leechers, .completed, (now | floor)] | join(";")' health.out)

    # If there are no results
    if [ -z "$results" ]; then
      echo "There were no results for some reason."
      # Dump the raw scrape output to aid debugging.
      cat health.out
    else
      echo "Torrents.csv updated with new torrents."
      # Update the torrents.csv and infohashes scanned file, so these
      # torrents are skipped on the next run.
      echo -e "$results" >> $torrents_csv
      cat names.out >> $scanned_out
    fi
  fi
done
|
2018-11-26 17:08:18 +00:00
|
|
|
|
2019-01-28 23:01:03 +00:00
|
|
|
# Return to the original working directory.
popd

# Post-process the CSV data; sourced (not executed) so it runs in this
# shell and can see this script's variables — see prune.sh for details.
. prune.sh

# Remove the temp dir
rm -rf "$tmp_torrent_dir"
|
2018-11-25 23:21:52 +00:00
|
|
|
|
2019-02-06 18:02:13 +00:00
|
|
|
# Scan the torrent dir for new files, and add them (presumably into
# torrent_files.csv — confirm in scan_torrent_files.js). The V8 old-space
# heap is raised to ~8GB for large torrent collections.
# NOTE(review): 8096 was likely intended to be 8192 (8*1024) — confirm.
node --max-old-space-size=8096 scan_torrent_files.js --dir "$torrents_dir"
|
2018-11-25 23:21:52 +00:00
|
|
|
|