Adding a hash file fetcher.
commit f77da92e5c (parent 993e6d4f08)
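In short: main() gains an opt-in FETCH_SITES flag so scraping the torrent sites no longer runs unconditionally, plus a new HASH_FILE option that reads a plain text file of infohashes (one per row) and downloads each of them; the Cloudflare cookie helper script swaps cloudscraper for cfscrape; and the nightly shell script passes the new -f flag so it keeps scraping the sites as before.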
@@ -1,3 +1,5 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
 [[package]]
 name = "adler32"
 version = "1.0.3"
@@ -1,7 +1,7 @@
-import cloudscraper
+import cfscrape
 request = "GET / HTTP/1.1\r\n"
 
-cookie_value, user_agent = cloudscraper.get_cookie_string("https://itorrents.org/torrent/B415C913643E5FF49FE37D304BBB5E6E11AD5101.torrent")
+cookie_value, user_agent = cfscrape.get_cookie_string("https://itorrents.org/torrent/B415C913643E5FF49FE37D304BBB5E6E11AD5101.torrent")
 request += "Cookie: %s\r\nUser-Agent: %s\r\n" % (cookie_value, user_agent)
 # cookie = "Cookie: %s" % (cookie_value)
 
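Both libraries expose a get_cookie_string(url) helper that solves Cloudflare's JavaScript challenge for the given URL and returns a (cookie_string, user_agent) pair, so the swap above is a drop-in change for this snippet. The pair is then spliced into the raw HTTP request as Cookie and User-Agent headers, which is presumably what fetch_cloudflare_cookie() on the Rust side relies on.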
@@ -10,6 +10,8 @@ use std::fs;
 use std::path::Path;
 use std::process::Command;
 use std::{thread, time};
+use std::io::{BufRead, BufReader};
+use std::fs::File;
 
 static mut COOKIE: &str = "";
 static mut USER_AGENT: &str = "";
@@ -21,38 +23,67 @@ fn main() {
     .about("Fetches new torrent files from various sites.")
     .arg(
       Arg::with_name("TORRENT_SAVE_DIR")
         .short("s")
         .long("save_dir")
         .value_name("DIR")
         .takes_value(true)
         .help("Where to save the torrent files.")
         .required(true),
     )
+    .arg(
+      Arg::with_name("FETCH_SITES")
+        .short("fs")
+        .long("fetch_sites")
+        .help("Fetches from various torrent sites.")
+    )
+    .arg(
+      Arg::with_name("HASH_FILE")
+        .short("hf")
+        .long("hash_file")
+        .value_name("FILE")
+        .takes_value(true)
+        .help("The location of a file containing rows of infohashes. If given, it will download those infohashes."),
+    )
     .arg(
       Arg::with_name("TORRENTS_CSV_FILE")
         .short("t")
         .long("torrents_csv")
         .value_name("FILE")
         .takes_value(true)
         .help("The location of a torrents.csv file. If given, it will download those infohashes."),
     )
     .get_matches();
 
   let save_dir = Path::new(matches.value_of("TORRENT_SAVE_DIR").unwrap());
 
   fetch_cloudflare_cookie();
 
-  // torrentz2(save_dir);
-  thepiratebay(save_dir);
-  magnetdl(save_dir);
-  leetx(save_dir);
-  skytorrents(save_dir);
+  if matches.is_present("FETCH_SITES") {
+    // torrentz2(save_dir);
+    thepiratebay(save_dir);
+    magnetdl(save_dir);
+    leetx(save_dir);
+    skytorrents(save_dir);
+  }
+
+  if let Some(t) = matches.value_of("HASH_FILE") {
+    hash_file_scan(Path::new(t), save_dir);
+  }
 
   if let Some(t) = matches.value_of("TORRENTS_CSV_FILE") {
     torrents_csv_scan(Path::new(t), save_dir);
   }
 }
 
+fn hash_file_scan(hash_file: &Path, save_dir: &Path) {
+  let f = File::open(&hash_file).expect("Unable to open file");
+  let f = BufReader::new(f);
+  for line in f.lines() {
+    let hash = line.expect("Unable to read line");
+    fetch_torrent(hash, save_dir);
+  }
+}
+
 fn torrents_csv_scan(torrents_csv_file: &Path, save_dir: &Path) {
   for hash in collect_info_hashes(torrents_csv_file) {
     fetch_torrent(hash, save_dir);
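The new hash_file_scan() mirrors torrents_csv_scan(), which calls collect_info_hashes(); that helper is defined elsewhere in the file and is not part of this diff. A minimal sketch of what such a helper could look like, assuming the semicolon-separated torrents.csv layout the nightly script sorts on, with the infohash taken to be the first column; only the name and signature come from the call site above, the body is an assumption:

use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::Path;

// Hypothetical sketch; the real collect_info_hashes() lives outside this diff.
fn collect_info_hashes(torrents_csv_file: &Path) -> Vec<String> {
  let f = File::open(torrents_csv_file).expect("Unable to open torrents.csv");
  BufReader::new(f)
    .lines()
    .filter_map(|line| line.ok())
    // Rows are ';' separated; take the first column as the infohash.
    .filter_map(|row| row.split(';').next().map(str::to_owned))
    // Keep only 40-character hex strings; this also skips the header row.
    .filter(|hash| hash.len() == 40 && hash.chars().all(|c| c.is_ascii_hexdigit()))
    .collect()
}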
@@ -285,9 +316,9 @@ fn leetx(save_dir: &Path) {
 
   for row in document.find(
     Class("table-list")
       .descendant(Name("tbody"))
      .descendant(Name("tr")),
   ) {
     let detail_page_url_col = match row.find(Class("coll-1")).nth(0) {
       Some(t) => t,
       None => continue,
@@ -312,10 +343,10 @@ fn leetx(save_dir: &Path) {
     let hash = match detail_document
       .find(Class("infohash-box").descendant(Name("span")))
       .nth(0)
     {
       Some(t) => t.text().to_lowercase(),
       None => continue,
     };
 
     fetch_torrent(hash, save_dir);
   }
@@ -328,7 +359,7 @@ fn fetch_torrent(hash: String, save_dir: &Path) {
   let url = format!(
     "https://itorrents.org/torrent/{}.torrent",
     &hash.to_ascii_uppercase()
   );
 
   let full_path = save_dir
     .join(&file_name)
@@ -340,15 +371,15 @@ fn fetch_torrent(hash: String, save_dir: &Path) {
   unsafe {
     Command::new("curl")
       .args(&[
        &url,
        "-H",
        USER_AGENT,
        "-H",
        COOKIE,
        "--compressed",
        "-o",
        &full_path,
        "-s",
      ])
      .output()
      .expect("curl command failed");
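The curl call sits in an unsafe block because it reads the static mut COOKIE and USER_AGENT strings declared at the top of the file, presumably filled in by fetch_cloudflare_cookie(); each is passed to curl as a complete header line via -H, while -o writes the response to the computed .torrent path and -s keeps the download quiet.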
@@ -1,7 +1,7 @@
 # This fetches from several torrent websites for new updates
 cd ../new_torrents_fetcher
 sort -r --field-separator=';' -n --key=5 ../torrents.csv > ../torrents.csv.seeders.desc
-cargo run --release -- -s "$1" -t ../torrents.csv.seeders.desc
+cargo run --release -- -s "$1" -f -t ../torrents.csv.seeders.desc
 rm ../torrents.csv.seeders.desc
 cd ../scripts
 . scan_torrents.sh "$1"
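Since site scraping is now gated behind FETCH_SITES, the nightly script adds -f to preserve its previous behaviour; without it, this run would only process the sorted torrents.csv passed via -t. A hash-file run would presumably be invoked the same way with the new long flag, e.g. something like: cargo run --release -- -s "$1" --hash_file hashes.txt (a hypothetical invocation; only the flags themselves come from this commit).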