From 2544dfb8a5782d97bfd8c6a422675bf088043241 Mon Sep 17 00:00:00 2001
From: Dessalines
Date: Mon, 28 Jan 2019 15:27:11 -0800
Subject: [PATCH] Starting to add torrentz2

---
 new_torrents_fetcher/.gitignore  |  1 +
 new_torrents_fetcher/src/main.rs | 86 ++++++++++++++++++++++++++------
 2 files changed, 72 insertions(+), 15 deletions(-)

diff --git a/new_torrents_fetcher/.gitignore b/new_torrents_fetcher/.gitignore
index 53eaa21..c977131 100644
--- a/new_torrents_fetcher/.gitignore
+++ b/new_torrents_fetcher/.gitignore
@@ -1,2 +1,3 @@
 /target
 **/*.rs.bk
+.vscode
diff --git a/new_torrents_fetcher/src/main.rs b/new_torrents_fetcher/src/main.rs
index d032cf4..f2488b8 100644
--- a/new_torrents_fetcher/src/main.rs
+++ b/new_torrents_fetcher/src/main.rs
@@ -2,6 +2,7 @@ extern crate clap;
 extern crate csv;
 extern crate reqwest;
 extern crate select;
+
 use clap::{App, Arg};
 use select::document::Document;
 use select::predicate::{Attr, Class, Name, Predicate};
@@ -13,7 +14,6 @@ use std::{thread, time};
 
 static mut COOKIE: &str = "";
 static mut USER_AGENT: &str = "";
-
 fn main() {
   let matches = App::new("New Torrents Fetcher")
     .version("0.1.0")
@@ -41,6 +41,8 @@ fn main() {
   let save_dir = Path::new(matches.value_of("TORRENT_SAVE_DIR").unwrap());
 
   fetch_cloudflare_cookie();
+
+  // torrentz2(save_dir);
   magnetdl(save_dir);
   skytorrents(save_dir);
   leetx(save_dir);
@@ -65,6 +67,59 @@ fn collect_info_hashes(torrents_csv_file: &Path) -> Vec<String> {
   rdr.records().map(|x| x.unwrap()[0].to_string()).collect()
 }
 
+fn torrentz2(save_dir: &Path) {
+  // https://torrentz2.eu/search?f=&p=19
+
+  let page_limit = 19;
+
+  let base_url = "https://torrentz2.eu";
+
+  let mut pages: Vec<String> = Vec::new();
+
+  let types = [
+    "application",
+    "tv",
+    "movie",
+    "adult",
+    "music",
+    "mp3",
+    "anime",
+    "game",
+    "ebook",
+    "adult",
+    "x265",
+    "hevc",
+    "yify",
+    "discography",
+  ];
+  for c_type in types.iter() {
+    for i in 0..page_limit {
+      let page = format!("{}/search?f={}&p={}", base_url, c_type, i);
+      pages.push(page);
+    }
+  }
+
+  for page in pages.iter() {
+    println!("Fetching page {}", page);
+    let html = match fetch_html(page) {
+      Ok(t) => t,
+      _err => continue,
+    };
+
+    let document = Document::from(&html[..]);
+    println!("This is weird am I'm not sure about any of this");
+
+    for row in document.find(Name("dt").descendant(Name("a"))) {
+      let hash = match row.attr("href") {
+        Some(t) => t.to_string(),
+        None => continue,
+      };
+      println!("{}", &hash);
+      fetch_torrent(hash, save_dir);
+    }
+  }
+}
+
 fn magnetdl(save_dir: &Path) {
   let page_limit = 30;
 
@@ -125,6 +180,7 @@ fn skytorrents(save_dir: &Path) {
 
   for page in pages.iter() {
     println!("Fetching page {}", page);
+
     let html = match fetch_html(page) {
       Ok(t) => t,
       _err => continue,
@@ -235,21 +291,21 @@ fn fetch_torrent(hash: String, save_dir: &Path) {
   if !Path::new(&full_path).exists() {
     unsafe {
       Command::new("curl")
-        .args(&[
-          &url,
-          "-H",
-          USER_AGENT,
-          "-H",
-          COOKIE,
-          "--compressed",
-          "-o",
-          &full_path,
-          "-s",
-        ])
-        .output()
-        .expect("curl command failed");
+      .args(&[
+        &url,
+        "-H",
+        USER_AGENT,
+        "-H",
+        COOKIE,
+        "--compressed",
+        "-o",
+        &full_path,
+        "-s",
+      ])
+      .output()
+      .expect("curl command failed");
       check_cloud_flare(Path::new(&full_path));
-      thread::sleep(time::Duration::from_millis(2000));
+      thread::sleep(time::Duration::from_millis(2742));
       println!("{} saved.", &full_path);
     }
   }
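
Note: a quick way to sanity-check the dt -> a selector that the new torrentz2() function relies on, outside the full crawler. The snippet below is a minimal, self-contained sketch using the same select-crate calls as the patch; the HTML literal is a made-up stand-in for a torrentz2 results row, so the live page's markup and href format may well differ.

extern crate select;

use select::document::Document;
use select::predicate::{Name, Predicate};

fn main() {
  // Hypothetical markup shaped like a torrentz2 search result; not taken from the real site.
  let html = r#"
    <dl>
      <dt><a href="/0123456789abcdef0123456789abcdef01234567">Example torrent</a></dt>
      <dd>video &#187; 1.2 GB</dd>
    </dl>
  "#;

  let document = Document::from(html);

  // Same predicate as the patch: every <a> nested under a <dt>.
  for row in document.find(Name("dt").descendant(Name("a"))) {
    if let Some(href) = row.attr("href") {
      // If the href really is "/<info_hash>", the leading slash would still
      // need trimming before passing it on to fetch_torrent().
      println!("href = {}", href);
    }
  }
}

Running this prints the raw href values the selector yields, which makes it easy to see whether they are bare info-hashes or paths before wiring torrentz2() into main().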