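//! Torrents.csv web service: a small actix-web server that serves the front
//! end and exposes a JSON search API over a SQLite database of torrents.
//! Example request: GET /service/search?q=debian&page=1&size=20&type_=torrent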
extern crate actix_web;
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
extern crate rusqlite;
extern crate time;

use actix_web::{fs, fs::NamedFile, http, server, App, HttpRequest, HttpResponse, Query};
use std::env;
use std::ops::Deref;

use rusqlite::Connection;

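// Print the listen address, register the routes (search API, index page,
// static assets), and run the server.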
fn main() {
  println!("Access me at {}", endpoint());
  server::new(|| {
    App::new()
      .route("/service/search", http::Method::GET, search)
      .resource("/", |r| r.f(index))
      .handler(
        "/static",
        fs::StaticFiles::new(front_end_dir()).unwrap(),
      )
      .finish()
  }).bind(endpoint())
    .unwrap()
    .run();
}

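// Serve the front end's index.html for requests to the site root.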
fn index(_req: &HttpRequest) -> Result<NamedFile, actix_web::error::Error> {
  Ok(NamedFile::open(front_end_dir() + "/index.html")?)
}

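// Configuration helpers: each setting can be overridden by an environment
// variable and falls back to a default matching the repository layout.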
fn front_end_dir() -> String {
  env::var("TORRENTS_CSV_FRONT_END_DIR").unwrap_or("../ui/dist".to_string())
}

fn torrents_db_file() -> String {
  env::var("TORRENTS_CSV_DB_FILE").unwrap_or("../../torrents.db".to_string())
}

fn endpoint() -> String {
  env::var("TORRENTS_CSV_ENDPOINT").unwrap_or("0.0.0.0:8080".to_string())
}

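// Query-string parameters for /service/search. `page`, `size`, and `type_`
// are optional; defaults are applied in `search_query`.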
#[derive(Deserialize)]
struct SearchQuery {
  q: String,
  page: Option<usize>,
  size: Option<usize>,
  type_: Option<String>,
}

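// GET handler: run the search and return the serialized results as JSON,
// with a permissive CORS header so the API can be called from other origins.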
fn search(query: Query<SearchQuery>) -> HttpResponse {
  HttpResponse::Ok()
    .header("Access-Control-Allow-Origin", "*")
    .content_type("application/json")
    .body(search_query(query))
}

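// Apply defaults (page 1, 10 results, torrent search), convert the page number
// to a SQL offset, and dispatch to the torrent or file search.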
fn search_query(query: Query<SearchQuery>) -> String {
  let page = query.page.unwrap_or(1);
  let size = query.size.unwrap_or(10);
  let type_ = query.type_.as_ref().map_or("torrent", String::deref);
  // saturating_sub avoids an integer underflow panic if a client sends page=0
  let offset = size * page.saturating_sub(1);

  println!(
    "query = {}, type = {}, page = {}, size = {}",
    query.q, type_, page, size
  );

  if type_ == "file" {
    let results = torrent_file_search(&query.q, size, offset);
    serde_json::to_string(&results).unwrap()
  } else {
    let results = torrent_search(&query.q, size, offset);
    serde_json::to_string(&results).unwrap()
  }
}

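// One row of the `torrents` table, serialized as-is into the JSON response.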
#[derive(Debug, Serialize, Deserialize)]
struct Torrent {
  infohash: String,
  name: String,
  size_bytes: isize,
  created_unix: u32,
  seeders: u32,
  leechers: u32,
  completed: Option<u32>,
  scraped_date: u32,
}

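// Substring search over torrent names with SQL LIKE. Spaces in the query are
// replaced with `%` wildcards, so the words only need to appear in order.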
fn torrent_search(query: &str, size: usize, offset: usize) -> Vec<Torrent> {
  let stmt_str = "select * from torrents where name like '%' || ?1 || '%' limit ?2 offset ?3";
  let conn = Connection::open(torrents_db_file()).unwrap();
  let mut stmt = conn.prepare(&stmt_str).unwrap();
  let torrent_iter = stmt
    .query_map(&[
      query.replace(" ", "%"),
      size.to_string(),
      offset.to_string(),
    ], |row| Torrent {
      infohash: row.get(0),
      name: row.get(1),
      size_bytes: row.get(2),
      created_unix: row.get(3),
      seeders: row.get(4),
      leechers: row.get(5),
      completed: row.get(6),
      scraped_date: row.get(7),
    }).unwrap();

  let mut torrents = Vec::new();
  for torrent in torrent_iter {
    torrents.push(torrent.unwrap());
  }
  torrents
}

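// One row of the `files` table: a single file belonging to a torrent.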
#[derive(Debug, Serialize, Deserialize)]
struct File {
  infohash: String,
  index_: u32,
  path: String,
  size_bytes: isize,
  created_unix: u32,
  seeders: u32,
  leechers: u32,
  completed: Option<u32>,
  scraped_date: u32,
}

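// Same search as `torrent_search`, but over file paths in the `files` table.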
fn torrent_file_search(query: &str, size: usize, offset: usize) -> Vec<File> {
  let stmt_str = "select * from files where path like '%' || ?1 || '%' limit ?2 offset ?3";
  let conn = Connection::open(torrents_db_file()).unwrap();
  let mut stmt = conn.prepare(&stmt_str).unwrap();
  let file_iter = stmt
    .query_map(&[
      query.replace(" ", "%"),
      size.to_string(),
      offset.to_string(),
    ], |row| File {
      infohash: row.get(0),
      index_: row.get(1),
      path: row.get(2),
      size_bytes: row.get(3),
      created_unix: row.get(4),
      seeders: row.get(5),
      leechers: row.get(6),
      completed: row.get(7),
      scraped_date: row.get(8),
    }).unwrap();

  let mut files = Vec::new();
  for file in file_iter {
    files.push(file.unwrap());
  }
  files
}

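// Smoke test: expects a populated torrents database at the configured path and
// times a sample name search.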
#[cfg(test)]
mod tests {
  use time::PreciseTime;

  #[test]
  fn test() {
    let start = PreciseTime::now();
    let results = super::torrent_search("sherlock", 10, 0);
    assert!(results.len() > 2);
    let end = PreciseTime::now();
    println!("Query took {} seconds.", start.to(end));
  }
}