Fixing docker build. #19

This commit is contained in:
Dessalines 2019-03-18 12:31:32 -07:00
parent e790b7af12
commit e1894a602c
7 changed files with 51 additions and 62 deletions

View File

@ -1,6 +1,5 @@
FROM node:10-jessie as node
#If encounter Invalid cross-device error -run on host 'echo N | sudo tee /sys/module/overlay/parameters/metacopy'
ARG TORRENTS_CSV_ENDPOINT
COPY server/ui /app/server/ui
RUN cd /app/server/ui && yarn && yarn build
@ -9,22 +8,27 @@ COPY server/service /app/server/service
RUN cd /app/server/service && cargo build --release
FROM debian:jessie-slim as volume
COPY torrents.csv /db/
COPY torrent_files.json /db/
FROM debian:jessie-slim
RUN apt update && apt install -y sqlite3
RUN apt update && apt install -y sqlite3 curl
RUN curl -LO https://github.com/BurntSushi/ripgrep/releases/download/0.10.0/ripgrep_0.10.0_amd64.deb
RUN dpkg -i ripgrep_0.10.0_amd64.deb
RUN curl -LO https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64
RUN chmod +x jq-linux64
RUN mv jq-linux64 /usr/bin/jq
COPY --from=node /app/server/ui/dist /app/dist
COPY --from=rust /app/server/service/target/release/torrents-csv-service /app/
COPY --from=volume /db/torrents.csv /app/
COPY --from=volume /db/torrent_files.json /app/
COPY scripts /app/scripts
RUN cd /app/scripts && . ./build_sqlite.sh
WORKDIR /app/scripts/
RUN ["/bin/bash", "-c", "/app/scripts/build_sqlite.sh"]
EXPOSE 8080
WORKDIR /app/
ARG TORRENTS_CSV_DB_FILE
RUN sqlite3 ${TORRENTS_CSV_DB_FILE} 'select * from torrents limit 10'
CMD /app/torrents-csv-service

View File

@ -15,20 +15,21 @@ It's initially populated with a January 2017 backup of the pirate bay, and new to
To request more torrents, or add your own, go [here](https://gitlab.com/dessalines/torrents.csv/issues).
Made with [Rust](https://www.rust-lang.org), [ripgrep](https://github.com/BurntSushi/ripgrep), [Actix](https://actix.rs/), [Inferno](https://www.infernojs.org), [Typescript](https://www.typescriptlang.org/).
## Webserver
`Torrents.csv` comes with a simple webserver. [Demo Server](https://torrents-csv.ml)
### Requirements
### Docker
```
git clone https://gitlab.com/dessalines/torrents.csv
cd torrents.csv
docker-compose up
```
### Local
#### Requirements
- [Rust](https://www.rust-lang.org/)
- [Yarn](https://yarnpkg.com/en/)
- [SQLite3](https://www.sqlite.org/index.html)
- [jq command line JSON parser: Needs at least jq-1.6](https://stedolan.github.io/jq/)
### Running
#### Running
```
git clone https://gitlab.com/dessalines/torrents.csv
cd torrents.csv/scripts
@ -36,7 +37,7 @@ cd torrents.csv/scripts
```
and go to http://localhost:8080
If running on a different host, run `export TORRENTS_CSV_ENDPOINT=http://whatever.com` to change the hostname.
If running on a different host, run `export TORRENTS_CSV_ENDPOINT=http://whatever.com` to change the hostname, or use a reverse proxy with nginx or apache2.
The torrent data is updated daily, and to do so, run, or place this in a crontab:
@ -44,14 +45,6 @@ The torrent data is updated daily, and to do so, run, or place this in a crontab
This updates the repository, and rebuilds the sqlite cache necessary for searching.
### Docker
```
git clone https://gitlab.com/dessalines/torrents.csv
cd torrents.csv
docker-compose up
```
## Command Line Searching
### Requirements
@ -68,13 +61,10 @@ bleh season 1 (1993-)
size: 13GiB
link: magnet:?xt=urn:btih:INFO_HASH_HERE
```
## Uploading / Adding Torrents from a Directory
An *upload*, consists of making a pull request after running the `scan_torrents.sh` script, which adds torrents from a directory you choose to the `.csv` file, after checking that they aren't already there, and that they have seeders. It also adds their files to `torrent_files.json`.
### Requirements
- [Torrent-Tracker-Health Dessalines branch](https://github.com/dessalines/torrent-tracker-health)
- `npm i -g dessalines/torrent-tracker-health`
- [jq command line JSON parser: Needs at least jq-1.6](https://stedolan.github.io/jq/)
@ -82,7 +72,6 @@ An *upload*, consists of making a pull request after running the `scan_torrents.
- [Gnu Parallel](https://www.gnu.org/software/parallel/)
### Running
[Click here](https://gitlab.com/dessalines/torrents.csv/forks/new) to fork this repo.
```sh
git clone https://gitlab.com/[MY_USER]/torrents.csv
@ -93,19 +82,15 @@ git push
```
Then [click here](https://gitlab.com/dessalines/torrents.csv/merge_requests/new) to do a pull/merge request to my branch.
## Web scraping torrents
`Torrents.csv` has a `Rust` repository for scraping new and top torrents from some torrent sites in the `new_torrents_fetcher` folder. It currently scrapes skytorrents, magnetdl, and leetx.
### Requirements
- Rust
- [Cloudflare Scrape](https://github.com/Anorov/cloudflare-scrape)
- `sudo pip install cfscrape`
### Running
```
git clone https://gitlab.com/dessalines/torrents.csv
cd torrents.csv/scripts
@ -113,20 +98,17 @@ cd torrents.csv/scripts
```
## API
A JSON output of search results is available at:
http://localhost:8080/service/search?q=[QUERY]&size=[NUMBER_OF_RESULTS]&offset=[PAGE]
## How the torrents.csv file looks
```sh
infohash;name;size_bytes;created_unix;seeders;leechers;completed;scraped_date
# torrents here...
```
## How the torrent_files.json looks
```
{
"012ae083ec82bf911f4fe503b9f6df1effaad9ac": [

View File

@ -4,12 +4,9 @@ services:
torrents-csv:
build:
context: .
args:
TORRENTS_CSV_ENDPOINT: http://0.0.0.0:8080
command: /bin/sh -c /app/torrents-csv-service
ports:
- "8080:8080"
environment:
TORRENTS_CSV_DB_FILE: /app/torrents.db
TORRENTS_CSV_FRONT_END_DIR: /app/dist
TORRENTS_CSV_ENDPOINT: 0.0.0.0:8080

View File

@ -1,7 +1,7 @@
#!/bin/bash
csv_file="${TORRENTS_CSV_FILE:-../torrents.csv}"
csv_file="../torrents.csv"
torrent_files_json="../torrent_files.json"
db_file="${TORRENTS_CSV_DB_FILE:-../torrents.db}"
torrent_files_json="`pwd`/../torrent_files.json"
echo "Creating temporary torrents.db file from $csv_file ..."
@ -40,35 +40,43 @@ if [ -f $torrent_files_json ]; then
rg "^([^;]*;){3}[^;]+$" torrent_files_temp > torrent_files_temp_2
mv torrent_files_temp_2 torrent_files_temp
sqlite3 -batch db_tmp 2>/dev/null <<"EOF"
drop table if exists files;
create table files(
"infohash" TEXT,
"index_" INTEGER,
"path" TEXT,
"size_bytes" INTEGER,
"created_unix" INTEGER,
"seeders" INTEGER,
"leechers" INTEGER,
"completed" INTEGER,
"scraped_date" INTEGER);
sqlite3 -batch db_tmp<<EOF
create table files_tmp(
"infohash" TEXT,
"index_" INTEGER,
"path" TEXT,
"size_bytes" INTEGER
);
.separator ";"
.import torrent_files_temp files
.import torrent_files_temp files_tmp
-- Filling the extra columns
create table files(
"infohash" TEXT,
"index_" INTEGER,
"path" TEXT,
"size_bytes" INTEGER,
"created_unix" INTEGER,
"seeders" INTEGER,
"leechers" INTEGER,
"completed" INTEGER,
"scraped_date" INTEGER
);
insert into files
select files.infohash,
files.index_,
files.path,
files.size_bytes,
select files_tmp.infohash,
files_tmp.index_,
files_tmp.path,
files_tmp.size_bytes,
torrents.created_unix,
torrents.seeders,
torrents.leechers,
torrents.completed,
torrents.scraped_date
from files
inner join torrents on files.infohash = torrents.infohash
order by torrents.seeders desc, files.size_bytes desc;
from files_tmp
inner join torrents on files_tmp.infohash = torrents.infohash
order by torrents.seeders desc, files_tmp.size_bytes desc;
delete from files where seeders is null;
drop table files_tmp;
EOF
rm torrent_files_temp
fi

View File

@ -76,5 +76,5 @@ popd
rm -rf "$tmp_torrent_dir"
# Scan the torrent dir for new files, and add them
node --max-old-space-size=2096 scan_torrent_files.js --dir "$torrents_dir"
node --max-old-space-size=4096 scan_torrent_files.js --dir "$torrents_dir"

View File

@ -1,7 +1,6 @@
# Optionally use environment variables
# export TORRENTS_CSV_ENDPOINT=http://0.0.0.0:8080
# export TORRENTS_CSV_FILE=`pwd`/../../torrents.db
# export TORRENTS_CSV_DB_FILE=`pwd`/../../torrents.db
# export TORRENTS_CSV_FRONT_END_DIR=`pwd`/../ui/dist

View File

@ -28,4 +28,3 @@ test/data/result.json
package-lock.json
*.orig
src/env.ts