diff --git a/pirokit b/pirokit
index 071c430..3b5e4a0 100755
--- a/pirokit
+++ b/pirokit
@@ -1,76 +1,75 @@
-#!/usr/bin/env bash
+#!/bin/sh
 
-#ensures there are no accidental downloads
-set -eo pipefail
+list() {
+    num=1
+    printf '%s\n' "$2" |
+        while read -r line; do
+            uploader=$(printf '%s' "$3" | sed -n "${num}p")
+            seeder=$(printf '%s' "$4" | sed -n "${num}p")
+            leecher=$(printf '%s' "$5" | sed -n "${num}p")
+            size=$(printf '%s' "$6" | sed -n "${num}p")
+            printf '%s\n' "$num - $line | $uploader | $seeder | $leecher | $size"
+            num=$((num + 1))
+        done
+    # next and previous
+    [ "$1" = "1" ] && printf '%s\n' "next" ||
+        printf '%s\n%s' "next" "previous"
+}
 
 baseurl="https://www.1377x.to/"
-cachedir="$HOME/.cache/dl"
 
 #get query from dmenu
-query=$(printf '%s' | dmenu -i -p "Search Torrent:" | sed 's/ /+/g')
-
-#setup working dir
-mkdir -p $cachedir
-cd $cachedir
+query=$(printf '%s' "" | dmenu -i -p "Search Torrent:" | sed 's/ /+/g')
 
 case "$(printf "All\\nMovies\\nTV\\nGames\\nMusic\\nApplications\\nDocumentaries\\nOther" | dmenu -i -p "Category to search in: ")" in
-    All) url="$baseurl/search/$query" ;;
-    Movies) url="$baseurl/category-search/$query/Movies" ;;
-    TV) url="$baseurl/category-search/$query/TV" ;;
-    Games) url="$baseurl/category-search/$query/Games" ;;
-    Music) url"$baseurl/category-search/$query/Music" ;;
-    Applications) url="$baseurl/category-search/$query/Apps" ;;
-    Documentaries) url="$baseurl/category-search/$query/Documentaries" ;;
-    Other) url="$baseurl/category-search/$query/Other" ;;
-    *) exit ;;
+All) search_url="$baseurl/search/$query" ;;
+Movies) search_url="$baseurl/category-search/$query/Movies" ;;
+TV) search_url="$baseurl/category-search/$query/TV" ;;
+Games) search_url="$baseurl/category-search/$query/Games" ;;
+Music) search_url="$baseurl/category-search/$query/Music" ;;
+Applications) search_url="$baseurl/category-search/$query/Apps" ;;
+Documentaries) search_url="$baseurl/category-search/$query/Documentaries" ;;
+Other) search_url="$baseurl/category-search/$query/Other" ;;
+*) exit ;;
 esac
 
 page=1
 while true; do
-    # get the page html
-    curl -s $url/$page/ -o tmp.html
+    # get the page html
+    tmp=$(curl -s "$search_url"/$page/)
+    #get titles
+    titles=$(printf '%s\n' "$tmp" | sed -nE "s/.*([^>]*)<.*/\1/p")
+    #get seeders
+    seeders=$(printf "%s\n" "$tmp" | sed -nE "s/.*([^>]*)<.*/\1/p")
+    #get leechers
+    leeches=$(printf "%s\n" "$tmp" | sed -nE "s/.*([^>]*)<.*/\1/p")
+    #get sizes
+    sizes=$(printf "%s\n" "$tmp" | sed -nE "s/.*([^>]*)<.*/\1/p")
+    #get uploaders
+    uploaders=$(printf "%s\n" "$tmp" | sed -nE "s/.*([^>]*)<.*/\1/p")
+    #get links
+    links=$(printf "%s\n" "$tmp" | sed -nE "s/.*]*)\">.*/\1/p")
 
-    #get titles
-    grep -o '' tmp.html | sed 's/<[^>]*>//g' > titles.bw #deletes tags
-    #get seeders
-    grep -P '\d*' tmp.html | sed 's/<[^>]*>//g' > seeders.bw
-    #get leechers
-    grep -P '\d*' tmp.html | sed 's/<[^>]*>//g' > leechers.bw
-    #get sizes
-    grep -P '.*' tmp.html | sed 's/<[^>]*>//g' > sizes.bw
-    #get uploaders
-    grep -P ']*>//g' > uploaders.bw
-    #get links
-    grep -E '/torrent/' tmp.html | sed -E 's#.*(/torrent/.*)/">.*/#\1#' | sed 's/td>//g' > links.bw
+    #gets line number (that group the user selected in dmenu)
+    LINE=$(list "$page" "$titles" "$uploaders" "$seeders" "$leeches" "$sizes" |
+        column -t -s '|' | dmenu -i -l 25 | cut -d- -f1)
 
-    #generates line numbers
-    awk '{print NR " - "$0""}' titles.bw > titlesNumbered.bw
+    [ "$LINE" = "next" ] && page=$((page += 1)) && continue
 
-    # prepend next and previous
-    [ "$page" == "1" ] && printf '%s\n' "next" >> titlesNumbered.bw || \
-    printf '%s\n%s' "next" "previous" >> titlesNumbered.bw
+    [ "$LINE" = "previous" ] && page=$((page -= 1)) && continue
 
-    #gets line number (that group the user selected in dmenu)
-    LINE=$(paste -d\| titlesNumbered.bw uploaders.bw seeders.bw leechers.bw sizes.bw | \
-    sed 's/|||//' | column -t -s '|' | dmenu -i -l 25 | cut -d- -f1)
-
-    [ "$LINE" == "next" ] && let "page+=1" && continue
-
-    [ "$LINE" == "previous" ] && let "page-=1" && continue
-
-    [ -z "$LINE" ] && exit || break
+    if [ -z "$LINE" ]; then exit; else break; fi
 done
 
-suburl=$(sed "${LINE}q;d" links.bw)
-url="$baseurl$suburl/"
+suburl=$(printf '%s\n' "$links" | sed -n "${LINE}p")
+url="$baseurl$suburl"
 
-#get page that contains magnet link
-curl -s $url > tmp.html
+site=$(curl -s "$url")
 
 #scrape magnet link
-magnet=$(paste tmp.html | tr -d '\n' | sed -E 's#.*(magnet:\?xt=urn:btih:[^"]*).*#\1#')
+magnet=$(printf '%s\n' "$site" | sed -nE "s/.*]*)\" onclick=.*/\1/p")
 
 transmission-remote -a "$magnet"
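
Note on the extraction patterns: the HTML anchors inside the quoted sed expressions above are incomplete (several read as a bare "([^>]*)<"). A minimal sketch of the kind of expression they presumably contain, assuming the site's usual result table (cells such as coll-1 name, coll-2 seeds, coll-3 leeches) with one table cell per line of HTML; the example query, class names and regexes below are illustrative assumptions, not the original patterns:

    # hypothetical reconstruction, not the patterns from the patch above
    search_url="https://www.1377x.to/search/debian"
    html=$(curl -s "$search_url/1/")
    # title: text of the /torrent/ link inside the name cell
    titles=$(printf '%s\n' "$html" | sed -nE 's|.*<a href="/torrent/[^>]*>([^<]*)<.*|\1|p')
    # seeders / leechers: contents of their dedicated cells
    seeders=$(printf '%s\n' "$html" | sed -nE 's|.*<td class="coll-2 seeds">([^<]*)<.*|\1|p')
    leeches=$(printf '%s\n' "$html" | sed -nE 's|.*<td class="coll-3 leeches">([^<]*)<.*|\1|p')
    # sizes, uploaders, the /torrent/ links and the magnet href would follow the
    # same shape, each anchored on its own cell or attribute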