From 3bd9eace9bca9c3d273a15c8abc11828a390d3d1 Mon Sep 17 00:00:00 2001 From: Crony Akatsuki Date: Thu, 4 Aug 2022 17:11:46 +0200 Subject: [PATCH] added next/previous page viewing --- pirokit | 62 +++++++++++++++++++++++++++++++++++++-------------------- 1 file changed, 40 insertions(+), 22 deletions(-) diff --git a/pirokit b/pirokit index 423298d..c2b3249 100755 --- a/pirokit +++ b/pirokit @@ -14,33 +14,51 @@ mkdir -p $cachedir cd $cachedir case "$(printf "All\\nMovies\\nTV\\nGames\\nMusic\\nApplications\\nDocumentaries\\nOther" | dmenu -p "Category to search in: ")" in - All) curl -s $baseurl/search/$query/1/ -o tmp.html;; - Movies) curl -s $baseurl/category-search/$query/Movies/1/ -o tmp.html ;; - TV) curl -s $baseurl/category-search/$query/TV/1/ -o tmp.html ;; - Games) curl -s $baseurl/category-search/$query/Games/1/ -o tmp.html ;; - Music) curl -s $baseurl/category-search/$query/Music/1/ -o tmp.html ;; - Applications) curl -s $baseurl/category-search/$query/Apps/1/ -o tmp.html ;; - Documentaries) curl -s $baseurl/category-search/$query/Documentaries/1/ -o tmp.html ;; - Other) curl -s $baseurl/category-search/$query/Other/1/ -o tmp.html ;; + All) url="$baseurl/search/$query" ;; + Movies) url="$baseurl/category-search/$query/Movies" ;; + TV) url="$baseurl/category-search/$query/TV" ;; + Games) url="$baseurl/category-search/$query/Games" ;; + Music) url="$baseurl/category-search/$query/Music" ;; + Applications) url="$baseurl/category-search/$query/Apps" ;; + Documentaries) url="$baseurl/category-search/$query/Documentaries" ;; + Other) url="$baseurl/category-search/$query/Other" ;; *) exit ;; esac -#get titles -grep -o '' tmp.html | sed 's/<[^>]*>//g' > titles.bw #deletes tags -#get seeders -grep -P '\d*' tmp.html | sed 's/<[^>]*>//g' > seeders.bw -#get leechers -grep -P '\d*' tmp.html | sed 's/<[^>]*>//g' > leechers.bw -#get sizes -grep -P '.*' tmp.html | sed 's/<[^>]*>//g' > sizes.bw -#get links -grep -E '/torrent/' tmp.html | sed -E 's#.*(/torrent/.*)/">.*/#\1#' 
| sed 's/td>//g' > links.bw +page=1 -#generates line numbers -awk '{print NR " - "$0""}' titles.bw > titlesNumbered.bw +while true; do +    curl -s $url/$page/ -o tmp.html -#gets line number (that groupthe user selected in dmenu) -LINE=$(paste -d\| titlesNumbered.bw seeders.bw leechers.bw sizes.bw | dmenu -i -l 25 | cut -d- -f1) +    #get titles +    grep -o '' tmp.html | sed 's/<[^>]*>//g' > titles.bw #deletes tags +    #get seeders +    grep -P '\d*' tmp.html | sed 's/<[^>]*>//g' > seeders.bw +    #get leechers +    grep -P '\d*' tmp.html | sed 's/<[^>]*>//g' > leechers.bw +    #get sizes +    grep -P '.*' tmp.html | sed 's/<[^>]*>//g' > sizes.bw +    #get links +    grep -E '/torrent/' tmp.html | sed -E 's#.*(/torrent/.*)/">.*/#\1#' | sed 's/td>//g' > links.bw + +    #generates line numbers +    awk '{print NR " - "$0""}' titles.bw > titlesNumbered.bw + +    # prepend next and previous + +    [ "$page" == "1" ] && printf '%s\n' "next" >> titlesNumbered.bw || \ +    printf '%s\n%s' "next" "previous" >> titlesNumbered.bw + +    #gets line number (that group the user selected in dmenu) +    LINE=$(paste -d\| titlesNumbered.bw seeders.bw leechers.bw sizes.bw | \ +    sed 's/|||//' | dmenu -i -l 25 | cut -d- -f1) + +    [ "$LINE" == "next" ] && page=$((page+1)) && continue + +    [ "$LINE" == "previous" ] && page=$((page-1)) && continue + +    [ -z "$LINE" ] && exit || break +done suburl=$(sed "${LINE}q;d" links.bw) url="$baseurl$suburl/"