added next/previous page viewing
This commit is contained in:
parent
3243edce01
commit
3bd9eace9b
62
pirokit
62
pirokit
@ -14,33 +14,51 @@ mkdir -p $cachedir
|
|||||||
cd $cachedir
|
cd $cachedir
|
||||||
|
|
||||||
case "$(printf "All\\nMovies\\nTV\\nGames\\nMusic\\nApplications\\nDocumentaries\\nOther" | dmenu -p "Category to search in: ")" in
|
case "$(printf "All\\nMovies\\nTV\\nGames\\nMusic\\nApplications\\nDocumentaries\\nOther" | dmenu -p "Category to search in: ")" in
|
||||||
All) curl -s $baseurl/search/$query/1/ -o tmp.html;;
|
All) url="$baseurl/search/$query" ;;
|
||||||
Movies) curl -s $baseurl/category-search/$query/Movies/1/ -o tmp.html ;;
|
Movies) url="$baseurl/category-search/$query/Movies" ;;
|
||||||
TV) curl -s $baseurl/category-search/$query/TV/1/ -o tmp.html ;;
|
TV) url="$baseurl/category-search/$query/TV" ;;
|
||||||
Games) curl -s $baseurl/category-search/$query/Games/1/ -o tmp.html ;;
|
Games) url="$baseurl/category-search/$query/Games" ;;
|
||||||
Music) curl -s $baseurl/category-search/$query/Music/1/ -o tmp.html ;;
|
Music) url="$baseurl/category-search/$query/Music" ;;
|
||||||
Applications) curl -s $baseurl/category-search/$query/Apps/1/ -o tmp.html ;;
|
Applications) url="$baseurl/category-search/$query/Apps" ;;
|
||||||
Documentaries) curl -s $baseurl/category-search/$query/Documentaries/1/ -o tmp.html ;;
|
Documentaries) url="$baseurl/category-search/$query/Documentaries" ;;
|
||||||
Other) curl -s $baseurl/category-search/$query/Other/1/ -o tmp.html ;;
|
Other) url="$baseurl/category-search/$query/Other" ;;
|
||||||
*) exit ;;
|
*) exit ;;
|
||||||
esac
|
esac
|
||||||
|
|
||||||
#get titles
|
page=1
|
||||||
grep -o '<a href=./torrent/.*</a>' tmp.html | sed 's/<[^>]*>//g' > titles.bw #deletes tags
|
|
||||||
#get seeders
|
|
||||||
grep -P '<td class="coll-2 seeds">\d*</td>' tmp.html | sed 's/<[^>]*>//g' > seeders.bw
|
|
||||||
#get leechers
|
|
||||||
grep -P '<td class="coll-3 leeches">\d*</td>' tmp.html | sed 's/<[^>]*>//g' > leechers.bw
|
|
||||||
#get sizes
|
|
||||||
grep -P '<td class="coll-4 size mob-uploader">.*</td>' tmp.html | sed 's/<[^>]*>//g' > sizes.bw
|
|
||||||
#get links
|
|
||||||
grep -E '/torrent/' tmp.html | sed -E 's#.*(/torrent/.*)/">.*/#\1#' | sed 's/td>//g' > links.bw
|
|
||||||
|
|
||||||
#generates line numbers
|
while true; do
|
||||||
awk '{print NR " - "$0""}' titles.bw > titlesNumbered.bw
|
curl -s $url/$page/ -o tmp.html
|
||||||
|
|
||||||
#gets line number (that the user selected in dmenu)
|
#get titles
|
||||||
LINE=$(paste -d\| titlesNumbered.bw seeders.bw leechers.bw sizes.bw | dmenu -i -l 25 | cut -d- -f1)
|
grep -o '<a href=./torrent/.*</a>' tmp.html | sed 's/<[^>]*>//g' > titles.bw #deletes tags
|
||||||
|
#get seeders
|
||||||
|
grep -P '<td class="coll-2 seeds">\d*</td>' tmp.html | sed 's/<[^>]*>//g' > seeders.bw
|
||||||
|
#get leechers
|
||||||
|
grep -P '<td class="coll-3 leeches">\d*</td>' tmp.html | sed 's/<[^>]*>//g' > leechers.bw
|
||||||
|
#get sizes
|
||||||
|
grep -P '<td class="coll-4 size mob-uploader">.*</td>' tmp.html | sed 's/<[^>]*>//g' > sizes.bw
|
||||||
|
#get links
|
||||||
|
grep -E '/torrent/' tmp.html | sed -E 's#.*(/torrent/.*)/">.*/#\1#' | sed 's/td>//g' > links.bw
|
||||||
|
|
||||||
|
#generates line numbers
|
||||||
|
awk '{print NR " - "$0""}' titles.bw > titlesNumbered.bw
|
||||||
|
|
||||||
|
# prepend next and previous
|
||||||
|
|
||||||
|
[ "$page" == "1" ] && printf '%s\n' "next" >> titlesNumbered.bw || \
|
||||||
|
printf '%s\n%s' "next" "previous" >> titlesNumbered.bw
|
||||||
|
|
||||||
|
#gets line number (that the user selected in dmenu)
|
||||||
|
LINE=$(paste -d\| titlesNumbered.bw seeders.bw leechers.bw sizes.bw | \
|
||||||
|
sed 's/|||//' | dmenu -i -l 25 | cut -d- -f1)
|
||||||
|
|
||||||
|
[ "$LINE" == "next" ] && let "page+=1"
|
||||||
|
|
||||||
|
[ "$LINE" == "previous" ] && let "page-=1"
|
||||||
|
|
||||||
|
[ -z "$LINE" ] && exit
|
||||||
|
done
|
||||||
|
|
||||||
suburl=$(sed "${LINE}q;d" links.bw)
|
suburl=$(sed "${LINE}q;d" links.bw)
|
||||||
url="$baseurl$suburl/"
|
url="$baseurl$suburl/"
|
||||||
|
Loading…
Reference in New Issue
Block a user