Rewrite for POSIX compliance and improve sed regex

This commit is contained in:
CronyAkatsuki 2023-11-03 21:39:17 +01:00
parent f5c98b094c
commit 7e63026018

83
pirokit
View File

@ -1,27 +1,35 @@
#!/usr/bin/env bash
#!/bin/sh
#ensures there are no accidental downloads
set -eo pipefail
# Print a numbered menu of torrent results plus paging entries.
#   $1 - current page number (when "1" there is no "previous" entry)
#   $2 - newline-separated titles
#   $3 - newline-separated uploaders
#   $4 - newline-separated seeder counts
#   $5 - newline-separated leecher counts
#   $6 - newline-separated sizes
# Output: one "<n> - <title> | <uploader> | <seeders> | <leechers> | <size>"
# line per result, followed by "next" (and "previous" unless on page 1).
list() {
	num=1
	printf '%s\n' "$2" |
		while read -r line; do
			# pick the num-th line of each parallel column
			uploader=$(printf '%s' "$3" | sed -n "${num}p")
			seeder=$(printf '%s' "$4" | sed -n "${num}p")
			leecher=$(printf '%s' "$5" | sed -n "${num}p")
			size=$(printf '%s' "$6" | sed -n "${num}p")
			printf '%s\n' "$num - $line | $uploader | $seeder | $leecher | $size"
			num=$((num + 1))
		done
	# paging entries: an explicit if avoids the `cmd && a || b` pitfall
	# (with &&/|| the "previous" branch would also run if the first printf failed)
	if [ "$1" = "1" ]; then
		printf '%s\n' "next"
	else
		printf '%s\n%s\n' "next" "previous"
	fi
}
baseurl="https://www.1377x.to/"
cachedir="$HOME/.cache/dl"
#get query from dmenu
query=$(printf '%s' | dmenu -i -p "Search Torrent:" | sed 's/ /+/g')
#setup working dir
mkdir -p "$cachedir"
cd "$cachedir" || exit 1
query=$(printf '%s' "" | dmenu -i -p "Search Torrent:" | sed 's/ /+/g')
case "$(printf "All\\nMovies\\nTV\\nGames\\nMusic\\nApplications\\nDocumentaries\\nOther" | dmenu -i -p "Category to search in: ")" in
All) url="$baseurl/search/$query" ;;
Movies) url="$baseurl/category-search/$query/Movies" ;;
TV) url="$baseurl/category-search/$query/TV" ;;
Games) url="$baseurl/category-search/$query/Games" ;;
Music) url="$baseurl/category-search/$query/Music" ;;
Applications) url="$baseurl/category-search/$query/Apps" ;;
Documentaries) url="$baseurl/category-search/$query/Documentaries" ;;
Other) url="$baseurl/category-search/$query/Other" ;;
All) search_url="$baseurl/search/$query" ;;
Movies) search_url="$baseurl/category-search/$query/Movies" ;;
TV) search_url="$baseurl/category-search/$query/TV" ;;
Games) search_url="$baseurl/category-search/$query/Games" ;;
Music) search_url="$baseurl/category-search/$query/Music" ;;
Applications) search_url="$baseurl/category-search/$query/Apps" ;;
Documentaries) search_url="$baseurl/category-search/$query/Documentaries" ;;
Other) search_url="$baseurl/category-search/$query/Other" ;;
*) exit ;;
esac
@ -30,47 +38,38 @@ page=1
while true; do
# get the page html
curl -s $url/$page/ -o tmp.html
tmp=$(curl -s "$search_url"/$page/)
#get titles
grep -o '<a href=./torrent/.*</a>' tmp.html | sed 's/<[^>]*>//g' > titles.bw #deletes tags
titles=$(printf '%s\n' "$tmp" | sed -nE "s/.*<a href=\"\/torrent\/.*\">([^>]*)<.*/\1/p")
#get seeders
grep -P '<td class="coll-2 seeds">\d*</td>' tmp.html | sed 's/<[^>]*>//g' > seeders.bw
seeders=$(printf "%s\n" "$tmp" | sed -nE "s/.*<td class=\"coll-2 seeds\">([^>]*)<.*/\1/p")
#get leechers
grep -P '<td class="coll-3 leeches">\d*</td>' tmp.html | sed 's/<[^>]*>//g' > leechers.bw
leeches=$(printf "%s\n" "$tmp" | sed -nE "s/.*<td class=\"coll-3 leeches\">([^>]*)<.*/\1/p")
#get sizes
grep -P '<td class="coll-4 size mob-uploader">.*</td>' tmp.html | sed 's/<[^>]*>//g' > sizes.bw
sizes=$(printf "%s\n" "$tmp" | sed -nE "s/.*<td class=\"coll-4 size mob-uploader\">([^>]*)<.*/\1/p")
#get uploaders
grep -P '<td class="coll-5 uploader"><a href=.*' tmp.html | sed 's/<[^>]*>//g' > uploaders.bw
uploaders=$(printf "%s\n" "$tmp" | sed -nE "s/.*<td class=\"coll-5 uploader\"><a href=.*>([^>]*)<.*/\1/p")
#get links
grep -E '/torrent/' tmp.html | sed -E 's#.*(/torrent/.*)/">.*/#\1#' | sed 's/td>//g' > links.bw
#generates line numbers
awk '{print NR " - "$0""}' titles.bw > titlesNumbered.bw
# prepend next and previous
[ "$page" == "1" ] && printf '%s\n' "next" >> titlesNumbered.bw || \
printf '%s\n%s' "next" "previous" >> titlesNumbered.bw
links=$(printf "%s\n" "$tmp" | sed -nE "s/.*<a href=\"(\/torrent\/[^>]*)\">.*/\1/p")
#gets line number (of the entry the user selected in dmenu)
LINE=$(paste -d\| titlesNumbered.bw uploaders.bw seeders.bw leechers.bw sizes.bw | \
sed 's/|||//' | column -t -s '|' | dmenu -i -l 25 | cut -d- -f1)
LINE=$(list "$page" "$titles" "$uploaders" "$seeders" "$leeches" "$sizes" |
column -t -s '|' | dmenu -i -l 25 | cut -d- -f1)
[ "$LINE" == "next" ] && let "page+=1" && continue
[ "$LINE" = "next" ] && page=$((page + 1)) && continue
[ "$LINE" == "previous" ] && let "page-=1" && continue
[ "$LINE" = "previous" ] && page=$((page - 1)) && continue
[ -z "$LINE" ] && exit || break
if [ -z "$LINE" ]; then exit; else break; fi
done
suburl=$(sed "${LINE}q;d" links.bw)
url="$baseurl$suburl/"
suburl=$(printf '%s\n' "$links" | sed -n "${LINE}p")
url="$baseurl$suburl"
#get page that contains magnet link
curl -s $url > tmp.html
site=$(curl -s "$url")
#scrape magnet link
magnet=$(paste tmp.html | tr -d '\n' | sed -E 's#.*(magnet:\?xt=urn:btih:[^"]*).*#\1#')
magnet=$(printf '%s\n' "$site" | sed -nE "s/.*<a class=\".*\" href=\"(magnet[^>]*)\" onclick=.*/\1/p")
transmission-remote -a "$magnet"