Skip to content

Commit

Permalink
Merge branch 'development'
Browse files Browse the repository at this point in the history
  • Loading branch information
Euro20179 committed Dec 8, 2022
2 parents 046b6b0 + 33f7e53 commit b716e94
Show file tree
Hide file tree
Showing 11 changed files with 896 additions and 680 deletions.
16 changes: 0 additions & 16 deletions addons/extensions/ani-url-handler

This file was deleted.

4 changes: 0 additions & 4 deletions addons/extensions/gui
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,3 @@ load_interface gui || die 2 "The gui interface addon could not be found\n"
print_help_gui () {
printf "%s\n" "Use a gui instead of tui as a menu"
}

on_clean_up_gui(){
rm -f "${session_temp_dir}/data.gui"
}
10 changes: 4 additions & 6 deletions addons/interfaces/gui
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,14 @@
: "${YTFZF_GUI_CSS:=$YTFZF_CONFIG_DIR/interface-gui.css}"

display_text_gui () {
echo "$*" > "${session_temp_dir}/data.gui"
MAIN_DIALOG="
<window>
<edit>
<width>500</width>
<height>500</height>
<input file>${session_temp_dir}/data.gui</input>
<default>$*</default>
</edit>
</window>" gtkdialog
rm -f "${session_temp_dir}/data.gui"
}

info_wait_prompt_gui () {
Expand Down Expand Up @@ -98,7 +96,7 @@ search_prompt_menu_gui () {
<vbox>
<entry>
<default>Search...</default>
<variable>search</variable>
<variable>SEARCH</variable>
</entry>
<button>
<label>Search!</label>
Expand All @@ -108,12 +106,12 @@ search_prompt_menu_gui () {

while read -r line; do
case "$line" in
search*)
*SEARCH*)
_search=$(printf "%s" "$line" | tr -d '"')
_search="${_search#*=}" ;;
EXIT*) break ;;
esac
done <<EOF
done <<-EOF
$vars
EOF
}
2 changes: 0 additions & 2 deletions addons/scrapers/ani
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
#!/bin/sh

submenu_opts="$submenu_opts --ext=ani-url-handler"

_ani_get_categories () {
#stolen from pystardust/ani-cli
sed -n 's_^[[:space:]]*<a href="/category/\([^"]*\)" title="\([^"]*\)".*_\1_p' "$1"
Expand Down
127 changes: 23 additions & 104 deletions addons/scrapers/ani-category
Original file line number Diff line number Diff line change
@@ -1,112 +1,31 @@
#!/bin/sh
#

#stolen from pystardust/ani-cli
# Decrypt the video sources hidden in a goload.pro embed page.
# $1 - embed page url of the form ...?id=<id>&title=<title>
# Prints the decrypted https video links (one per line) to stdout.
# NOTE(review): keys/iv are scraped out of css class names and the
# data-value attribute of the page -- this is site-markup dependent
# and breaks whenever goload.pro changes its html.
_decrypt_link() {
ajax_url="https://goload.pro/encrypt-ajax.php"
# the video id is embedded in the query string of the embed url
id=$(printf "%s" "$1" | sed -nE 's/.*id=(.*)&title.*/\1/p')
resp=$(curl -s "$1")
# hex-encode (od -A n -t x1) markers pulled from class names:
# container-<key>, wrapper container-<iv>, videocontent-<key2>
secret_key=$(printf "%s" "$resp" | sed -nE 's/.*class="container-(.*)">/\1/p' | tr -d "\n" | od -A n -t x1 | tr -d " |\n")
iv=$(printf "%s" "$resp" | sed -nE 's/.*class="wrapper container-(.*)">/\1/p' | tr -d "\n" | od -A n -t x1 | tr -d " |\n")
second_key=$(printf "%s" "$resp" | sed -nE 's/.*class=".*videocontent-(.*)">/\1/p' | tr -d "\n" | od -A n -t x1 | tr -d " |\n")
# data-value holds a base64+aes256 blob whose plaintext ends with the request token
token=$(printf "%s" "$resp" | sed -nE 's/.*data-value="(.*)">.*/\1/p' | base64 -d | openssl enc -d -aes256 -K "$secret_key" -iv "$iv" | sed -nE 's/.*&(token.*)/\1/p')
# the ajax id parameter is the video id encrypted with the same key/iv
ajax=$(printf '%s' "$id" | openssl enc -e -aes256 -K "$secret_key" -iv "$iv" | base64)
data=$(curl -s -H "X-Requested-With:XMLHttpRequest" "${ajax_url}?id=${ajax}&alias=${id}&${token}" | sed -e 's/{"data":"//' -e 's/"}/\n/' -e 's/\\//g')
# decrypt the response payload with the second key, then keep only the urls
printf '%s' "$data" | base64 -d | openssl enc -d -aes256 -K "$second_key" -iv "$iv" | sed -e 's/\].*/\]/' -e 's/\\//g' |
grep -Eo 'https:\/\/[-a-zA-Z0-9@:%._\+~#=][a-zA-Z0-9][-a-zA-Z0-9@:%_\+.~#?&\/\/=]*'
}

# Resolve the playable stream for a download page.
# $1 - download-page url
# Results are communicated through globals: video_url holds the chosen
# stream url and idx the m3u8 variant index (1 for direct mp4 lists).
_get_video_link () {
	dpage_url="$1"
	video_links="$(_decrypt_link "$dpage_url")"
	case "$video_links" in
		*mp4*)
			# direct mp4 list: take the 4th entry unconditionally
			video_url="$(printf '%s' "$video_links" | head -n 4 | tail -n 1)"
			idx=1
			;;
		*)
			# m3u8 playlist: variant selection happens via idx
			video_url="$video_links"
			_get_video_quality_m3u8
			;;
	esac
}

# Print the mp4 url matching the user-requested $quality.
# $1 - newline-separated list of candidate urls
# "best" takes the 4th entry, "worst" the 1st; any other value is
# matched case-insensitively against "<quality>p" and falls back to
# best (with a warning via err) when absent.
_get_video_quality_mp4() {
	_mp4_list="$1"
	if [ "$quality" = "best" ]; then
		video_url="$(printf '%s' "$_mp4_list" | head -n 4 | tail -n 1)"
	elif [ "$quality" = "worst" ]; then
		video_url="$(printf '%s' "$_mp4_list" | head -n 1)"
	else
		video_url="$(printf '%s' "$_mp4_list" | grep -i "${quality}p" | head -n 1)"
		if [ -z "$video_url" ]; then
			# requested quality missing: warn, then take the best entry
			err "Current video quality is not available (defaulting to best quality)"
			quality=best
			video_url="$(printf '%s' "$_mp4_list" | head -n 4 | tail -n 1)"
		fi
	fi
	printf '%s' "$video_url"
}

# Choose the m3u8 variant index (global idx) for the requested $quality.
# worst/360 -> 2, 480 -> 3, 720 -> 4, everything else (1080/best/unknown) -> 5.
# gogocdn m3u playlists list variants one position earlier, so the index
# is shifted down for those urls.
_get_video_quality_m3u8() {
	if [ "$quality" = "worst" ] || [ "$quality" = "360" ]; then
		idx=2
	elif [ "$quality" = "480" ]; then
		idx=3
	elif [ "$quality" = "720" ]; then
		idx=4
	else
		idx=5
	fi
	# NB: when the url is not a gogocdn m3u this leaves the function with
	# a non-zero exit status, mirroring the original behaviour
	printf '%s' "$video_url" | grep -qE "gogocdn.*m3u.*" && idx=$((idx-1))
}

#stolen from pystardust/ani-cli
# Print the total episode count parsed from a gogoanime category page.
# $1 - path to the downloaded category html
# Matches the first '<a href="#" class="active" ep_start ...>' element,
# prints its ep_end value (capture \2) and quits after the first hit.
_ani_category_get_episodes () {
sed -n -E '
/^[[:space:]]*<a href="#" class="active" ep_start/{
s/.* '\''([0-9]*)'\'' ep_end = '\''([0-9]*)'\''.*/\2/p
q
}' "$1"
}

_get_dpage_link () {
# get the download page url
anime_id="$1"
ep_no="$2"
curl -s "https://goload.pro/videos/${anime_id}-episode-${ep_no}" | sed -nE 's_^[[:space:]]*<iframe src="([^"]*)".*_\1_p' |
sed 's/^/https:/g'
# Return 0 when this shell's echo understands -e hex escapes.
# Compares echo -e output for \x3 (ETX) against $EOT, which the caller
# must already hold as the raw 0x03 character.
_supports_echo_e () {
	_etx_reference="$EOT"
	[ "$(echo -e "\x3")" = "$_etx_reference" ]
}

# Scrape all episodes of a specific anime.
# $1 - anime id/slug to scrape (":help" prints usage and returns 100)
# $2 - json file that scraped results are appended to
# NOTE(review): this span comes from a rendered diff and appears to
# interleave two implementations (a threaded gogoanime episode loop and
# an animixplay list scrape); reconcile against the repository before
# trusting the exact sequence below.
scrape_ani_category () {
_supports_echo_e || die 1 "Your shell does not support echo -e\n"
search="$1"
[ "$search" = ":help" ] && print_info "Search should be the specific anime to watch from gogoanime\n" && return 100
output_json_file="$2"
#stolen from pystardust/ani-cli
# follow redirects to discover gogoanime's current effective domain
base_url=$(curl -s -L -o /dev/null -w "%{url_effective}\n" https://gogoanime.cm)
_tmp_html="${session_temp_dir}/ani-category.html"
_get_request "$base_url/category/$search" > "$_tmp_html"
episode_count="$(_ani_category_get_episodes "$_tmp_html")"

# translate the generic pages_start/pages_to_scrape options into an episode range
ep_start=${pages_start:-1}
[ "$pages_to_scrape" -eq 1 ] && ep_max="$episode_count" || ep_max="$((ep_start + pages_to_scrape))"
[ "$ep_max" -gt "$episode_count" ] && ep_max=$episode_count

command_exists "openssl" || die 3 "openssl is a required dependency for ani, please install it\n"
# scrape each episode in a background job, one temp json per episode
_start_series_of_threads
while [ $ep_start -le "$ep_max" ]; do
{
print_info "Scraping anime episode $ep_start\n"
_tmp_json="${session_temp_dir}/ani-category-$ep_start.json.final"
#stolen from pystardust/ani-cli
dpage_link=$(_get_dpage_link "$search" "$ep_start")
_get_video_link "$dpage_link"
echo "[]" | jq --arg idx "$idx" --arg dpage "$dpage_link" --arg url "$video_url" --arg title "$search episode $ep_start" '[{"url": $url, "title": $title, "ID": $title, "idx": $idx, "dpage": $dpage}]' > "$_tmp_json"
} &
ep_start=$((ep_start+1))
_thread_started "$!"
done
wait
# merge the per-episode json fragments into the output file
_concatinate_json_file "${session_temp_dir}/ani-category-" "$episode_count" "$output_json_file"
base_url='https://animixplay.to'
#This pipeline is mostly a copy paste from https://github.com/pystardust/ani-cli
#this pipeline does the following
#1: send request to the url with the search
#2: does some webscraping to get a list of links to gogohd.net
#3: lastly uses sed to prepend https: to the front of each link
#4: writes the whole thing to ${session_cache_dir}/ani-category-episodes.list
_get_request "$base_url/v1/${search}" | sed -n "s_.*epslistplace.*>\(.*\)</div>_\1_p" | tr ',' '\n' | sed -e '/extra/d' -e '/PV/d' | sed -n 's_".*":"\(.*\)".*_\1_p' | sed 's/\(.*\)/https:\1/' > "${session_cache_dir}/ani-category-episodes.list"
# hand-build a json array with one entry per episode url; selecting an
# entry triggers a follow-up ani-gogohd-link scrape (see "action")
json="["
while read -r url; do
id="$(printf "%s" "$url" | sed -n 's/.*title=\([^&]\+\).*/\1/p')"
#this replaces all percents with \\x so that printf can convert them to real values
title="$(echo -e "$(printf "%s" "$id" | sed 'y/+/-/; s/%/\\x/g')")"
json=''"${json}"'{"ID": "'"$title"'", "title": "'"$title"'", "url": "'"$url"'", "scraper": "ani-category", "action": "scrape type=ani-gogohd-link search='"$url"'"},'
done < "${session_cache_dir}/ani-category-episodes.list"
json="${json%,}]"
printf "%s\n" "$json" >> "$output_json_file"
}
101 changes: 101 additions & 0 deletions addons/scrapers/ani-gogohd-link
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
#!/bin/sh
#


# Translate a numeric provider id into its display name.
# 1 -> Animixplay, 2 -> Xstreamcdn, anything else -> Gogoanime.
# Prints the name on stdout (and, like the original, leaves it in the
# global "name" as a side effect).
_provider_number_to_name () {
	if [ "$1" = "1" ]; then
		name="Animixplay"
	elif [ "$1" = "2" ]; then
		name="Xstreamcdn"
	else
		name="Gogoanime"
	fi
	printf '%s\n' "$name"
}

# Build the result json for one gogohd provider.
# $1 - video id extracted from the gogohd url
# $2 - provider number (1=Animixplay, 2=Xstreamcdn, *=Gogoanime)
# $3 - pre-scraped response data for the gogohd streaming page
# Reads the globals $title, $useragent and $gogohd_url; sets
# $result_links and, when links were found, prints a json array.
# Returns 0 early (printing nothing) when the needed id is missing.
_ani_gogohd_generate_link () {
	id="$1"
	provider="$2"
	full_gogo_resp="$3"
	case "$provider" in
		1)
			refr="https://animixplay.to"
			[ -z "$id" ] && return 0
			# fix: was "pring_warning", which is not a defined function
			command_exists "base64" || { print_warning "base64 is not installed\n" && return 0; }
			enc_id="$(printf "%s" "$id" | base64)"
			# fix: dropped the stray "," after the format string -- it was
			# appended to the generated id and corrupted the api request
			ani_id="$(printf "%sLTXs3GrU8we9O%s" "$id" "$enc_id" | base64)"
			# the target link is hidden in the fragment of a Location redirect
			result_links="$(_get_request "$refr/api/cW9${ani_id}" -I | sed -nE 's_[L|l]ocation: https?://[^#]*#([^#]*).*_\1_p' | base64 -d)"
			_generate_json () {
				echo '[{"ID": "'"${title}-ani-gogohd"'", "title": "'"${title}"'", "scraper": "ani-gogohd-link", "url": "'"$result_links"'"}]'
			} ;;
		2)
			fb_id=$(printf "%s" "$full_gogo_resp" | sed -n "s_.*fembed.*/v/__p")
			refr="https://fembed-hd.com/v/$fb_id"
			[ -z "$fb_id" ] && return 0
			# fix: curl flags are case sensitive: -A (user agent), -X POST
			# (lowercase -x would set a proxy) and -H (header), matching the
			# gogoanime branch below
			result_links="$(curl -A "$useragent" -s -X POST "https://fembed-hd.com/api/source/$fb_id" -H "X-Requested-With:XMLHttpRequest" |
				sed -e 's/\\//g' -e 's/.*data"://' | tr "}" "\n" | sed -n 's/.*file":"\(.*\)","label":"\(.*\)","type.*/\2>\1/p')"

			_generate_json () {
				# result_links lines look like "<quality>><url>"
				json="["
				while read -r quality_and_url; do
					quality="${quality_and_url%%">"*}"
					url="${quality_and_url#*">"}"
					json=''"${json}"'{"id":"'"$id"'", "url": "'"$url"'", "title": "'"${title} (${quality})"'", "scraper": "ani-gogohd-link"},'
				done <<-eof
					$result_links
				eof
				json="${json%,}]"
				echo "$json"
			} ;;
		*)
			command_exists "openssl" || die 3 "openssl is necessary for gogoanime scraping\n"
			refr="https://gogohd.net/"
			[ -z "$id" ] && return 0
			# fix: parse the response passed in as $3 consistently instead of
			# mixing in the caller's $resp global (same content in the only
			# caller, but the hidden dependency was fragile)
			secret_key=$(printf "%s" "$full_gogo_resp" | sed -n '2p' | tr -d "\n" | od -A n -t x1 | tr -d " |\n")
			iv=$(printf "%s" "$full_gogo_resp" | sed -n '3p' | tr -d "\n" | od -A n -t x1 | tr -d " |\n")
			second_key=$(printf "%s" "$full_gogo_resp" | sed -n '4p' | tr -d "\n" | od -A n -t x1 | tr -d " |\n")
			token=$(printf "%s" "$full_gogo_resp" | head -1 | base64 -d | openssl enc -d -aes256 -K "$secret_key" -iv "$iv" | sed -n 's/.*&\(token.*\)/\1/p')
			ajax=$(printf '%s' "$id" | openssl enc -e -aes256 -K "$secret_key" -iv "$iv" -a)
			data=$(curl -A "$useragent" -sL -H "X-Requested-With:XMLHttpRequest" "${gogohd_url}encrypt-ajax.php?id=${ajax}&alias=${id}&${token}" | sed -e 's/{"data":"//' -e 's/"}/\n/' -e 's/\\//g')
			result_links="$(printf '%s' "$data" | base64 -d 2>/dev/null | openssl enc -d -aes256 -K "$second_key" -iv "$iv" 2>/dev/null |
				tr -d \\\\ | sed -n "s_.*file\":\"\([^\"]*\)\".*source.*_\1_p")"
			_generate_json () {
				echo '[{"ID": "'"${title}-ani-gogohd"'", "title": "'"${title}"'", "scraper": "ani-gogohd-link", "url": "'"$result_links"'"}]'
			} ;;

	esac
	# only emit json when a provider actually produced links
	[ -n "$result_links" ] && _generate_json
}

# Return 0 when this shell's echo understands -e hex escapes.
# Compares echo -e output for \x3 (ETX) against $EOT, which the caller
# must already hold as the raw 0x03 character.
_supports_echo_e () {
	_etx_reference="$EOT"
	[ "$(echo -e "\x3")" = "$_etx_reference" ]
}


# Scrape playable links for a single gogohd episode url.
# $1 - episode url carrying title=... and id=... query parameters
# $2 - json file the resulting entry is appended to
scrape_ani_gogohd_link () {
_supports_echo_e || die 1 "Your shell does not support echo -e\n"
link="$1"
output_json_file="$2"
# first extract the url-encoded title (reusing the id variable briefly)...
id="$(printf "%s" "$link" | sed -n 's/.*title=\([^&]\+\).*/\1/p')"
#this replaces all percents with \\x so that printf can convert them to real values
title="$(echo -e "$(printf "%s" "$id" | sed 'y/+/-/; s/%/\\x/g')")"
# ...then overwrite id with the actual video id
id="$(printf "%s" "$link" | sed -n 's/.*id=\([^&]*\).*/\1/p')"
gogohd_url="https://gogohd.net/"
# pull the crypto markers out of the streaming page in one sed pass
# NOTE(review): "-l" is passed to _get_request; sibling calls use -I/-L,
# so this lowercase flag looks suspicious -- confirm against _get_request
resp=$(_get_request "${gogohd_url}streaming.php?id=$id" -l |
sed -n 's/.*class="container-\(.*\)">/\1/p ;
s/.*class="wrapper container-\(.*\)">/\1/p ;
s/.*class=".*videocontent-\(.*\)">/\1/p ;
s/.*data-value="\(.*\)">.*/\1/p ;
s/.*data-status="1".*data-video="\(.*\)">.*/\1/p')

# try up to 3 providers, rotating 1 -> 2 -> 3 -> 1
provider=1
i=0
# NOTE(review): $result_links is only assigned inside the $(...) below,
# i.e. in a subshell, so it is never non-empty here; the loop actually
# terminates via "$data" or the i counter -- confirm intended behaviour
while [ "$i" -lt 3 ] && [ -z "$result_links" ]; do
print_info "Trying provider: $(_provider_number_to_name $provider)\n"
data="$(_ani_gogohd_generate_link "$id" "$provider" "$resp")"
[ -n "$data" ] && break
provider=$((provider % 3 + 1))
i=$((i+1))
done
printf "%s\n" "$data" >> "$output_json_file"
}
2 changes: 1 addition & 1 deletion addons/scrapers/ddg
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ scrape_ddg () {
video_player|audio_player|downloader|multimedia_player) die 1 "It appears your url_handler is not a webbrowser" ;;
esac
search=$1
[ "$search" = ":help" ] && print_info "search ddg" && return 10
[ "$search" = ":help" ] && print_info "search ddg" && return 100
command_exists "ddgr" || die 3 "ddgr is used to scrape duckduckgo"
output_json_file="$2"

Expand Down
10 changes: 5 additions & 5 deletions addons/scrapers/invidious-popular
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ __invidious_search_json_videos () {
;
[ .[] | select(.type=="shortVideo") |
{
scraper: "youtube_search",
scraper: "invidious_popular",
ID: .videoId,
url: "'"${yt_video_link_domain}"'/watch?v=\(.videoId)",
title: .title,
Expand Down Expand Up @@ -38,10 +38,10 @@ scrape_invidious_popular(){
_get_invidious_thumb_quality_name

{
_invidious_search_json_live < "$_tmp_json"
__invidious_search_json_videos < "$_tmp_json"
_invidious_search_json_channel < "$_tmp_json"
_invidious_search_json_playlist < "$_tmp_json"
_invidious_search_json_live < "$_tmp_json"| jq '[.[]|.scraper="invidious_popular"]'
__invidious_search_json_videos "invidious_popular" < "$_tmp_json"
_invidious_search_json_channel < "$_tmp_json" | jq '[ .[]|.scraper="invidious_popular" ]'
_invidious_search_json_playlist < "$_tmp_json" | jq '[ .[]|.scraper="invidious_popular" ]'
} >> "$output_json_file"
i=$((i+1))
done
Expand Down
Loading

0 comments on commit b716e94

Please sign in to comment.