#!/usr/bin/env bash
#-*-coding:utf-8 -*-
#Auto updated?
#   Yes
#File:
#   fzf_youtube_subs
#Author:
#   The-Repo-Club [wayne6324@gmail.com]
#Github:
#   https://github.com/The-Repo-Club/
#
#Created:
#   Sun 03 January 2021, 05:09:33 PM [GMT]
#Modified:
#   Thu 20 October 2022, 03:27:18 PM [GMT+1]
#
#Description:
#   Watch your YouTube subscriptions without a YouTube account
#   via curl, fzf, a browser and basic unix commands.
#
#   The $SUBS_FILE is a text file containing usernames or channel IDs;
#   comments and blank lines are ignored.
#
#Dependencies:
#   fzf
#
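#Example:
#   A hypothetical $SUBS_FILE (one entry per line; inline '#' comments are
#   allowed; a channel ID is 'UC' plus 22 more characters, anything else is
#   treated as a username):
#
#     UCxxxxxxxxxxxxxxxxxxxxxx   # hypothetical channel ID
#     somecreator                # hypothetical username
#
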
fzf_menu() {
    fzf --prompt="Select a video: " --border=rounded --margin=1% --color=dark --height 100% --reverse --header=" YOUTUBE SUBS MENU " --info=hidden --header-first
}

# -/-/-/-/- Settings -/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/
: "${SUBS_FILE:=${HOME}/.config/fzf/subs.ini}"
: "${SUBS_MENU_PROG:=fzf_menu}"
: "${SUBS:=${HOME}/.cache/subs}"
: "${SUBS_LINKS:=$SUBS/links}"
: "${SUBS_OPEN:=$(gobble videoplayer)}"
: "${SUBS_CACHE:=$SUBS/cache}"
: "${SUBS_SLEEP_VALUE:=0.05}" # raise this if you experience problems
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
SEP=^^^^^ # shouldn't need to change this
# -/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/
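# Every setting above uses the ': "${VAR:=default}"' pattern, so each one can
# be overridden from the environment, e.g. (mpv is only an assumed example
# player here, not a requirement of the script):
#
#   SUBS_OPEN=mpv SUBS_SLEEP_VALUE=0.2 fzf_youtube_subs -u
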
die() {
    printf >&2 '%s\n' "$*"
    exit 1
}

usage() {
    die 'Usage: fzf_youtube_subs [-h help] [-c cat_subs] [-g gen_links] [-u update_subs] [-d daemonize]'
}

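# A typical first run, using the flags handled in main() below:
#
#   fzf_youtube_subs -g   # resolve the subs file into feed links
#   fzf_youtube_subs -u   # fetch the feeds into the local cache
#   fzf_youtube_subs      # no flag: pick a video from the menu and play it
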
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
# Synopsis: $SUBS_FILE [txt] -> $SUBS_LINKS [xml links]
#
# Updates local cache of xml subscription links from the
# subscription file containing either usernames or channel ids.
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
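# Each resolved entry becomes one feed URL in $SUBS_LINKS, e.g. (hypothetical
# channel ID):
#
#   https://youtube.com/feeds/videos.xml?channel_id=UCxxxxxxxxxxxxxxxxxxxxxx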
gen_links() {
    : >"$SUBS_LINKS"

    count=0
    total=$(sed -e '/^$/d' -e '/^#/d' <"$SUBS_FILE" | wc -l)

    while read -r line; do

        # ignore comments and blank lines
        case $line in '' | ' ' | '#'*) continue ;; esac

        # strip off in-line comments and any trailing whitespace
        line=${line%%#*}
        line=${line%% *}

        count=$((count + 1))

        case $line in
            UC*)
                # YT channel IDs always begin with 'UC' and are 24 chars long
                printf "[%s/%s] using channel ID '%s' for xml link\n" "$count" "$total" "$line"

                [ ${#line} -eq 24 ] &&
                    printf 'https://youtube.com/feeds/videos.xml?%s\n' \
                        "channel_id=$line" >>"$SUBS_LINKS"
                ;;
            *)
                # otherwise we are given a username; we must find out its channel ID
                printf "fetching channel ID for %s...\n" "$line"

                curl -sfL --retry 10 "https://youtube.com/user/$line/about" |
                    while read -r line; do
                        case $line in
                            *channel/UC??????????????????????*)
                                line=${line##*channel/}
                                line=${line%%\"*}
                                printf "[%s/%s] using channel ID '%s' for xml link\n" "$count" "$total" "$line"
                                printf 'https://youtube.com/feeds/videos.xml?channel_id=%s\n' \
                                    "$line" >>"$SUBS_LINKS"
                                break
                                ;;
                        esac
                    done &
                sleep "${SUBS_SLEEP_VALUE:-0}"
                ;;
        esac

    done <"$SUBS_FILE"

    count=0
    while [ "$count" -ne "$total" ]; do
        count=$(wc -l <"$SUBS_LINKS")
        printf "[%s/%s] waiting for jobs to complete...\n" "$count" "$total"
        sleep 0.5
    done
}

# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
# Synopsis: $1 [LINK] -> $SUBS_CACHE/$chan [CHANNEL INFO]
#
# Takes a channel rss feed link and creates a file with one line
# per video, holding its date, title, and url.
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
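# A sketch of one resulting cache line (hypothetical values), with fields
# joined by $SEP:
#
#   2022-10-20T15:27:18^^^^^SomeChannel^^^^^Some Video Title^^^^^https://youtube.com/watch?v=xxxxxxxxxxx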
get_vids() {
    data=$(curl -sfL --retry 15 "$1")

    # hide the first <published> tag which is the channel
    # creation date
    data=${data#*\<\/published\>}

    # trim off outer <name> tags
    chan=${data%%</name*}
    chan=${chan##*name>}

    printf "%s\n" "$data" |
        while read -r line; do
            case $line in
                *'link rel='*)
                    line=${line#*href=\"}
                    line=${line%\"/\>}
                    line=https://${line#*www.}
                    url=$line
                    ;;
                *'<published>'*)
                    line=${line%+00:*}
                    line=${line#*<published>}
                    date=$line
                    ;;
                *'<media:title>'*)
                    line=${line%</*}
                    line=${line#*:title>}
                    title=$line
                    printf '%s\n' \
                        "${date}${SEP}${chan}${SEP}${title}${SEP}${url}" \
                        >>"$SUBS_CACHE/$chan"
                    ;;
            esac
        done
}

# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
# Updates the local cache of subscriptions. ([-u] flag)
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
update_subs() {
    [ -f "$SUBS_LINKS" ] || die 'Subs links have not been generated.'

    rm -r "${SUBS_CACHE:-?}" 2>/dev/null || :
    mkdir -p "$SUBS_CACHE"

    total=$(wc -l <"$SUBS_LINKS")

    count=0
    while read -r link; do
        count=$((count + 1))
        printf 'starting job [%s/%s] for %s\n' "$count" "$total" "$link"
        get_vids "$link" &
        sleep "${SUBS_SLEEP_VALUE:-0}"
    done <"$SUBS_LINKS"

    count=0
    while [ "$count" -ne "$total" ]; do
        count=$(printf '%s\n' "$SUBS_CACHE"/* | wc -l)
        printf "[%s/%s] waiting for fetch jobs to complete...\n" "$count" "$total"
        sleep 0.5
    done

    printf '%s\n\n' 'done!'
}

# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
# Grab current cache of subscriptions, sort by date uploaded.
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
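# Each cache line is printed as '[MM-DD channel] title'; a hypothetical
# example:
#
#   [10-20 SomeChannel] Some Video Title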
cat_subs() {
    sort -r "$SUBS_CACHE"/* |
        while read -r line; do
            chan=${line#*$SEP}
            chan=${chan%%$SEP*}
            title=${line#*$chan$SEP}
            title=${title%%$SEP*}
            date=${line%%$SEP*}
            date=${date#*-}
            date=${date%T*}
            printf '[%s %s] %s\n' "$date" "$chan" "$title"
        done
}

# Split the concatenated lines into entities, send to menu program.
# Finally, play the result with $SUBS_OPEN (wrapped in devour).
get_sel() {
    if [ -d "$SUBS_CACHE" ]; then
        sel=$(cat_subs | $SUBS_MENU_PROG)
    else
        die 'Subs cache has not been retrieved.'
    fi

    [ "$sel" ] || die Interrupted

    chan="${sel#* }"
    chan="${chan%%] *}"
    title=${sel#*"$chan"\] }
    while read -r line; do
        case $line in
            *"$SEP$title$SEP"*)
                url=${line##*$SEP}
                if [ "$url" ]; then
                    printf 'playing: %s\n' "$url"
                    # Play the selection.
                    # shellcheck disable=2086
                    exec devour $SUBS_OPEN "$url"
                fi
                break
                ;;
        esac
    done <"$SUBS_CACHE/$chan"
}

daemonize() {
    # keep a cached copy of the subs file to check for changes;
    # if changes occur, re-generate links automatically
    daemon_file=${HOME}/.cache/subs_daemon.cache
    if [ ! -f "$daemon_file" ]; then
        cp -f "$SUBS_FILE" "$daemon_file"
    fi

    while true; do
        # re-generate links when the subs file changed or no links exist yet
        if [ ! -f "$SUBS_LINKS" ] || ! cmp -s "$SUBS_FILE" "$daemon_file"; then
            cp -f "$SUBS_FILE" "$daemon_file"
            gen_links
        fi
        update_subs

        interval=${SUBS_DAEMON_INTERVAL:-$((10 * 60))}
        printf 'Sleeping for %s seconds...\n' "$interval"
        sleep "$interval"
    done
}

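# Example of running the daemon above with a hypothetical 30-minute refresh
# interval (SUBS_DAEMON_INTERVAL defaults to 600 seconds):
#
#   SUBS_DAEMON_INTERVAL=1800 fzf_youtube_subs -d &
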
main() {
    mkdir -p "$SUBS"

    case ${1#-} in
        h) usage ;;
        g) gen_links ;;
        u) update_subs ;;
        c) cat_subs ;;
        d) daemonize ;;
        *) get_sel ;;
    esac
}

main "$@"