#!/bin/sh
#
# Watch your youtube subscriptions without a youtube account
# via curl, dmenu, mpv and basic unix commands.
#
# $SUBS_FILE is a text file containing usernames or channel IDs;
# comments and blank lines are ignored.
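#
# An example $SUBS_FILE (the entries below are made up):
#
#   UCabcdefghijklmnopqrstuv   # a raw 24-character channel ID
#   somelegacyusername         # a /user/ name, resolved to an ID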
#
# For more information and examples, see:
# http://github.com/mitchweaver/subs
#
# -/-/-/-/- Settings -/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/
: "${SUBS_FILE:=~/files/subs.txt}"
: "${SUBS_MENU_PROG:='dmenu -p Subs:'}"
: "${SUBS:=${XDG_CACHE_HOME:-~/.cache}/subs}"
: "${SUBS_LINKS:=$SUBS/links}"
: "${SUBS_CACHE:=$SUBS/cache}"
: "${SUBS_SLEEP_VALUE:=0.1}" # raise this if you experience problems
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
SEP='^^^^^' # cache field separator, shouldn't need to change this
# -/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/-/
die() {
>&2 printf '%s\n' "$*"
exit 1
}
usage() {
die 'Usage: subs [-m no-video] [-g gen-links] [-u update-cache] [-c cat-subs] [-d daemonize]'
}
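# Typical first run (a sketch):
#   subs -g   # resolve $SUBS_FILE entries into feed links
#   subs -u   # fetch each feed and build the video cache
#   subs      # pick a video from the menu and play it in mpv
#   subs -m   # same, but audio only (--no-video)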
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
# Synopsis: $SUBS_FILE [txt] -> $SUBS_LINKS [xml links]
#
# Updates local cache of xml subscription links from the
# subscription file containing either usernames or channel ids.
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
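# Each entry in $SUBS_FILE becomes one feed url in $SUBS_LINKS,
# e.g. (with a made-up channel ID):
#   https://youtube.com/feeds/videos.xml?channel_id=UCabcdefghijklmnopqrstuv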
gen_links() {
:>"$SUBS_LINKS"
count=0
total=$(sed -e '/^$/d' -e '/^#/d' <"$SUBS_FILE" | wc -l)
while read -r line ; do
# ignore comments and blank lines
case $line in ''|' '|'#'*) continue ;; esac
# strip off in-line comments and any trailing whitespace
line=${line%%#*}
line=${line%% *}
count=$(( count + 1 ))
printf "[%s/%s] fetching channel xml link for %s\n" "$count" "$total" "$line"
case $line in
UC*)
# YT channel IDs always begin with 'UC' and are 24 chars long
if [ ${#line} -eq 24 ] ; then
printf 'https://youtube.com/feeds/videos.xml?%s\n' \
"channel_id=$line" >>"$SUBS_LINKS"
continue
fi
esac
# otherwise this is a username, so we must look up its channel ID
curl -sfL --retry 10 "https://youtube.com/user/$line/about" | \
while read -r line ; do
case $line in
*channel/UC??????????????????????*)
line=${line##*channel/}
line=${line%%\"*}
printf 'https://youtube.com/feeds/videos.xml?channel_id=%s\n' \
"$line" >>"$SUBS_LINKS"
break
esac
done
done <"$SUBS_FILE"
}
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
# Synopsis: $1 [feed link] -> $SUBS_CACHE/$chan [channel info]
#
# Takes a channel rss feed link and creates a file with one
# line per video: its date, channel, title, and url.
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
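# A cache line looks like this (values made up):
#   2020-12-01T12:00:00^^^^^SomeChannel^^^^^Some Title^^^^^https://youtube.com/watch?v=XXXXXXXXXXX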
get_vids() {
data=$(curl -sfL --retry 15 "$1")
# hide the first <published> tag which is the channel
# creation date
data=${data#*\<\/published\>}
# trim off outer <name> tags
chan=${data%%</name*}
chan=${chan##*name>}
printf "%s\n" "$data" | \
while read -r line ; do
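# each feed <entry> carries the video link, publish date, and
# media:title in that order, so the title triggers the write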
case $line in
*'link rel='*)
line=${line#*href=\"}
line=${line%\"/\>}
line=https://${line#*www.}
url=$line
;;
*'<published>'*)
line=${line%+00:*}
line=${line#*<published>}
date=$line
;;
*'<media:title>'*)
line=${line%</*}
line=${line#*:title>}
title=$line
printf '%s\n' \
"${date}${SEP}${chan}${SEP}${title}${SEP}${url}" \
>>"$SUBS_CACHE/$chan"
esac
done
}
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
# Updates the local cache of subscriptions. ([-u] flag)
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
update_subs() {
[ -f "$SUBS_LINKS" ] || die 'Subs links have not been generated.'
rm -r "${SUBS_CACHE:-?}" 2>/dev/null ||: # ? guards against an empty path
mkdir -p "$SUBS_CACHE"
total=$(wc -l <"$SUBS_LINKS")
count=0
while read -r link ; do
count=$(( count + 1 ))
printf 'starting job [%s/%s] for %s\n' "$count" "$total" "$link"
get_vids "$link" &
sleep "${SUBS_SLEEP_VALUE:-0}"
done <"$SUBS_LINKS"
count=0
# ls avoids miscounting: an unmatched glob would print itself
# note: a channel whose fetch fails never writes a cache file,
# so this poll assumes every fetch eventually succeeds
while [ "$count" -ne "$total" ] ; do
count=$(ls "$SUBS_CACHE" | wc -l)
printf "[%s/%s] waiting for fetch jobs to complete...\n" "$count" "$total"
sleep 0.5
done
printf '%s\n\n' 'done!'
}
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
# Grab current cache of subscriptions, sort by date uploaded
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
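# Output lines look like (values made up):
#   [12-01 SomeChannel] Some Title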
cat_subs() {
sort -r "$SUBS_CACHE"/* | \
while read -r line ; do
chan=${line#*$SEP}
chan=${chan%%$SEP*}
title=${line#*$chan$SEP}
title=${title%%$SEP*}
date=${line%%$SEP*}
date=${date#*-} # drop the year
date=${date%T*} # keep only the month and day
printf '[%s %s] %s\n' "$date" "$chan" "$title"
done
}
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
# Split the concatenated lines into entities and send them to
# the menu program, then play the selection with mpv.
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
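# e.g. picking the (made-up) line '[12-01 SomeChannel] Some Title'
# looks the url back up in "$SUBS_CACHE/SomeChannel"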
get_sel() {
if [ -d "$SUBS_CACHE" ] ; then
sel=$(cat_subs | $SUBS_MENU_PROG)
else
die 'Subs cache has not been retrieved.'
fi
[ "$sel" ] || die Interrupted
chan="${sel#* }"
chan="${chan%%] *}"
title=${sel#*"$chan"\] }
while read -r line ; do
case $line in
*"$SEP$title$SEP"*)
url=${line##*$SEP}
if [ "$url" ] ; then
printf 'playing: %s\n' "$url"
# Play the selection.
# shellcheck disable=2086
exec mpv $MPV_OPTS "$url"
fi
break
esac
done <"$SUBS_CACHE/$chan"
}
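# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
# Periodically refreshes the cache, regenerating the links
# whenever $SUBS_FILE changes. ([-d] flag)
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*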
daemonize() {
# create a cached copy of the subs file to check for changes
# if changes occur, re-generate links automatically
daemon_file=${XDG_CACHE_HOME:-$HOME/.cache}/subs_daemon.cache
if [ ! -f "$daemon_file" ] ; then
cp -f "$SUBS_FILE" "$daemon_file"
fi
while true ; do
if ! cmp -s "$SUBS_FILE" "$daemon_file" ; then
gen_links
cp -f "$SUBS_FILE" "$daemon_file"
fi
update_subs
interval=${SUBS_DAEMON_INTERVAL:-$(( 5 * 60 ))}
printf 'Sleeping for %s seconds...\n' "$interval"
sleep "$interval"
done
}
main() {
mkdir -p "$SUBS"
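# -m is a prefix flag and may precede any mode, e.g. 'subs -m -u';
# on its own, 'subs -m' plays the selection audio only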
case ${1#-} in
m)
export MPV_OPTS="$MPV_OPTS --no-video"
shift
esac
case ${1#-} in
h) usage ;;
g) gen_links ;;
u) update_subs ;;
c) cat_subs ;;
d) daemonize ;;
*) get_sel
esac
}
main "$@"