initial commit

.travis.yml (new file, 2 lines)
@@ -0,0 +1,2 @@
script:
- shellcheck -s sh subs

subs (new executable file, 201 lines)
@@ -0,0 +1,201 @@
#!/bin/sh
#
# Watch your youtube subscriptions without a youtube account
# via curl, dmenu, mpv and basic unix commands.
#
# The $SUBS_FILE is a text file containing usernames or channel IDs;
# comments and blank lines are ignored.
#
# For more information and examples, see:
# http://github.com/mitchweaver/subs
#
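# A hypothetical $SUBS_FILE (name and ID below are made up):
#
#   somechannelname            # a username
#   UCXXXXXXXXXXXXXXXXXXXXXX  # a 24-char channel ID, in-line comments ok
#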
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*#

# -*-*-*-*-* Settings *-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*--*-*-*
: "${SUBS_FILE:=$HOME/files/subs.txt}"
: "${SUBS_MENU_PROG:=dmenu -p Subs:}"
: "${PLUMBER:=head}"
: "${SUBS:=${XDG_CACHE_HOME:-$HOME/.cache}/subs}"
: "${SUBS_LINKS:=$SUBS/links}"
: "${SUBS_CACHE:=$SUBS/cache}"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
SEP=^^^^^ # shouldn't need to change this
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
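
# The ':=' defaults above mean every setting can be overridden from
# the environment; a hypothetical invocation:
#
#   PLUMBER='xargs mpv' SUBS_FILE=$HOME/subs.txt subs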

die() {
    >&2 printf '%s\n' "$*"
    exit 1
}

usage() {
    die 'Usage: subs [-m no-video] [-g gen-links] [-u update-cache] [-c cat-subs]'
}

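# A typical first run, given the flags above (illustrative):
#
#   subs -g    # resolve $SUBS_FILE entries into xml feed links
#   subs -u    # fetch every feed into the local cache
#   subs       # pick a video from the menu, plumb its url
#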
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
|
||||||
|
# Synopsis: $SUBS_FILE [txt] -> $SUBS_LINKS [xml links]
|
||||||
|
#
|
||||||
|
# Updates local cache of xml subscription links from the
|
||||||
|
# subscription file containing either usernames or channel ids.
|
||||||
|
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
|
||||||
|
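# Either branch below appends one feed url per channel, shaped like
# (ID made up):
#   https://youtube.com/feeds/videos.xml?channel_id=UCXXXXXXXXXXXXXXXXXXXXXX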
gen_links() {
    :>"$SUBS_LINKS"

    count=0
    total=$(sed -e '/^$/d' -e '/^#/d' <"$SUBS_FILE" | wc -l)

    while read -r line ; do
        # ignore comments and blank lines
        case $line in ''|' '|'#'*) continue ;; esac
        # strip off in-line comments and any trailing whitespace
        line=${line%%#*}
        line=${line%% *}

        count=$(( count + 1 ))
        printf "[%s/%s] fetching channel xml link for %s\n" "$count" "$total" "$line"

        case $line in
            UC*)
                # YT channel IDs always begin with 'UC' and are 24 chars long
                if [ ${#line} -eq 24 ] ; then
                    printf 'https://youtube.com/feeds/videos.xml?%s\n' \
                        "channel_id=$line" >>"$SUBS_LINKS"
                    continue
                fi
        esac

        # otherwise we were given a username and must find its channel ID
        curl -sfL --retry 10 "https://youtube.com/user/$line/about" | grep channel_id | \
            sed -e 's|www\.||' -e 's|.*href="||' -e 's|">.*||' >>"$SUBS_LINKS"
    done <"$SUBS_FILE"
}

# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
# Synopsis: $1 [LINK] -> $SUBS_CACHE/$chan_name/concat [CHANNEL INFO]
#
# Takes a channel rss feed link and creates a file with
# one line per video: its date, title, and url.
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
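# e.g. one cached line, with every field made up:
#   2019-01-01T00:00:00^^^^^SomeChannel^^^^^Some Title^^^^^https://youtube.com/watch?v=XXXXXXXXXXX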
get_vids() {
    data=$(curl -sfL --retry 15 "$1")

    # hide the first <published> tag which is the channel
    # creation date
    data=${data#*\<\/published\>}

    chan=${data%%</name*}
    chan=${chan##*name>}

    printf "%s\n" "$data" | \
    while read -r line ; do
        case $line in
            *'link rel='*)
                line=${line#*href=\"}
                line=${line%\"/\>}
                line=https://${line#*www.}
                url=$line
                ;;
            *'<published>'*)
                line=${line%+00:*}
                line=${line#*<published>}
                date=$line
                ;;
            *'<media:title>'*)
                line=${line%</*}
                line=${line#*:title>}
                title=$line
                printf '%s\n' \
                    "${date}${SEP}${chan}${SEP}${title}${SEP}${url}" \
                    >>"$SUBS_CACHE/$chan"
        esac
    done
}

# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
# Updates the local cache of subscriptions. ([-u] flag)
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
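# One fetch job is spawned per link; the polling loop below then waits
# until the cache holds one file per channel before declaring success.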
update_subs() {
    [ -f "$SUBS_LINKS" ] || die 'Subs links have not been generated.'

    rm -r "${SUBS_CACHE:?}" 2>/dev/null ||:
    mkdir -p "$SUBS_CACHE"

    total=$(wc -l <"$SUBS_LINKS")

    count=0
    while read -r link ; do
        count=$(( count + 1 ))
        printf 'starting job [%s/%s] for %s\n' "$count" "$total" "$link"
        get_vids "$link" &
        sleep 0.05
    done <"$SUBS_LINKS"

    count=0
    while [ "$count" -ne "$total" ] ; do
        count=$(printf '%s\n' "$SUBS_CACHE"/* | wc -l)
        printf "[%s/%s] waiting for fetch jobs to complete...\n" "$count" "$total"
        sleep 0.5
    done

    printf '%s\n\n' 'done!'
}

# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
# Grab current cache of subscriptions, sort by date uploaded
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*
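# The date field gets trimmed down for display, e.g. (made-up stamp):
#   2019-01-01T00:00:00 -> 01-01T00:00:00 -> 01-01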
cat_subs() {
    [ -d "$SUBS_CACHE" ] || die 'Subs cache has not been retrieved.'

    sort -r "$SUBS_CACHE"/* | \
    while read -r line ; do
        chan=${line#*$SEP}
        chan=${chan%%$SEP*}
        title=${line#*$chan$SEP}
        title=${title%%$SEP*}
        date=${line%%$SEP*}
        date=${date#*-}
        date=${date%T*}
        printf '[%s %s] %s\n' "$date" "$chan" "$title"
    done
}

# Split the concatenated lines into entities, send to menu program.
# Finally, pipe this result to the ${PLUMBER}.
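# A selection looks like '[01-01 SomeChannel] Some Title' (values made
# up); the channel and title are peeled back out of it below.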
get_sel() {
    sel=$(cat_subs | $SUBS_MENU_PROG)

    [ "$sel" ] || die Interrupted

    chan="${sel#* }"
    chan="${chan%%] *}"
    title=${sel#*"$chan"\] }
    while read -r line ; do
        case $line in
            *"$SEP$title$SEP"*)
                url=${line##*$SEP}
                printf 'playing: %s\n' "$url"
                printf '%s\n' "$url" | $PLUMBER
                break
        esac
    done <"$SUBS_CACHE/$chan"
}

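# ${1#-} strips one leading dash, so 'subs -u' and 'subs u' are
# equivalent; note '-m' must come first, as it is shifted off alone.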
main() {
    mkdir -p "$SUBS"

    case ${1#-} in
        m)
            export MPV_OPTS="$MPV_OPTS --no-video"
            shift
    esac

    case ${1#-} in
        h) usage ;;
        g) gen_links ;;
        u) update_subs ;;
        c) cat_subs ;;
        *) get_sel
    esac
}

main "$@"