#!/bin/bash

# TODO:
# - save the last 5 VOD lists and offer a replay function to avoid refetching unnecessarily

# ${1} (optional) username to search VODs of

[ "${1}" = "-h" ] || [ "${1}" = "--help" ] && {
    printf "usage: getvods <username(optional)>\n"
    exit 0
}

# ${1} is a string with the error reason
# ${2} is an optional string to report a different error status
on_fail(){
    printf "last ran command error status: %s\n" "${2:-${?}}"
    printf "error: %s\n" "${1}"
    exit 2
}

CFG_DIR="${HOME}/.config/getvods"
[ ! -d "${CFG_DIR}" ] && {
    mkdir -p "${CFG_DIR}" || on_fail "${CFG_DIR} doesn't exist and couldn't be made (check write permissions to ${CFG_DIR})"
}

# get twitch api credentials
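# they are asked for once and cached as plain files under CFG_DIR; later runs read them back instead of prompting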
[ ! -f "${CFG_DIR}/client_id" ] && {
    read -p "enter your twitch API client_id >" client_id
    [ ! -z "${client_id}" ] && printf "%s" "${client_id}" > "${CFG_DIR}/client_id" || on_fail "client_id is empty or couldn't be written to ${CFG_DIR}/client_id"
} || client_id="$(cat "${CFG_DIR}/client_id")"

[ ! -f "${CFG_DIR}/client_secret" ] && {
    read -p "enter your twitch API client_secret >" client_secret
    [ ! -z "${client_secret}" ] && printf "%s" "${client_secret}" > "${CFG_DIR}/client_secret" || on_fail "client_secret is empty or couldn't be written to ${CFG_DIR}/client_secret"
} || client_secret="$(cat "${CFG_DIR}/client_secret")"

# get access token
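# request an app access token with the client_credentials grant and cache it in token.json,
# together with its absolute expiration time (now + expires_in, in epoch seconds) in expiration.txt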
get_token(){
    curl -s -X POST https://id.twitch.tv/oauth2/token -d "client_id=${client_id}" -d "client_secret=${client_secret}" -d "grant_type=client_credentials" > "${CFG_DIR}/token.json" || {
        last_err="${?}"
        rm "${CFG_DIR}/token.json" 2>/dev/null
        on_fail "couldn't get token (curl error)" "${last_err}"
    }
    bc -l <<< "$(date +%s) + $(grep -o "expires_in\":[0-9]\+" "${CFG_DIR}/token.json" | awk -F : '{print $2}')" > "${CFG_DIR}/expiration.txt" || {
        last_err="${?}"
        rm "${CFG_DIR}/expiration.txt" 2>/dev/null
        on_fail "couldn't save expiration date" "${last_err}"
    }
}
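
# reuse the cached token while it hasn't expired; otherwise (or when no cache exists yet) fetch a new one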
[ -f "${CFG_DIR}/token.json" ] && [ -f "${CFG_DIR}/expiration.txt" ] && {
    [ "$(cat "${CFG_DIR}/expiration.txt")" -le "$(date +%s)" ] && get_token || :
} || {
    get_token
}
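
# pull the bearer token out of the cached JSON response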
access_token="$(grep -o "access_token\":\"[a-zA-Z0-9]\+" "${CFG_DIR}/token.json" | awk -F \" '{print $3}')"
[ -z "${access_token}" ] && {
    rm "${CFG_DIR}/token.json"
    on_fail "couldn't get token (token empty)" "1"
}

# get the username to search for
declare -i retries=0
[ ! -z "${1}" ] && {
    while :; do
        [ "${#1}" -ge "4" ] && [ "${#1}" -le "25" ] && grep "^[a-zA-Z0-9_]\{4,25\}$" <<< "${1}" >/dev/null && {
            login="${1}"
            break
        }
        printf "username must be between 4 and 25 characters long\n"
        printf "username may have only characters a-z A-Z 0-9 and _\n"
        # ${1} can't change between iterations, so stop here and fall through to the interactive prompt
        break
    done
}
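
# no valid username came from the command line: prompt for one, allowing up to 3 attempts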
[ -z "${login}" ] && {
    while :; do
        read -p 'type in username >' login
        [ "${#login}" -ge "4" ] && [ "${#login}" -le "25" ] && grep "^[a-zA-Z0-9_]\{4,25\}$" <<< "${login}" >/dev/null && break
        printf "username must be between 4 and 25 characters long\n"
        printf "username may have only characters a-z A-Z 0-9 and _\n"
        retries+=1
        [ "${retries}" -ge 3 ] && on_fail "exceeded max retries" "1"
    done
}

# get user id
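# resolved ids are cached in userlist.txt as "id:<id>,login:<login>", so repeat runs skip the API lookup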
[ ! -f "${CFG_DIR}/userlist.txt" ] || [ "${login}" != "$(grep ",login:${login}$" "${CFG_DIR}/userlist.txt" | awk -F ":|," '{print $4}')" ] && {
    user_response="$(curl -s -G -X GET "https://api.twitch.tv/helix/users" -d "login=${login}" -H "Authorization: Bearer ${access_token}" -H "client-id: ${client_id}")"
    last_err="${?}"
    [ "${last_err}" -gt 0 ] && on_fail "couldn't get user_id (curl error)" "${last_err}"
    parsed_user_response="$(grep -o "id\":\"[0-9]\+\",\"login\":\"[a-zA-Z0-9_]\{4,25\}" <<< "${user_response}")"
    last_err="${?}"
    [ "${last_err}" -gt 0 ] && on_fail "couldn't get user_id (user_id empty)" "${last_err}"
    unset user_response
    sed 's/\"//g' <<< "${parsed_user_response}" >> "${CFG_DIR}/userlist.txt"
    unset parsed_user_response
}
user_id="$(grep ",login:${login}$" "${CFG_DIR}/userlist.txt" | awk -F ":|," '{print $2}')"
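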
# save json with first 20 videos tied to the user id we just got
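# (type=archive restricts the Helix videos endpoint to past broadcasts)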
vodlist="$(curl -s -G -X GET "https://api.twitch.tv/helix/videos" -d "type=archive" -d "user_id=${user_id}" -H "Authorization: Bearer ${access_token}" -H "client-id: ${client_id}")"
last_err="${?}"
unset client_secret client_id
[ "${last_err}" -gt 0 ] && on_fail "couldn't get VOD list (curl error)" "${last_err}"
parsed_vodlist="$(sed 's/^{"data":\[\|],"pagination":{\("cursor":"[a-zA-Z0-9]\+"\)\?}}$//g' <<< "${vodlist}")"
unset vodlist
[ -z "${parsed_vodlist}" ] && on_fail "vodlist empty (no VODs found for ${login})" "1"
printf "%s" "${parsed_vodlist}" > "${CFG_DIR}/streamarchives.json"
unset parsed_vodlist

unset user_id last_err
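
# JSON escapes characters outside the BMP (emoji etc.) as UTF-16 surrogate pairs like \ud83d\ude04;
# this rewrites each pair into a single \UXXXXXXXX escape, which printf "%b" can expand when the list is printed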
utf-16-surrogate-pair-decode() {
    # shamelessly stolen from stackoverflow
    local out="$1"
    local remain=""
    local regexp='(.*)\\u[dD]([0-9a-fA-F]{3})\\u[dD]([0-9a-fA-F]{3})(.*)'
    # the pattern must be unquoted here, otherwise =~ falls back to a literal string comparison
    while [[ "${out}" =~ ${regexp} ]] ; do
        # match 2 \udxxx hex values, calculate new U, then split and replace
        local W1="$(( ( 0xd${BASH_REMATCH[2]} & 0x3ff) <<10 ))"
        local W2="$(( 0xd${BASH_REMATCH[3]} & 0x3ff ))"
        local U="$(( ( W1 | W2 ) + 0x10000 ))"
        remain="$(printf '\\U%8.8x' "${U}")${BASH_REMATCH[4]}${remain}"
        out="${BASH_REMATCH[1]}"
    done
    printf "%s%s\n" "${out}" "${remain}"
}

printf "VODs of %s\n" "${login}"

declare -a titles
declare -a duration
declare -a urls
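
# scrape titles, durations and urls out of the cached JSON with grep;
# the ${var:offset:-length} slices below strip the key name and surrounding quotes from each match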
declare -i j=0
while IFS= read -r i; do
    titles[${j}]="$(utf-16-surrogate-pair-decode "${i:9:-2}")"
    j+=1
done < <(grep -o '"title":"\(\\"\|[^"]*\)*",' "${CFG_DIR}/streamarchives.json")

j=0
while IFS= read -r i; do
    duration[${j}]="${i:12:-1}"
    j+=1
done < <(grep -o "\"duration\":\"[a-z0-9]\+\"" "${CFG_DIR}/streamarchives.json")

j=0
while IFS= read -r i; do
    urls[${j}]="${i:7:-1}"
    j+=1
done < <(grep -o "\"url\":\"https://www\.twitch\.tv/videos/[0-9]\+\"" "${CFG_DIR}/streamarchives.json")
unset j
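
# print a numbered menu; %b expands the \u and \U escapes still embedded in the titles (bash >= 4.2)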
for i in "${!titles[@]}"; do
    printf "%s:\t%b %s\n" "$((${i}+1))" "${titles[${i}]}" "${duration[${i}]}"
done
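
# let the user pick a VOD from the menu, with the same 3-attempt limit as the username prompt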
retries=0
while :; do
    read -p "choose a video from the list (type 1-${#urls[@]}) >" choice
    grep "^[0-9]\{1,2\}$" <<< "${choice}" >/dev/null && [ "${choice}" -ge 1 ] && [ "${choice}" -le "${#urls[@]}" ] && break
    printf "%s is not a number from 1-%s\n" "${choice}" "${#urls[@]}"
    retries+=1
    [ "${retries}" -ge 3 ] && on_fail "exceeded max retries" "1"
done
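
# copy the chosen VOD url to the clipboard (requires xclip)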
printf "%s" "${urls[$((${choice}-1))]}" | xclip -selection clipboard