#! /bin/bash

# Put `m' or `s' on the end because this will be used with the `sleep' command.
check_every="37s"

seen_issues_log="seen_issues_log"
atom_feed_log="atom_feed_log"
atom_url="https://labs.parabola.nu/activity.atom"
bot_ipc="/tmp/un-provoked-message-store"

# fauno wants the bot to deliver messages spaced out by two minutes, so I need
# another fifo.
another_fifo="/tmp/pbot-ng_chili_change_detector_fifo"

# If the fifo doesn't exist then create it.
[[ -p ${another_fifo} ]] || mkfifo "${another_fifo}"

# OK, this code will forward no more than two messages onto pbot-ng every two
# minutes (fauno doesn't want flood). It runs in the background. It could be
# sensible to split this out into a separate file so it can tidily be used by
# other pbot-ng modules but I cba. This code is quite l33t actually because it
# doesn't delay the messages at all unless two messages get delivered in two
# minutes.
tail -f "${another_fifo}" |
while true
do
    read smile && echo "${smile}" >> "${bot_ipc}"
    read -t 120 smile && echo "${smile}" >> "${bot_ipc}" || continue
    sleep 2m
done &

declare -A bugs
declare -A old_bugs

function get_feed_make_array
{
    # Download the feed.
    feed=$( mktemp )
    header_dump=$( mktemp )
    if [[ -n ${etag} ]]
    then
        curl --user-agent "pbot-ng" -D "${header_dump}" \
             -H "If-None-Match: \"${etag}\"" \
             "${atom_url}" 2> /dev/null > "${feed}"
        ret_val="${?}"
    else
        curl --user-agent "pbot-ng" -D "${header_dump}" \
             "${atom_url}" 2> /dev/null > "${feed}"
        ret_val="${?}"
    fi

    # Get the first line of the headers.
    read http_response < "${header_dump}"

    # If curl's exit status was zero and the http response was 200.
    if (( ! ret_val )) && [[ ${http_response} == *' 200 '* ]]
    then
        # Is there an ETag?
        if etag_line=$( grep 'ETag:' "${header_dump}" )
        then
            etag_line="${etag_line#*\"}"
            etag="${etag_line%\"*}"
            # Now we have the ETag.
        else
            # It seems that the server no longer sends ETags.
            unset etag
        fi
    else
        # This indicates either a failure to download the page or that the page
        # has not changed since the last time it was downloaded.
        rm "${feed}"
        rm "${header_dump}"
        return 1
    fi

    unset bugs
    declare -A -g bugs

    # We want an array of all the bug titles and their urls.
    stage=0
    while read -r line
    do
        case "${stage}" in
            0 )
                [[ ${line} == *'<entry>'* ]] &&
                {
                    title=""
                    url=""
                    updated=""
                    stage=1
                }
                ;;
            1 )
                case "${line}" in
                    *'<title>'*'</title>'* )
                        booboo="${line#*>}"
                        title="${booboo%<*}"
                        ;;
                    *'<updated>'*'</updated>'* )
                        foobar="${line#*>}"
                        updated="${foobar%<*}"
                        ;;
                    *'<link'*'href="'* )
                        # Pull the bug's url out of the entry's link element.
                        url="${line#*href=\"}"
                        url="${url%%\"*}"
                        ;;
                    *'</entry>'* )
                        # We don't want revisions.
                        if [[ -n ${title} ]] && [[ -n ${url} ]] &&
                           [[ -n ${updated} ]] &&
                           [[ "${title}" != *' - Revision '* ]]
                        then
                            # Add this to the array, fixing the url if it's
                            # broken.
                            bugs["${title}🐵${updated}"]="${url/chili/labs.parabola.nu}"
                        fi
                        stage=0
                        ;;
                esac
                ;;
        esac
    done < "${feed}"

    rm "${feed}"
    rm "${header_dump}"
    return 0
}

function slide_and_write
{
    # We need to make the new array the old one.
    unset old_bugs
    declare -A -g old_bugs
    for key in "${!bugs[@]}"
    do
        ii=$( sed 's/([^)]\+)//' <<< "${key}" )
        old_bugs[${ii}]=${bugs[${key}]}
    done

    # We now write this to the file. We separate the fields with some sort of
    # cat face: 🐱
    for i in "${!old_bugs[@]}"
    do
        echo "${i}🐱 ${old_bugs[${i}]}"
    done > "${atom_feed_log}"
}

if [[ -f ${atom_feed_log} ]]
then
    # Get the array that stores the info of all bug titles and their urls out
    # of the file it's stored in.
    while read -r line
    do
        old_bugs["${line%%🐱 *}"]="${line##*🐱 }"
    done < "${atom_feed_log}"
else
    # The log does not exist so we create it!
    get_feed_make_array
    slide_and_write

    # Also we need to create the log of seen issues.
    for i in "${!bugs[@]}"
    do
        ii=$( sed 's/([^)]\+)//' <<< "${i}" )
        [[ "${ii%%🐵*}" =~ \#[[:digit:]]+ ]] # Match the bug number, e.g. #390.
        echo "${BASH_REMATCH}"
    done > "${seen_issues_log}"
fi

while true
do
    # Download the feed and make an array of the bugs in it. If it fails or
    # hasn't changed then we just wait until it's time to check again and then
    # continue with the next iteration of the loop.
    get_feed_make_array || { sleep "${check_every}" ; continue ; }

    temp_changes=$( mktemp )

    # Compare this array to the previous one. If any new bugs have appeared
    # since last time then we check whether each one is in the log of seen
    # issues, and if not, we add it, and we know that this is a creation, not
    # a change.
    for i in "${!bugs[@]}"
    do
        ii=$( sed 's/([^)]\+)//' <<< "${i}" )

        # If this bug is new since last time.
        if [[ -z ${old_bugs[${ii}]} ]]
        then
            # Check the log of seen issues to find out if this is a creation
            # or a change.
            [[ "${ii%%🐵*}" =~ \#[[:digit:]]+ ]] # Match the bug number, e.g. #390.
            if grep "${BASH_REMATCH}" "${seen_issues_log}" > /dev/null 2> /dev/null
            then
                creation_or_change="changed"
            else
                creation_or_change="created"
                # Add this issue to the log.
                echo "${BASH_REMATCH}" >> "${seen_issues_log}"
            fi

            real_title="${i%%🐵*}"
            real_title_a="${real_title%%:*}"
            real_title_b="${real_title#*:}"
            cat="${real_title_a%% - *}" # e.g. `pbot-ng'
            num="${real_title_a#* - }"

            echo "${num} ${creation_or_change}: ( ${cat} -${real_title_b} ) ${bugs[${i}]}" >> "${temp_changes}"
        fi
    done

    if (( $( wc -l "${temp_changes}" 2> /dev/null | cut -d ' ' -f 1 ) > 12 ))
    then
        echo "More than 12 changes have been detected on the bug tracker. Ignoring." >> "${bot_ipc}"
    else
        while read line
        do
            echo "${line}" >> "${another_fifo}"
        done < "${temp_changes}"
    fi
    rm -f "${temp_changes}"

    # Make this array the old one and write it to the file in case we need to
    # retrieve it later.
    slide_and_write

    sleep "${check_every}"
done