#!/bin/bash
export TZ=UTC
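
# Required configuration comes from the environment: the http2irc GET/POST endpoints and the IA S3 credentials.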
envvars=(HTTP2IRC_GET_URL HTTP2IRC_POST_URL IA_S3_ACCESS IA_S3_SECRET)
for envvar in "${envvars[@]}"; do
	if [[ ! -v "${envvar}" ]]; then
		{ printf 'Error: one or more of the required environment variables (%s' "${envvars[0]}"; printf ', %s' "${envvars[@]:1}"; printf ') missing\n'; } >&2
		exit 1
	fi
done
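
# Verify that all external tools invoked below are available on PATH.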
for dep in awk codearchiver curl ia-upload-stream python3 sha256sum tee zstd; do
	if ! command -v "${dep}" &>/dev/null; then
		printf 'Error: %s not found\n' "${dep}" >&2
		exit 1
	fi
done
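
# Write a single log message to stderr, prefixed with a high-resolution Unix timestamp.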
function log {
	printf '%s %s\n' "${EPOCHREALTIME}" "$1" >&2
}
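
# Read output on stdin and log each line with the given prefix.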
function log_loop {
	prefix="$1"
	# If the output does not end with a LF, add one. Then replace CRLF with LF and replace remaining CR with LF.
	{ lastchar="$(tee /dev/fd/3 | tail -c 1 | xxd -p)"; if [[ "${lastchar}" != '0a' ]]; then printf '\n'; fi } 3>&1 |
		sed -u 's,\r$,,; s,\r,\n,g' |
		while IFS= read -r line; do log "${prefix}${line}"; done
}
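
# Deliver a message to the IRC channel via the http2irc POST endpoint.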
function send {
	local message="$1"
	log "Sending message: ${message}"
	curl --silent --verbose --max-time 10 --data "${message}" "${HTTP2IRC_POST_URL}" 2> >(log_loop 'curl http2irc POST: ') | log_loop 'http2irc POST response: '
}
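
# Send a message addressed to a specific nick.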
function respond {
	local nick="$1"
	local message="$2"
	send "${nick}: ${message}"
}
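
# Main pipeline: stream IRC messages from http2irc, turn valid bot commands into jobs,
# run codearchiver for each job, and upload the resulting files to the Internet Archive.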
{ # Group the pipeline without requiring a backslash every time
	while :; do
		# Read from http2irc
		log 'Starting http2irc GET stream...'
		curl --silent --verbose --no-buffer "${HTTP2IRC_GET_URL}" 2> >(log_loop 'curl http2irc GET: ')
		printf '\n' # Ensure that there's a trailing LF for `read`
	done |
	# Log all raw input
	tee >(log_loop 'Received http2irc line: ') |
	# Transform the JSONL data into a more suitable format for the following: lines of 'modes SP nick SP message'
	python3 -u -c 'import json, sys'$'\n''def json_parse_or_none(s):'$'\n'' try: return json.loads(s)'$'\n'' except json.JSONDecodeError as e:'$'\n''  print(f"Could not parse {s[:100]}…: {type(e).__name__}: {e!s}")'$'\n''{print(o["user"]["modes"] or "_", o["user"]["nick"], o["message"]) for o in map(json_parse_or_none, sys.stdin) if o and o.get("command") == "PRIVMSG"}' |
	# For valid bot commands with adequate permissions, assign a job ID and respond. Suppress everything else. Print lines of 'jobid SP nick SP URL' for the processing below.
	while read -r modes nick message; do
		if [[ "${message}" == '!help' ]]; then
			respond "${nick}" '`!a URL`: archives a single repository'
			respond "${nick}" '`!a < URL`: archives a list of repositories (no success/failure report, no warnings/errors report, check logs manually!)'
			continue
		fi
		if [[ "${message}" != '!a '* ]]; then
			continue
		fi
		if [[ "${modes}" != *[@+]* ]]; then
			respond "${nick}" 'Only voiced or opped users may use this command.'
			continue
		fi
		if [[ "${message}" =~ ^'!a '([a-z-]+\+)?[a-z]+://[^\ ]+$ ]]; then
			# Individual job
			jobs=("${message:3}")
			src="${message:3}"
		elif [[ "${message}" =~ ^'!a < 'https://transfer\.archivete\.am/[a-zA-Z0-9]+/.+$ ]]; then
			# List job
			jobs=()
			url="${message:5}"
			bad=
			log "Retrieving list job list: ${url}"
			while read -r line; do
				if [[ "${line}" =~ ^'!a '([a-z-]+\+)?[a-z]+://[^\ ]+$ ]]; then
					# Strip the '!a ' prefix so only the URL is queued, as in the individual job case.
					jobs+=("${line:3}")
				elif [[ "${line}" == '' ]]; then
					# Ignore empty lines
					continue
				else
					respond "${nick}" "Malformed line in ${url}: ${line}"
					bad=1
					break
				fi
			done < <({ curl --silent --verbose --fail --max-time 10 "${message:5}" 2> >(log_loop 'curl list job: '); printf '\n'; } | tee >(log_loop 'List input line: '))
			if [[ "${bad}" ]]; then
				continue
			fi
			src="${url}"
		else
			respond "${nick}" "I don't understand your command. Please forgive me."
			continue
		fi
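		# Assign a job ID; each read from /proc/sys/kernel/random/uuid yields a fresh random UUID.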
		read -r jobid </proc/sys/kernel/random/uuid
		respond "${nick}" "Queueing job ${jobid} for ${src}"
		appendcounter=; if [[ ${#jobs[@]} -gt 1 ]]; then appendcounter=yes; fi
		for ((i=0; i<${#jobs[@]}; ++i)); do
			job="${jobs[${i}]}"
			singlejobid="${jobid}"; if [[ "${appendcounter}" ]]; then singlejobid+="_${i}"; fi
			printf '%s %s %s\n' "${singlejobid}" "${nick}" "${job}"
		done
		if [[ "${appendcounter}" ]]; then printf '%s %s end\n' "${jobid}" "${nick}"; fi # Special value for sending a message when all list URLs have been processed
	done |
	# The actual work loop
	while :; do
		# Process in batches for efficiency of parallel IA processing after uploads
		declare -a batch=()
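		# Collect lines until `read -t 1` times out, i.e. until no new job line has arrived for a second.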
		while read -r -t 1 line; do
			batch+=("${line}")
		done
		if [[ ${#batch[@]} -eq 0 ]]; then
			continue
		fi
		statuscodes=() # Exit status for each `batch` element's codearchiver process (-1 for 'end' markers)
		newfiles=()
		newfilejobindices=() # One entry for each element of `newfiles`, containing the corresponding index in `batch` to which the file belongs
		for ((i=0; i<${#batch[@]}; ++i)); do
			line="${batch[${i}]}"
			singlejobid="${line%% *}"
			line="${line#* }"
			nick="${line%% *}"
			url="${line#* }"
			# Handle marker for end of list job: tell the user it's done and move on.
			if [[ "${url}" == 'end' ]]; then
				# No status code reflection here because the start of the list job might not even have been in this batch.
				respond "${nick}" "Job ${singlejobid} finished."
				statuscodes+=(-1)
				continue
			fi
			# Find nonexistent filename for log file (*not* concurrency-safe!)
			logbasename="$(date +%Y%m%dT%H%M%SZ)_${singlejobid}"
			if [[ -e "${logbasename}_codearchiver.log" ]]; then
				# Use a separate counter variable so the outer batch index `i` is not clobbered.
				for ((coll=0; ; ++coll)); do
					if [[ ! -e "${logbasename}_coll${coll}_codearchiver.log" ]]; then
						break
					fi
				done
				logbasename="${logbasename}_coll${coll}"
			fi
			logname="${logbasename}_codearchiver.log"
			artefactsname="${logbasename}_codearchiver_artefacts.txt"
			# Run codearchiver, duplicating WARNINGs and higher in the bot output
			log "Running ${url} (${singlejobid}), logging into ${logname}"
			codearchiver --verbose --write-artefacts-fd-3 "${url}" \
				2> >(tee "${logname}" | grep -Fv -e ' INFO ' | log_loop "From codearchiver ${singlejobid}: ") \
				3> >(tee "${artefactsname}" | log_loop "New artefacts from codearchiver ${singlejobid}: ")
			status="$?"
			log "codearchiver ${url} finished with status code ${status}"
			statuscodes+=("${status}")
			#TODO Integrate this into the pipe from codearchiver above to avoid rereading the entire log file
			declare -i badcount=$(awk '! ($3 ~ /^INFO$/) { cnt += 1; } END { printf "%d\n", cnt; }' "${logname}")
			# Compress log file with zstd -19
			log "Compressing log file ${logname}"
			zstd -19 --rm "${logname}" 2> >(log_loop 'zstd err: ')
			if [[ -e "${logname}.zst" && ! -e "${logname}" ]]; then
				# Compression successful
				logname="${logname}.zst"
			fi
			# Move everything but the log file to ./failed/ if codearchiver exited non-zero
			readarray -t artefacts <"${artefactsname}"
			if [[ "${status}" -ne 0 ]]; then
				msg="$(printf 'Moving artefact files'; printf ' %q' "${artefacts[@]}" "${artefactsname}"; printf ' from non-zero exit for job %s to ./failed/\n' "${singlejobid}";)"
				log "${msg}"
				mkdir --parents ./failed/
				mv --verbose -- "${artefacts[@]}" "${artefactsname}" ./failed/ 2> >(log_loop 'mv err: ') | log_loop 'mv out: '
			else
				for file in "${artefacts[@]}"; do
					newfiles+=("${file}")
					newfilejobindices+=("${i}")
				done
				newfiles+=("${artefactsname}")
				newfilejobindices+=("${i}")
			fi
			newfiles+=("${logname}")
			newfilejobindices+=("${i}")
			# For individual jobs, tell the user about warnings and success/failure
			if [[ "${singlejobid}" != *_* ]]; then
				if [[ "${status}" -eq 0 ]]; then
					respond "${nick}" "Job ${singlejobid} succeeded."
				else
					respond "${nick}" "Job ${singlejobid} failed."
				fi
				if [[ ${badcount} -gt 0 ]]; then
					respond "${nick}" "Job ${singlejobid} produced ${badcount} warnings or errors."
				fi
			fi
		done
		# Record SHA-256 hashes for new files
		log "SHA-256 hashes:"
		sha256sum "${newfiles[@]}" > >(log_loop 'sha256sum: ')
		# Upload
		date="$(date '+%Y-%m-%d')"
		identifier="codearchiver_${date//-/}"
		if [[ -z "${CODEARCHIVER_BOT_TEST}" ]]; then
			collection='archiveteam_codearchiver'
		else
			identifier="test_${identifier}"
			collection='test_collection'
		fi
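		# Upload each new file into the IA item; remember whether any upload failed.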
		uploadsfine=y
		for f in "${newfiles[@]}"; do
			ia-upload-stream --no-derive "${identifier}" "${f}" \
				"collection:${collection}" \
				'mediatype:software' \
				"date:${date}" \
				<"${f}" 2> >(log_loop 'ia-upload-stream: ')
			status="$?"
			if [[ "${status}" -ne 0 ]]; then
				log "Upload failed: exit status ${status}"
				if [[ "${uploadsfine}" ]]; then
					send "Upload failed: exit status ${status}"
				fi
				uploadsfine=
			fi
		done
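		# Only wait for IA tasks and replace local files if every upload succeeded.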
		if [[ "${uploadsfine}" ]]; then
			# Wait until all tasks for the item are done
			while :; do
				tasks="$(python3 -c 'import json, sys; o = json.load(sys.stdin); print(sum(o["value"]["summary"].values()))' < <({ curl --silent --verbose --fail --max-time 10 --header "Authorization: LOW ${IA_S3_ACCESS}:${IA_S3_SECRET}" "https://archive.org/services/tasks.php?identifier=${identifier}&summary=1&history=0" 2> >(log_loop 'curl IA tasks err: '); } | tee >(log_loop 'curl IA tasks out: ')))"
				if [[ "${tasks}" == '0' ]]; then
					break
				fi
				sleep 60
			done
			# Replace non-metadata files with a symlink to .uploaded dummy file
			touch '.uploaded'
			for f in "${newfiles[@]}"; do
				if [[ "${f}" != *_codearchiver_metadata.txt ]]; then
					log "Replacing ${f} with symlink to .uploaded"
					{ rm --verbose -- "${f}" && ln --symbolic --verbose '.uploaded' "${f}"; } |& log_loop 'rm/ln: '
				fi
			done
		fi
	done
}