#!/usr/bin/env bash
#
# Wrapper for soil/web.py.
#
# Usage:
#   soil/web.sh <function name>

set -o nounset
set -o pipefail
set -o errexit

shopt -s nullglob  # for list-json

REPO_ROOT=$(cd $(dirname $0)/.. && pwd)
readonly REPO_ROOT

source $REPO_ROOT/soil/common.sh

# Jobs to show and keep.  This corresponds to, say, soil/worker.sh JOB-dummy;
# each git COMMIT amounts to more than 15 jobs.
readonly NUM_JOBS=4000

soil-web() {
  # We may be executed by a wwup.cgi on the server, which doesn't have
  # PATH=~/bin, and the shebang is /usr/bin/env python2

  local -a prefix=()
  if test -n "${CONTENT_LENGTH:-}"; then
    prefix=( ~/bin/python2 )
  fi

  PYTHONPATH=$REPO_ROOT "${prefix[@]}" $REPO_ROOT/soil/web.py "$@"
}
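
# For example (a sketch based on the wrapper above), 'soil-web cleanup 2' ends
# up running roughly:
#
#   PYTHONPATH=$REPO_ROOT $REPO_ROOT/soil/web.py cleanup 2
#
# with ~/bin/python2 prepended when CONTENT_LENGTH is set (i.e. under CGI).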

# Bug fix for another race:
# 'ls *.json' has a race: the shell expands the glob to files that may no
# longer exist, and then 'ls' fails!
list-json() {
  local dir=$1  # e.g. travis-ci.oilshell.org/github-jobs

  for name in $dir/*/*.json; do
    echo $name
  done
}
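
# A minimal sketch of the race that list-json avoids (paths are illustrative):
#
#   ls $dir/*/*.json   # a file deleted after glob expansion makes 'ls' fail
#
# The loop above only echoes the expanded names, so a name that has since been
# deleted is harmless.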

rewrite-jobs-index() {
  ### Atomic update of travis-ci.oilshell.org/jobs/
  local prefix=$1
  local run_id=$2  # pass GITHUB_RUN_NUMBER or git-$hash

  local dir=$SOIL_HOST_DIR/uuu/${prefix}jobs

  log "soil-web: Rewriting uuu/${prefix}jobs/index.html"

  # Fix for bug #1169: don't create the temp file on a different file system,
  # which /tmp may be.
  #
  # When the source and target are on different file systems, I believe 'mv'
  # falls back to 'cp', which has this race condition:
  #
  # https://unix.stackexchange.com/questions/116280/cannot-create-regular-file-filename-file-exists
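  #
  # A sketch of the pattern used below: write to a temp file on the SAME file
  # system as the target, then 'mv' it.  Within one file system, 'mv' is a
  # rename(), so readers see either the old or the new index.html, never a
  # partial file:
  #
  #   local tmp=$dir/$$.index.html   # same file system as $dir/index.html
  #   ... generate $tmp ...
  #   mv $tmp $dir/index.html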

  # Limit to the last NUM_JOBS jobs.  The glob is in alphabetical order and
  # jobs look like 2020-03-20__...

  local index_tmp=$dir/$$.index.html  # index of every job in every run
  local run_index_tmp=$dir/$$.runs.html  # only the jobs in this run/commit

  list-json $dir \
    | tail -n -$NUM_JOBS \
    | soil-web ${prefix}index $index_tmp $run_index_tmp $run_id

  echo "rewrite index status = ${PIPESTATUS[@]}"

  mv -v $index_tmp $dir/index.html

  mkdir -v -p $dir/$run_id  # this could be a new commit hash, etc.
  mv -v $run_index_tmp $dir/$run_id/index.html
}
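
# Example invocation, as a sketch (the run ID comes from CI or from a commit,
# per event-job-done below):
#
#   rewrite-jobs-index github- $GITHUB_RUN_NUMBER
#   rewrite-jobs-index sourcehut- git-$hash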

cleanup-jobs-index() {
  local prefix=$1
  local dry_run=${2:-true}

  local dir=$SOIL_HOST_DIR/uuu/${prefix}jobs

  # Pass it all JSON, and then it figures out what files to delete (TSV, etc.)
  case $dry_run in
    false)
      # Bug fix: There's a race here when 2 jobs complete at the same time.
      # Use rm -f to ignore failure if the file was already deleted.

      list-json $dir | soil-web cleanup $NUM_JOBS | xargs --no-run-if-empty -- rm -f -v
      ;;
    true)
      list-json $dir | soil-web cleanup $NUM_JOBS
      ;;
    *)
      log 'Expected true or false for dry_run'
  esac
}
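
# Usage sketch: with the default dry_run=true, the function only prints what
# would be deleted; pass 'false' to actually delete (as event-job-done does):
#
#   cleanup-jobs-index github-          # dry run: list deletion candidates
#   cleanup-jobs-index github- false    # really delete them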

test-cleanup() {
  # the 999 jobs are the oldest

  soil-web cleanup 2 <<EOF
travis-ci.oilshell.org/github-jobs/999/one.json
travis-ci.oilshell.org/github-jobs/999/two.json
travis-ci.oilshell.org/github-jobs/999/three.json
travis-ci.oilshell.org/github-jobs/1000/one.json
travis-ci.oilshell.org/github-jobs/1000/two.json
travis-ci.oilshell.org/github-jobs/1001/one.json
travis-ci.oilshell.org/github-jobs/1001/two.json
travis-ci.oilshell.org/github-jobs/1001/three.json
EOF
}

cleanup-status-api() {
  ### cleanup the files used for maybe-merge

  local dry_run=${1:-true}

  local dir=$SOIL_HOST_DIR/uuu/status-api/github

  pushd $dir
  case $dry_run in
    false)
      # delete all but the last 30
      ls | head -n -30 | xargs --no-run-if-empty -- rm -r -f -v
      ;;
    true)
      ls | head -n -30
      ;;
    *)
      log 'Expected true or false for dry_run'
  esac
  popd
}

event-job-done() {
  ### "Server side" handler

  local prefix=$1  # 'github-' or 'sourcehut-'
  local run_id=$2  # $GITHUB_RUN_NUMBER or git-$hash

  rewrite-jobs-index $prefix $run_id

  # note: we could speed jobs up by doing this separately?
  cleanup-jobs-index $prefix false
}
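
# Sketch of how this handler is invoked through the "$@" dispatch at the
# bottom of this file (the run number is hypothetical):
#
#   soil/web.sh event-job-done github- 3722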

DISABLED-event-job-done() {
  ### Hook for wwup.cgi to execute

  # As long as the CGI script shows output, I don't think we need any wrappers
  # The scripts are written so we don't need to 'cd'
  _event-job-done "$@"
  return

  # This is the directory that soil/web-init.sh deploys to, and it's shaped
  # like the Oils repo
  cd ~/soil-web

  # Figure out why exit code is 127
  # Oh probably because it's not started in the home dir?

  # TODO: I guess wwup.cgi can buffer this entire response or something?
  # You POST, and you get the status, stdout, and stderr back?
  _event-job-done "$@" > ~/event-job-done.$$.log 2>&1
}

#
# Dev Tools
#

sync-testdata() {

  local dest=_tmp/github-jobs/

  rsync --archive --verbose \
    $SOIL_USER@$SOIL_HOST:$SOIL_HOST/github-jobs/ $dest

  # 2023-04: 3.2 GB of files!  Probably can reduce this

  du --si -s $dest
}

copy-web() {
  ### for relative URLs to work

  cp -r -v web/ _tmp/
}

local-test() {
  ### Use the sync'd testdata
  local dir=${1:-_tmp/github-jobs}

  local index=$dir/index.html

  local run_id=3722
  local run_index=$dir/$run_id/index.html

  list-json $dir | soil-web github-index $index $run_index $run_id

  echo "Wrote $index and $run_index"
}

hello() {
  echo "hi from $0"
  echo

  echo ARGS
  local i=0
  for arg in "$@"; do
    echo "[$i] $arg"

    # For testing wwup.cgi
    if test "$arg" = 'FAIL'; then
      echo 'failing early'
      return 42
    fi

    i=$(( i + 1 ))
  done
  echo

  whoami
  hostname
}

"$@"