| 1 | #!/usr/bin/env bash
|
| 2 | #
|
| 3 | # Functions to invoke soil/web remotely.
|
| 4 | #
|
| 5 | # soil/web is deployed manually, and then this runs at HEAD in the repo. Every
|
| 6 | # CI run has an up-to-date copy.
|
| 7 | #
|
| 8 | # Usage:
|
| 9 | # soil/web-worker.sh <function name>
|
| 10 |
|
set -o nounset
set -o pipefail
set -o errexit

# Absolute path to the repo root (parent of soil/).
# Fix: quote "$0" so the path survives spaces/globs (SC2086).
REPO_ROOT=$(cd "$(dirname "$0")/.."; pwd)
|
| 16 |
|
| 17 | source soil/common.sh
|
| 18 | source test/tsv-lib.sh # tsv2html
|
| 19 | source web/table/html.sh # table-sort-{begin,end}
|
| 20 |
|
| 21 | # ~/
|
| 22 | # soil-web/ # executable files
|
| 23 | # doctools/
|
| 24 | # html_head.py
|
| 25 | # soil/
|
| 26 | # web.py
|
| 27 | # web.sh
|
| 28 | # travis-ci.oilshell.org/ # served over HTTP
|
| 29 | # index.html
|
| 30 | # web/
|
| 31 | # base.css
|
| 32 | # soil.css
|
| 33 | # github-jobs/
|
| 34 | # index.html
|
| 35 | # 3619/ # $GITHUB_RUN_NUMBER
|
| 36 | # dev-minimal.wwz
|
| 37 | # cpp-small.wwz
|
| 38 | # sourcehut-jobs/
|
| 39 | # index.html
|
| 40 | # 22/ # $JOB_ID
|
| 41 | # dev-minimal.wwz
|
| 42 | # 23 # $JOB_ID
|
| 43 | # cpp-small.wwz
|
| 44 |
|
sshq() {
  ### Run an argv on the soil host, safely quoted for the remote shell.
  #
  # Don't need commands module as I said here!
  # https://www.oilshell.org/blog/2017/01/31.html
  #
  # This is Bernstein chaining through ssh.

  # printf %q quotes each arg so the remote shell sees the same argv.
  # Fix: quote "$SOIL_USER_HOST" (defined in soil/common.sh) defensively.
  my-ssh "$SOIL_USER_HOST" "$(printf '%q ' "$@")"
}
|
| 53 |
|
# Rebuild the jobs index page on the server, by invoking the deployed copy of
# soil/web.sh over ssh.  Extra args are forwarded to 'rewrite-jobs-index'
# (presumably the 'github-' / 'sourcehut-' prefix -- confirm in soil/web.sh).
remote-rewrite-jobs-index() {
  sshq soil-web/soil/web.sh rewrite-jobs-index "$@"
}
|
| 57 |
|
remote-cleanup-jobs-index() {
  ### Delete old runs under ${prefix}jobs/ on the server -- NOT a dry run.
  #
  # $1 - job dir prefix, e.g. 'github-' or 'sourcehut-'

  local prefix
  prefix=$1

  # Trailing 'false' turns dry-run off: clean it up for real!
  sshq soil-web/soil/web.sh cleanup-jobs-index "$prefix" false
}
|
| 63 |
|
remote-cleanup-status-api() {
  ### Trim old entries under status-api/ on the server (currently dry-run only).
  #
  # Original ssh invocation, disabled:
  #sshq soil-web/soil/web.sh cleanup-status-api false
  # 2024-07 - work around bug by doing dry_run only.
  #
  # TODO: Fix the logic in soil/web.sh

  if false; then
    sshq soil-web/soil/web.sh cleanup-status-api true
  else
    # arg1=true means dry_run=true, per the 2024-07 workaround above.
    # Fix: quote "$WWUP_URL" (from soil/common.sh) against word-splitting.
    curl --include --fail-with-body \
      --form 'run-hook=soil-cleanup-status-api' \
      --form 'arg1=true' \
      "$WWUP_URL"
  fi
}
|
| 79 |
|
# scp wrapper: disable host-key prompting so transfers run non-interactively
# (e.g. in CI where known_hosts isn't provisioned).
my-scp() {
  scp -o StrictHostKeyChecking=no "$@"
}
|
| 83 |
|
# ssh wrapper: disable host-key prompting so remote commands run
# non-interactively (e.g. in CI where known_hosts isn't provisioned).
my-ssh() {
  ssh -o StrictHostKeyChecking=no "$@"
}
|
| 87 |
|
scp-status-api() {
  ### Upload one job's exit-status file to the server's status API.
  #
  # $1 - run id, e.g. $GITHUB_RUN_NUMBER (defaults to a test id)
  # $2 - job name, e.g. 'cpp-small'

  local run_id=${1:-TEST2-github-run-id}
  local job_name=$2

  local status_file="_soil-jobs/$job_name.status.txt"
  local remote_path="$SOIL_REMOTE_DIR/status-api/github/$run_id/$job_name"

  # We could make this one invocation of something like:
  # cat $status_file | sshq soil/web.sh PUT $remote_path

  if false; then  # old scp path, kept for reference
    # Fix: quote remote host/path expansions (SC2086).
    my-ssh "$SOIL_USER_HOST" "mkdir -p $(dirname "$remote_path")"

    # the consumer should check if these are all zero
    # note: the file gets RENAMED
    my-scp "$status_file" "$SOIL_USER_HOST:$remote_path"
  else
    # Note: we don't need to change the name of the file, because we just glob
    # the dir
    curl --include --fail-with-body \
      --form 'payload-type=status-api' \
      --form "subdir=github/$run_id" \
      --form "file1=@$status_file" \
      "$WWUP_URL"
  fi
}
|
| 114 |
|
scp-results() {
  ### Copy local result files into ${prefix}jobs/ on the server.
  # could also use Travis known_hosts addon?
  #
  # $1   - 'sourcehut-' or '' (for github)
  # rest - files to upload

  local prefix
  prefix=$1
  shift

  local dest="$SOIL_USER_HOST:$SOIL_REMOTE_DIR/${prefix}jobs/"
  my-scp "$@" "$dest"
}
|
| 122 |
|
# Dummy that doesn't depend on results
deploy-test-wwz() {
  ### Build and upload a throwaway .wwz, to exercise the deploy path.
  set -x

  # Fix: separate declaration from assignment -- 'local x=$(cmd)' masks the
  # command's exit status under errexit.
  local out_name
  out_name="$(date +%Y-%m-%d__%H-%M-%S)_test"

  local wwz=$out_name.wwz

  cat >index.html <<EOF
<a href="build/oil-manifest.txt">build/oil-manifest.txt</a> <br/>
<a href="build/opy-manifest.txt">build/opy-manifest.txt</a> <br/>
<a href="env.txt">env.txt</a> <br/>
EOF

  # dump-env comes from soil/common.sh
  dump-env > env.txt

  # Fix: quote "$wwz" (SC2086).
  zip -q "$wwz" env.txt index.html build/*.txt

  scp-results '' "$wwz"
}
|
| 142 |
|
format-wwz-index() {
  ### What's displayed in $ID.wwz/index.html
  #
  # $1 - job id, used in the page title and relative links
  # $2 - TSV path (optional)
  #
  # Writes an HTML fragment to stdout; make-job-wwz redirects it to index.html.

  local job_id=$1
  # NOTE(review): $tsv is assigned but never used below -- the INDEX.tsv path
  # is hardcoded in the <li> link.  Confirm whether the link should use "$tsv".
  # shellcheck disable=SC2034
  local tsv=${2:-_tmp/soil/INDEX.tsv}

  # soil-html-head comes from soil/common.sh; emits <head> with this title.
  soil-html-head "$job_id.wwz"

  cat <<EOF
<body class="width40">
  <p id="home-link">
    <a href="..">Up</a>
    | <a href="/">Home</a>
    | <a href="//oilshell.org/">oilshell.org</a>
  </p>

  <h1>$job_id.wwz</h1>
EOF

  echo '<ul>'
  cat <<EOF
<li>
  <a href="_tmp/soil/INDEX.tsv">_tmp/soil/INDEX.tsv</a>, also copied to
  <a href="../$job_id.tsv">../$job_id.tsv</a>.
</li>
<li>
  <a href="../$job_id.json">../$job_id.json</a>
</li>
EOF

  # Only present when the job ran in a container (see make-job-wwz).
  if test -f _tmp/soil/image.html; then
    echo '
    <li>
      <a href="_tmp/soil/image.html">Container Image Stats</a>
    </li>
    '
  fi

  echo '</ul>'
}
|
| 183 |
|
format-image-stats() {
  ### Render $soil_dir/image-layers.tsv as a sortable HTML page on stdout.
  #
  # $1 - dir containing image-layers.tsv (default _tmp/soil)
  # $2 - base URL for CSS/JS assets

  local soil_dir=${1:-_tmp/soil}
  local web_base_url=${2:-'/web'}  # for production

  # table-sort-html-head / table-sort-{begin,end} come from web/table/html.sh
  table-sort-html-head "Image Stats" "$web_base_url"

  # prints <body>; make it wide for the shell commands
  table-sort-begin "width60"

  # TODO:
  # - Format the TSV as an HTML table
  # - Save the name and tag and show it

  cat <<EOF
<p id="home-link">
  <a href="/">Home</a>
  | <a href="//oilshell.org/">oilshell.org</a>
</p>

<h1>Images Tagged</h1>

<a href="images-tagged.txt">images-tagged.txt</a> <br/>

<h1>Image Layers</h1>
EOF

  # tsv2html3 comes from test/tsv-lib.sh
  tsv2html3 "$soil_dir/image-layers.tsv"

  # First column is number of bytes; skip the header row explicitly with
  # NR > 1 (the old code relied on the non-numeric header evaluating to 0).
  # Also split declaration from assignment so awk failures aren't masked.
  local total_bytes
  total_bytes=$(awk '
  NR > 1 { sum += $1 }
  END { printf("%.1f", sum / 1000000) }
  ' "$soil_dir/image-layers.tsv")

  echo "<p>Total Size: <b>$total_bytes MB</b></p>"


  cat <<EOF
<h2>Raw Data</h2>

<a href="image-layers.txt">image-layers.txt</a> <br/>
<a href="image-layers.tsv">image-layers.tsv</a> <br/>
</body>
</html>
EOF

  table-sort-end image-layers
}
|
| 232 |
|
make-job-wwz() {
  ### Bundle one job's results into $job_id.wwz (a zip served over HTTP).
  #
  # $1 - job id (defaults to 'test-job' for manual runs)

  local job_id=${1:-test-job}

  local wwz=$job_id.wwz

  # Doesn't exist when we're not using a container
  if test -f _tmp/soil/image-layers.tsv; then
    format-image-stats _tmp/soil > _tmp/soil/image.html
  fi

  # Fix: quote "$job_id" and "$wwz" (SC2086).
  format-wwz-index "$job_id" > index.html

  # _tmp/soil: Logs are in _tmp, see soil/worker.sh
  # web/ : spec test HTML references this.
  #   Note that that index references /web/{base,soil}.css, outside the .wwz
  #   osh-summary.html uses table-sort.js and ajax.js
  #
  # TODO:
  # - Could move _tmp/{spec,stateful,syscall} etc. to _test
  # - Create _tmp/benchmarks/{compute,gc,gc-cachegrind,osh-parser,mycpp-examples,...}
  #   - would require release/$VERSION/pub/benchmarks.wwz, like we have
  #     pub/metrics.wwz, for consistent links

  zip -q -r "$wwz" \
    index.html \
    _build/wedge/logs \
    _gen/mycpp/examples \
    _test \
    _tmp/{soil,spec,src-tree-www,wild-www,stateful,process-table,syscall,benchmark-data,metrics,mycpp-examples,compute,gc,gc-cachegrind,perf,vm-baseline,osh-runtime,osh-parser,host-id,shell-id} \
    _tmp/uftrace/{index.html,stage2} \
    web/{base,src-tree,spec-tests,spec-cpp,line-counts,benchmarks,wild}.css web/ajax.js \
    web/table/table-sort.{css,js} \
    _release/oil*.tar _release/*.xshar _release/VERSION/
}
|
| 267 |
|
# Manual smoke test for soil/collect_json.py: collect the PATH env var
# along with whatever is in _tmp/soil, printing JSON to stdout.
test-collect-json() {
  soil/collect_json.py _tmp/soil PATH
}
|
| 271 |
|
deploy-job-results() {
  ### Copy .wwz, .tsv, and .json to a new dir
  #
  # $1   - prefix, e.g. github- for example.com/github-jobs/
  # $2   - run dir, e.g. 1234 # make this dir
  # $3   - job name, e.g. cpp-small for example.com/github-jobs/1234/cpp-small.wwz
  # rest - env var names forwarded to collect_json.py

  local prefix=$1
  local run_dir=$2
  local job_name=$3
  # BUG FIX: was 'shift 2', which left $job_name in "$@" and passed it to
  # collect_json.py as if it were an env var name.  We consume 3 args above.
  # NOTE(review): verify no consumer relied on the job name appearing in the
  # JSON via that accident.
  shift 3
  # rest of args are more env vars

  # writes $job_name.wwz
  make-job-wwz "$job_name"

  # Debug permissions. When using docker rather than podman, these dirs can be
  # owned by root and we can't write into them.
  ls -l -d _tmp/soil
  ls -l _tmp/soil

  date +%s > _tmp/soil/task-deploy-start-time.txt

  soil/collect_json.py _tmp/soil "$@" > "$job_name.json"

  # So we don't have to unzip it
  cp _tmp/soil/INDEX.tsv "$job_name.tsv"

  if false; then  # old scp path, kept for reference
    local remote_dest_dir="$SOIL_REMOTE_DIR/${prefix}jobs/$run_dir"
    my-ssh "$SOIL_USER_HOST" "mkdir -p $remote_dest_dir"

    # Do JSON last because that's what 'list-json' looks for
    my-scp "$job_name".{wwz,tsv,json} "$SOIL_USER_HOST:$remote_dest_dir"
  else
    curl --include --fail-with-body \
      --form "payload-type=${prefix}jobs" \
      --form "subdir=$run_dir" \
      --form "file1=@${job_name}.wwz" \
      --form "file2=@${job_name}.tsv" \
      --form "file3=@${job_name}.json" \
      "$WWUP_URL"
  fi

  # log comes from soil/common.sh
  log ''
  log 'View CI results here:'
  log ''
  log "https://$SOIL_HOST/uuu/${prefix}jobs/$run_dir/"
  log "https://$SOIL_HOST/uuu/${prefix}jobs/$run_dir/$job_name.wwz/"
  log ''
}
|
| 319 |
|
publish-cpp-tarball() {
  ### Upload _release/oils-for-unix.tar to git-$hash/ under ${prefix}jobs/.
  #
  # $1 - job dir prefix (default 'github-'), e.g. example.com/github-jobs/

  local prefix=${1:-'github-'}  # e.g. example.com/github-jobs/

  # Example of dir structure we need to cleanup:
  #
  # sourcehut-jobs/
  #   git-$hash/
  #     index.html
  #     oils-for-unix.tar
  # github-jobs/
  #   git-$hash/
  #     oils-for-unix.tar
  #
  # Algorithm
  # 1. List all JSON, finding commit date and commit hash
  # 2. Get the OLDEST commit dates, e.g. all except for 50
  # 3. Delete all commit hash dirs not associated with them

  # Disabled wwup upload path; the live else-branch uses scp/ssh instead.
  if false; then
    # Note: don't upload code without auth
    # TODO: Move it to a different dir.

    local commit_hash
    commit_hash=$(cat _tmp/soil/commit-hash.txt)

    local tar=_release/oils-for-unix.tar
    curl --include --fail-with-body \
      --form 'payload-type=github-jobs' \
      --form "subdir=git-$commit_hash" \
      --form "file1=@$tar" \
      $WWUP_URL

    log 'Tarball:'
    log ''
    log "https://$SOIL_HOST/code/github-jobs/git-$commit_hash/"

  else
    # Fix subtle problem here !!!
    # (inherit_errexit makes $(git-commit-dir ...) failures fatal too.)
    shopt -s inherit_errexit

    # git-commit-dir comes from soil/common.sh
    local git_commit_dir
    git_commit_dir=$(git-commit-dir "$prefix")

    my-ssh $SOIL_USER_HOST "mkdir -p $git_commit_dir"

    # Do JSON last because that's what 'list-json' looks for

    local tar=_release/oils-for-unix.tar

    # Permission denied because of host/guest issue
    #local tar_gz=$tar.gz
    #gzip -c $tar > $tar_gz

    # Avoid race condition
    # Crappy UUID: seconds since epoch, plus PID
    local timestamp
    timestamp=$(date +%s)

    local temp_name="tmp-$timestamp-$$.tar"

    # Upload under a unique temp name, then atomically mv into place so
    # readers never see a partially-written tarball.
    my-scp $tar "$SOIL_USER_HOST:$git_commit_dir/$temp_name"

    my-ssh $SOIL_USER_HOST \
      "mv -v $git_commit_dir/$temp_name $git_commit_dir/oils-for-unix.tar"

    log 'Tarball:'
    log ''
    log "https://$git_commit_dir"
  fi

}
|
| 391 |
|
remote-event-job-done() {
  ### "Client side" handler: a job calls this when it's done
  #
  # $1 - prefix: 'github-' or 'sourcehut-'
  # $2 - run id: $GITHUB_RUN_NUMBER or git-$hash

  local prefix=$1
  local run_id=$2

  # log comes from soil/common.sh
  log "remote-event-job-done $prefix $run_id"

  # Deployed code dir
  if false; then  # old ssh path, kept for reference
    sshq soil-web/soil/web.sh event-job-done "$@"
  else
    # Note: I think curl does URL escaping of arg1= arg2= ?
    # Fix: quote "$WWUP_URL" against word-splitting (SC2086).
    curl --include --fail-with-body \
      --form 'run-hook=soil-event-job-done' \
      --form "arg1=$prefix" \
      --form "arg2=$run_id" \
      "$WWUP_URL"
  fi
}
|
| 412 |
|
# Dispatch: run "$1 args..." as a function, but only when this file is
# executed as web-worker.sh -- sourcing it defines functions without running
# anything.  Fix: quote "$0" and "$filename" (SC2086).
filename=$(basename "$0")
if test "$filename" = 'web-worker.sh'; then
  "$@"
fi
|