#!/usr/bin/env bash
# Restore, verify, and refresh the CI cache of the puppeteer-managed
# chromium download used by mermaid-cli (mmdc).
#
# Required environment:
#   CI_JOB_IMAGE  - set by the CI system; part of the cache key
#   HOME          - puppeteer downloads into $HOME/.cache/puppeteer
set -euo pipefail
set -x

# Bump this to "clear" the cache.
# Actually, it just causes us to ignore previous cached results.
cache_clear_token=2023-11-09c

cache_dir="cache/$CI_JOB_IMAGE,$cache_clear_token,puppeteer"
local_dir="$HOME/.cache/puppeteer"

# Restore a previously-cached puppeteer download, if we have one,
# so the install below doesn't have to re-fetch chromium.
if test -d "$cache_dir"; then
  mkdir -p "$local_dir"
  cp -a "$cache_dir"/. "$local_dir"/.
fi

./bin/via-yarn-install-in-ci mmdc https://github.com/mermaid-js/mermaid-cli \
    bba0240ad87f6fbf44d8a24941e37f4bc2c8bf30

# Record what the install left in the puppeteer cache: a full
# type/path/symlink-target listing, plus a sha256 of every regular file.
#
# The file-hashing pipeline is NUL-delimited so filenames containing
# whitespace (or other metacharacters) can't be word-split by xargs,
# and `xargs -r` stops sha256sum from reading stdin if the cache
# happens to contain no regular files at all.
#
# LC_ALL=C pins the sort collation so the listing is byte-reproducible
# across images.  (Presumably a no-op in the usual POSIX-locale CI
# container; if the expected listings were generated under a different
# locale they will need regenerating once.)
(
  cd "$local_dir"
  find . -printf '%y %p %l\n' | LC_ALL=C sort
  find . -type f -print0 | LC_ALL=C sort -z | xargs -0 -r sha256sum --
) >puppeteer-cache-got-listings

# Empirically, the mermaid-cli locked install produced a very old
# chromium today.  So I suspect it's not fetching "latest", but a
# controlled version.  Nevertheless, we should check that what we got
# is actually the same and hasn't been changed by Google (or something
# on the way).
#
# Doing this now isn't ideal, because I don't actually know if
# the mermaid-cli *install* process runs anything from here.
# But I don't think it runs the main chrome binary, since at one point
# in our tests we got as far as this and then the chrome binary
# failed due to a missing OS shared library.
#
# This expected output listing shouldn't be in bin/ but the best way
# to fix that would be to rename the whole bin directory to maint.
diff -u bin/puppeteer-cache-expect-listings puppeteer-cache-got-listings

# Save the verified download for future jobs.  (Only reached when the
# diff above succeeded, so we never cache an unverified download.)
if ! test -d "$cache_dir"; then
  mkdir -p "$cache_dir"
  cp -a "$local_dir"/. "$cache_dir"/.
fi

# This is the easiest way to get the shared libraries that chromium
# depends on.  Obviously, using the Debian package's dependencies
# is totally wrong, but it works in practice, and we don't have a
# proper dependency list for the binaries from the ad-hoc downloads.
apt-get install -y chromium