summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--ofborg/.envrc1
-rw-r--r--ofborg/.github/workflows/ci.yml33
-rw-r--r--ofborg/.gitignore13
-rw-r--r--ofborg/Cargo.lock3725
-rw-r--r--ofborg/Cargo.toml13
-rw-r--r--ofborg/LICENSE21
-rw-r--r--ofborg/README.md107
-rw-r--r--ofborg/config.public.json33
-rw-r--r--ofborg/default.nix6
-rw-r--r--ofborg/doc/sample.dot18
-rw-r--r--ofborg/doc/sample.svg157
-rw-r--r--ofborg/example.config.json24
-rw-r--r--ofborg/flake.lock27
-rw-r--r--ofborg/flake.nix137
-rw-r--r--ofborg/ofborg-simple-build/Cargo.toml10
-rw-r--r--ofborg/ofborg/Cargo.toml46
-rw-r--r--ofborg/service.nix197
-rw-r--r--ofborg/shell.nix6
-rw-r--r--ofborg/tickborg-simple-build/Cargo.toml10
-rw-r--r--ofborg/tickborg-simple-build/src/main.rs36
-rw-r--r--ofborg/tickborg/.gitignore6
-rw-r--r--ofborg/tickborg/Cargo.toml46
-rw-r--r--ofborg/tickborg/build.rs566
-rw-r--r--ofborg/tickborg/src/acl.rs59
-rw-r--r--ofborg/tickborg/src/asynccmd.rs293
-rw-r--r--ofborg/tickborg/src/bin/build-faker.rs62
-rw-r--r--ofborg/tickborg/src/bin/builder.rs116
-rw-r--r--ofborg/tickborg/src/bin/evaluation-filter.rs88
-rw-r--r--ofborg/tickborg/src/bin/github-comment-filter.rs114
-rw-r--r--ofborg/tickborg/src/bin/github-comment-poster.rs76
-rw-r--r--ofborg/tickborg/src/bin/github-webhook-receiver.rs278
-rw-r--r--ofborg/tickborg/src/bin/log-message-collector.rs83
-rw-r--r--ofborg/tickborg/src/bin/logapi.rs151
-rw-r--r--ofborg/tickborg/src/bin/mass-rebuilder.rs73
-rw-r--r--ofborg/tickborg/src/bin/stats.rs134
-rw-r--r--ofborg/tickborg/src/buildtool.rs455
-rw-r--r--ofborg/tickborg/src/checkout.rs340
-rw-r--r--ofborg/tickborg/src/clone.rs173
-rw-r--r--ofborg/tickborg/src/commentparser.rs289
-rw-r--r--ofborg/tickborg/src/commitstatus.rs103
-rw-r--r--ofborg/tickborg/src/config.rs387
-rw-r--r--ofborg/tickborg/src/easyamqp.rs287
-rw-r--r--ofborg/tickborg/src/easylapin.rs251
-rw-r--r--ofborg/tickborg/src/evalchecker.rs62
-rw-r--r--ofborg/tickborg/src/files.rs8
-rw-r--r--ofborg/tickborg/src/ghevent/common.rs31
-rw-r--r--ofborg/tickborg/src/ghevent/issuecomment.rs19
-rw-r--r--ofborg/tickborg/src/ghevent/mod.rs9
-rw-r--r--ofborg/tickborg/src/ghevent/pullrequestevent.rs81
-rw-r--r--ofborg/tickborg/src/lib.rs103
-rw-r--r--ofborg/tickborg/src/locks.rs25
-rw-r--r--ofborg/tickborg/src/maintainers.nix118
-rw-r--r--ofborg/tickborg/src/maintainers.rs211
-rw-r--r--ofborg/tickborg/src/message/buildjob.rs55
-rw-r--r--ofborg/tickborg/src/message/buildlogmsg.rs17
-rw-r--r--ofborg/tickborg/src/message/buildresult.rs225
-rw-r--r--ofborg/tickborg/src/message/common.rs14
-rw-r--r--ofborg/tickborg/src/message/evaluationjob.rs29
-rw-r--r--ofborg/tickborg/src/message/mod.rs7
-rw-r--r--ofborg/tickborg/src/nix.rs893
-rw-r--r--ofborg/tickborg/src/nixenv.rs70
-rw-r--r--ofborg/tickborg/src/notifyworker.rs45
-rw-r--r--ofborg/tickborg/src/outpathdiff.rs7
-rw-r--r--ofborg/tickborg/src/stats.rs57
-rw-r--r--ofborg/tickborg/src/systems.rs74
-rw-r--r--ofborg/tickborg/src/tagger.rs87
-rw-r--r--ofborg/tickborg/src/tasks/build.rs599
-rw-r--r--ofborg/tickborg/src/tasks/eval/mod.rs48
-rw-r--r--ofborg/tickborg/src/tasks/eval/monorepo.rs254
-rw-r--r--ofborg/tickborg/src/tasks/evaluate.rs556
-rw-r--r--ofborg/tickborg/src/tasks/evaluationfilter.rs146
-rw-r--r--ofborg/tickborg/src/tasks/githubcommentfilter.rs182
-rw-r--r--ofborg/tickborg/src/tasks/githubcommentposter.rs765
-rw-r--r--ofborg/tickborg/src/tasks/log_message_collector.rs487
-rw-r--r--ofborg/tickborg/src/tasks/mod.rs8
-rw-r--r--ofborg/tickborg/src/tasks/statscollector.rs68
-rw-r--r--ofborg/tickborg/src/test_scratch.rs61
-rw-r--r--ofborg/tickborg/src/worker.rs53
-rw-r--r--ofborg/tickborg/src/writetoline.rs356
-rwxr-xr-xofborg/tickborg/test-nix/bin/nix-build4
-rwxr-xr-xofborg/tickborg/test-nix/bin/nix-instantiate4
-rw-r--r--ofborg/tickborg/test-srcs/build-pr/default.nix26
-rw-r--r--ofborg/tickborg/test-srcs/build-pr/hi another file0
-rw-r--r--ofborg/tickborg/test-srcs/build-pr/succeed.sh4
-rw-r--r--ofborg/tickborg/test-srcs/eval-mixed-failure/default.nix44
-rw-r--r--ofborg/tickborg/test-srcs/eval/default.nix18
-rw-r--r--ofborg/tickborg/test-srcs/events/pr-changed-base.json484
-rw-r--r--ofborg/tickborg/test-srcs/events/pr-converted-to-draft.json1
-rw-r--r--ofborg/tickborg/test-srcs/maintainers-pr/default.nix8
-rw-r--r--ofborg/tickborg/test-srcs/maintainers/default.nix5
-rw-r--r--ofborg/tickborg/test-srcs/maintainers/lib/attrsets.nix482
-rw-r--r--ofborg/tickborg/test-srcs/maintainers/lib/default.nix137
-rw-r--r--ofborg/tickborg/test-srcs/maintainers/lib/fixed-points.nix101
-rw-r--r--ofborg/tickborg/test-srcs/maintainers/lib/lists.nix663
-rw-r--r--ofborg/tickborg/test-srcs/maintainers/lib/strings.nix684
-rwxr-xr-xofborg/tickborg/test-srcs/make-maintainer-pr.sh31
-rwxr-xr-xofborg/tickborg/test-srcs/make-pr.sh31
97 files changed, 17607 insertions, 0 deletions
diff --git a/ofborg/.envrc b/ofborg/.envrc
new file mode 100644
index 0000000000..3550a30f2d
--- /dev/null
+++ b/ofborg/.envrc
@@ -0,0 +1 @@
+use flake
diff --git a/ofborg/.github/workflows/ci.yml b/ofborg/.github/workflows/ci.yml
new file mode 100644
index 0000000000..8f71a46e50
--- /dev/null
+++ b/ofborg/.github/workflows/ci.yml
@@ -0,0 +1,33 @@
+name: CI
+
+on:
+ push:
+ branches: [released]
+ pull_request:
+
+jobs:
+ checkPhase:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - name: Install Nix
+ uses: cachix/install-nix-action@v17
+ with:
+ nix_path: nixpkgs=channel:nixos-unstable
+ - name: Set up git
+ run: |
+ git config --global user.email "ofborg@example.com"
+ git config --global user.name "ofborg"
+ - name: checkPhase
+ run: nix-shell --pure --run "cargo test"
+
+ nix-build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - name: Install Nix
+ uses: cachix/install-nix-action@v17
+ with:
+ nix_path: nixpkgs=channel:nixos-unstable
+ - name: nix-build
+ run: nix-build -A ofborg.rs
diff --git a/ofborg/.gitignore b/ofborg/.gitignore
new file mode 100644
index 0000000000..d5f18343d4
--- /dev/null
+++ b/ofborg/.gitignore
@@ -0,0 +1,13 @@
+vendor
+*.log
+config.json
+.bash_hist
+config.private.json
+config.prod.json
+config.local.json
+config.*irc*.json
+result
+target
+*.nix.orig
+*.nix.rej
+/.direnv
diff --git a/ofborg/Cargo.lock b/ofborg/Cargo.lock
new file mode 100644
index 0000000000..81249f3a81
--- /dev/null
+++ b/ofborg/Cargo.lock
@@ -0,0 +1,3725 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 4
+
+[[package]]
+name = "aes"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0"
+dependencies = [
+ "cfg-if",
+ "cipher",
+ "cpufeatures 0.2.17",
+]
+
+[[package]]
+name = "aho-corasick"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "amq-protocol"
+version = "10.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8032525e9bb1bb8aa556476de729106e972b9fb811e5db21ce462a4f0f057d03"
+dependencies = [
+ "amq-protocol-tcp",
+ "amq-protocol-types",
+ "amq-protocol-uri",
+ "cookie-factory",
+ "nom 8.0.0",
+ "serde",
+]
+
+[[package]]
+name = "amq-protocol-tcp"
+version = "10.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22f50ebc589843a42a1428b3e1b149164645bfe8c22a7ed0f128ad0af4aaad84"
+dependencies = [
+ "amq-protocol-uri",
+ "async-rs",
+ "cfg-if",
+ "tcp-stream",
+ "tracing",
+]
+
+[[package]]
+name = "amq-protocol-types"
+version = "10.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "12ffea0c942eb17ea55262e4cc57b223d8d6f896269b1313153f9215784dc2b8"
+dependencies = [
+ "cookie-factory",
+ "nom 8.0.0",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "amq-protocol-uri"
+version = "10.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baa9f65c896cb658503e5547e262132ac356c26bc477afedfd8d3f324f4c5006"
+dependencies = [
+ "amq-protocol-types",
+ "percent-encoding",
+ "url",
+]
+
+[[package]]
+name = "android_system_properties"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.102"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
+
+[[package]]
+name = "asn1-rs"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56624a96882bb8c26d61312ae18cb45868e5a9992ea73c58e45c3101e56a1e60"
+dependencies = [
+ "asn1-rs-derive",
+ "asn1-rs-impl",
+ "displaydoc",
+ "nom 7.1.3",
+ "num-traits",
+ "rusticata-macros",
+ "thiserror 2.0.18",
+ "time",
+]
+
+[[package]]
+name = "asn1-rs-derive"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3109e49b1e4909e9db6515a30c633684d68cdeaa252f215214cb4fa1a5bfee2c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "asn1-rs-impl"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "async-channel"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2"
+dependencies = [
+ "concurrent-queue",
+ "event-listener-strategy",
+ "futures-core",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "async-compat"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1ba85bc55464dcbf728b56d97e119d673f4cf9062be330a9a26f3acf504a590"
+dependencies = [
+ "futures-core",
+ "futures-io",
+ "once_cell",
+ "pin-project-lite",
+ "tokio",
+]
+
+[[package]]
+name = "async-executor"
+version = "1.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c96bf972d85afc50bf5ab8fe2d54d1586b4e0b46c97c50a0c9e71e2f7bcd812a"
+dependencies = [
+ "async-task",
+ "concurrent-queue",
+ "fastrand",
+ "futures-lite",
+ "pin-project-lite",
+ "slab",
+]
+
+[[package]]
+name = "async-global-executor"
+version = "3.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13f937e26114b93193065fd44f507aa2e9169ad0cdabbb996920b1fe1ddea7ba"
+dependencies = [
+ "async-channel",
+ "async-executor",
+ "async-lock",
+ "blocking",
+ "futures-lite",
+ "tokio",
+]
+
+[[package]]
+name = "async-lock"
+version = "3.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311"
+dependencies = [
+ "event-listener",
+ "event-listener-strategy",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "async-rs"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e7d98bcae2752f5f3edb17288ff34b799760be54f63261073eed9f6982367b5"
+dependencies = [
+ "async-compat",
+ "async-global-executor",
+ "async-trait",
+ "cfg-if",
+ "futures-core",
+ "futures-io",
+ "hickory-resolver",
+ "tokio",
+ "tokio-stream",
+]
+
+[[package]]
+name = "async-task"
+version = "4.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de"
+
+[[package]]
+name = "async-trait"
+version = "0.1.89"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "atomic-waker"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
+
+[[package]]
+name = "autocfg"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
+
+[[package]]
+name = "aws-lc-rs"
+version = "1.16.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a054912289d18629dc78375ba2c3726a3afe3ff71b4edba9dedfca0e3446d1fc"
+dependencies = [
+ "aws-lc-sys",
+ "zeroize",
+]
+
+[[package]]
+name = "aws-lc-sys"
+version = "0.39.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "83a25cf98105baa966497416dbd42565ce3a8cf8dbfd59803ec9ad46f3126399"
+dependencies = [
+ "cc",
+ "cmake",
+ "dunce",
+ "fs_extra",
+]
+
+[[package]]
+name = "backon"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cffb0e931875b666fc4fcb20fee52e9bbd1ef836fd9e9e04ec21555f9f85f7ef"
+dependencies = [
+ "fastrand",
+]
+
+[[package]]
+name = "base64"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
+
+[[package]]
+name = "base64"
+version = "0.21.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
+
+[[package]]
+name = "base64"
+version = "0.22.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
+
+[[package]]
+name = "base64ct"
+version = "1.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06"
+
+[[package]]
+name = "bitflags"
+version = "2.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af"
+
+[[package]]
+name = "block-buffer"
+version = "0.10.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "block-buffer"
+version = "0.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cdd35008169921d80bc60d3d0ab416eecb028c4cd653352907921d95084790be"
+dependencies = [
+ "hybrid-array",
+]
+
+[[package]]
+name = "block-padding"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "blocking"
+version = "1.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e83f8d02be6967315521be875afa792a316e28d57b5a2d401897e2a7921b7f21"
+dependencies = [
+ "async-channel",
+ "async-task",
+ "futures-io",
+ "futures-lite",
+ "piper",
+]
+
+[[package]]
+name = "brace-expand"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3adb80ee272c844254166ea32c8ae11c211b3639a293fdde41b1645b6be2c62"
+
+[[package]]
+name = "bumpalo"
+version = "3.20.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb"
+
+[[package]]
+name = "bytes"
+version = "1.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33"
+
+[[package]]
+name = "cbc"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6"
+dependencies = [
+ "cipher",
+]
+
+[[package]]
+name = "cc"
+version = "1.2.58"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e1e928d4b69e3077709075a938a05ffbedfa53a84c8f766efbf8220bb1ff60e1"
+dependencies = [
+ "find-msvc-tools",
+ "jobserver",
+ "libc",
+ "shlex",
+]
+
+[[package]]
+name = "cesu8"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
+
+[[package]]
+name = "cfg_aliases"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
+
+[[package]]
+name = "chacha20"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6f8d983286843e49675a4b7a2d174efe136dc93a18d69130dd18198a6c167601"
+dependencies = [
+ "cfg-if",
+ "cpufeatures 0.3.0",
+ "rand_core 0.10.0",
+]
+
+[[package]]
+name = "chrono"
+version = "0.4.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0"
+dependencies = [
+ "iana-time-zone",
+ "num-traits",
+ "windows-link",
+]
+
+[[package]]
+name = "cipher"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad"
+dependencies = [
+ "crypto-common 0.1.7",
+ "inout",
+]
+
+[[package]]
+name = "cmake"
+version = "0.1.58"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0f78a02292a74a88ac736019ab962ece0bc380e3f977bf72e376c5d78ff0678"
+dependencies = [
+ "cc",
+]
+
+[[package]]
+name = "cmov"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "de0758edba32d61d1fd9f4d69491b47604b91ee2f7e6b33de7e54ca4ebe55dc3"
+
+[[package]]
+name = "cms"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b77c319abfd5219629c45c34c89ba945ed3c5e49fcde9d16b6c3885f118a730"
+dependencies = [
+ "const-oid 0.9.6",
+ "der",
+ "spki",
+ "x509-cert",
+]
+
+[[package]]
+name = "combine"
+version = "4.6.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd"
+dependencies = [
+ "bytes",
+ "memchr",
+]
+
+[[package]]
+name = "concurrent-queue"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973"
+dependencies = [
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "const-oid"
+version = "0.9.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
+
+[[package]]
+name = "const-oid"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a6ef517f0926dd24a1582492c791b6a4818a4d94e789a334894aa15b0d12f55c"
+
+[[package]]
+name = "cookie-factory"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9885fa71e26b8ab7855e2ec7cae6e9b380edff76cd052e07c683a0319d51b3a2"
+
+[[package]]
+name = "core-foundation"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "core-foundation-sys"
+version = "0.8.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
+
+[[package]]
+name = "cpufeatures"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "cpufeatures"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b2a41393f66f16b0823bb79094d54ac5fbd34ab292ddafb9a0456ac9f87d201"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "critical-section"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b"
+
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2"
+dependencies = [
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.9.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
+dependencies = [
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
+
+[[package]]
+name = "crypto-common"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
+dependencies = [
+ "generic-array",
+ "typenum",
+]
+
+[[package]]
+name = "crypto-common"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77727bb15fa921304124b128af125e7e3b968275d1b108b379190264f4423710"
+dependencies = [
+ "hybrid-array",
+]
+
+[[package]]
+name = "ctutils"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1005a6d4446f5120ef475ad3d2af2b30c49c2c9c6904258e3bb30219bebed5e4"
+dependencies = [
+ "cmov",
+]
+
+[[package]]
+name = "data-encoding"
+version = "2.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea"
+
+[[package]]
+name = "der"
+version = "0.7.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
+dependencies = [
+ "const-oid 0.9.6",
+ "der_derive",
+ "flagset",
+ "pem-rfc7468",
+ "zeroize",
+]
+
+[[package]]
+name = "der-parser"
+version = "10.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07da5016415d5a3c4dd39b11ed26f915f52fc4e0dc197d87908bc916e51bc1a6"
+dependencies = [
+ "asn1-rs",
+ "displaydoc",
+ "nom 7.1.3",
+ "num-bigint",
+ "num-traits",
+ "rusticata-macros",
+]
+
+[[package]]
+name = "der_derive"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8034092389675178f570469e6c3b0465d3d30b4505c294a6550db47f3c17ad18"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "deranged"
+version = "0.5.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7cd812cc2bc1d69d4764bd80df88b4317eaef9e773c75226407d9bc0876b211c"
+dependencies = [
+ "powerfmt",
+]
+
+[[package]]
+name = "des"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ffdd80ce8ce993de27e9f063a444a4d53ce8e8db4c1f00cc03af5ad5a9867a1e"
+dependencies = [
+ "cipher",
+]
+
+[[package]]
+name = "digest"
+version = "0.10.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
+dependencies = [
+ "block-buffer 0.10.4",
+ "crypto-common 0.1.7",
+ "subtle",
+]
+
+[[package]]
+name = "digest"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4850db49bf08e663084f7fb5c87d202ef91a3907271aff24a94eb97ff039153c"
+dependencies = [
+ "block-buffer 0.12.0",
+ "const-oid 0.10.2",
+ "crypto-common 0.2.1",
+ "ctutils",
+]
+
+[[package]]
+name = "displaydoc"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "dunce"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813"
+
+[[package]]
+name = "either"
+version = "1.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
+
+[[package]]
+name = "enum-as-inner"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1e6a265c649f3f5979b601d26f1d05ada116434c87741c9493cb56218f76cbc"
+dependencies = [
+ "heck",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "equivalent"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
+
+[[package]]
+name = "errno"
+version = "0.3.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
+dependencies = [
+ "libc",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "event-listener"
+version = "5.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab"
+dependencies = [
+ "concurrent-queue",
+ "parking",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "event-listener-strategy"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93"
+dependencies = [
+ "event-listener",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "fastrand"
+version = "2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
+
+[[package]]
+name = "find-msvc-tools"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582"
+
+[[package]]
+name = "flagset"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b7ac824320a75a52197e8f2d787f6a38b6718bb6897a35142d749af3c0e8f4fe"
+
+[[package]]
+name = "flume"
+version = "0.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e139bc46ca777eb5efaf62df0ab8cc5fd400866427e56c68b22e414e53bd3be"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+ "spin 0.9.8",
+]
+
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
+name = "foldhash"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf"
+dependencies = [
+ "percent-encoding",
+]
+
+[[package]]
+name = "fs2"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "fs_extra"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c"
+
+[[package]]
+name = "futures"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-channel"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+]
+
+[[package]]
+name = "futures-core"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d"
+
+[[package]]
+name = "futures-executor"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-io"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718"
+
+[[package]]
+name = "futures-lite"
+version = "2.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad"
+dependencies = [
+ "fastrand",
+ "futures-core",
+ "futures-io",
+ "parking",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "futures-macro"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "futures-rustls"
+version = "0.26.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8f2f12607f92c69b12ed746fabf9ca4f5c482cba46679c1a75b874ed7c26adb"
+dependencies = [
+ "futures-io",
+ "rustls",
+ "rustls-pki-types",
+]
+
+[[package]]
+name = "futures-sink"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893"
+
+[[package]]
+name = "futures-task"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393"
+
+[[package]]
+name = "futures-util"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-macro",
+ "futures-sink",
+ "futures-task",
+ "memchr",
+ "pin-project-lite",
+ "slab",
+]
+
+[[package]]
+name = "generic-array"
+version = "0.14.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
+dependencies = [
+ "typenum",
+ "version_check",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "libc",
+ "wasi",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "libc",
+ "r-efi 5.3.0",
+ "wasip2",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "r-efi 6.0.0",
+ "rand_core 0.10.0",
+ "wasip2",
+ "wasip3",
+]
+
+[[package]]
+name = "h2"
+version = "0.4.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54"
+dependencies = [
+ "atomic-waker",
+ "bytes",
+ "fnv",
+ "futures-core",
+ "futures-sink",
+ "http 1.4.0",
+ "indexmap",
+ "slab",
+ "tokio",
+ "tokio-util",
+ "tracing",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.15.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
+dependencies = [
+ "foldhash",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.16.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
+
+[[package]]
+name = "heck"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
+
+[[package]]
+name = "hex"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+
+[[package]]
+name = "hickory-proto"
+version = "0.25.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8a6fe56c0038198998a6f217ca4e7ef3a5e51f46163bd6dd60b5c71ca6c6502"
+dependencies = [
+ "async-trait",
+ "cfg-if",
+ "data-encoding",
+ "enum-as-inner",
+ "futures-channel",
+ "futures-io",
+ "futures-util",
+ "idna",
+ "ipnet",
+ "once_cell",
+ "rand 0.9.2",
+ "ring 0.17.14",
+ "thiserror 2.0.18",
+ "tinyvec",
+ "tokio",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "hickory-resolver"
+version = "0.25.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc62a9a99b0bfb44d2ab95a7208ac952d31060efc16241c87eaf36406fecf87a"
+dependencies = [
+ "cfg-if",
+ "futures-util",
+ "hickory-proto",
+ "ipconfig",
+ "moka",
+ "once_cell",
+ "parking_lot",
+ "rand 0.9.2",
+ "resolv-conf",
+ "smallvec",
+ "thiserror 2.0.18",
+ "tokio",
+ "tracing",
+]
+
+[[package]]
+name = "hmac"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
+dependencies = [
+ "digest 0.10.7",
+]
+
+[[package]]
+name = "hmac"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6303bc9732ae41b04cb554b844a762b4115a61bfaa81e3e83050991eeb56863f"
+dependencies = [
+ "digest 0.11.2",
+]
+
+[[package]]
+name = "http"
+version = "0.2.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1"
+dependencies = [
+ "bytes",
+ "fnv",
+ "itoa",
+]
+
+[[package]]
+name = "http"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a"
+dependencies = [
+ "bytes",
+ "itoa",
+]
+
+[[package]]
+name = "http-body"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
+dependencies = [
+ "bytes",
+ "http 1.4.0",
+]
+
+[[package]]
+name = "http-body-util"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "http 1.4.0",
+ "http-body",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "httparse"
+version = "1.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
+
+[[package]]
+name = "httpdate"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
+
+[[package]]
+name = "hubcaps"
+version = "0.6.2"
+source = "git+https://github.com/ofborg/hubcaps.git?rev=0d7466ef941a7a8e160c071e2846e56b90b6ea86#0d7466ef941a7a8e160c071e2846e56b90b6ea86"
+dependencies = [
+ "base64 0.22.1",
+ "data-encoding",
+ "futures",
+ "http 1.4.0",
+ "hyperx",
+ "jsonwebtoken",
+ "log",
+ "mime",
+ "percent-encoding",
+ "reqwest",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "url",
+]
+
+[[package]]
+name = "hybrid-array"
+version = "0.4.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3944cf8cf766b40e2a1a333ee5e9b563f854d5fa49d6a8ca2764e97c6eddb214"
+dependencies = [
+ "typenum",
+]
+
+[[package]]
+name = "hyper"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6299f016b246a94207e63da54dbe807655bf9e00044f73ded42c3ac5305fbcca"
+dependencies = [
+ "atomic-waker",
+ "bytes",
+ "futures-channel",
+ "futures-core",
+ "h2",
+ "http 1.4.0",
+ "http-body",
+ "httparse",
+ "httpdate",
+ "itoa",
+ "pin-project-lite",
+ "smallvec",
+ "tokio",
+ "want",
+]
+
+[[package]]
+name = "hyper-rustls"
+version = "0.27.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58"
+dependencies = [
+ "http 1.4.0",
+ "hyper",
+ "hyper-util",
+ "rustls",
+ "rustls-pki-types",
+ "tokio",
+ "tokio-rustls",
+ "tower-service",
+]
+
+[[package]]
+name = "hyper-util"
+version = "0.1.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0"
+dependencies = [
+ "base64 0.22.1",
+ "bytes",
+ "futures-channel",
+ "futures-util",
+ "http 1.4.0",
+ "http-body",
+ "hyper",
+ "ipnet",
+ "libc",
+ "percent-encoding",
+ "pin-project-lite",
+ "socket2",
+ "tokio",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "hyperx"
+version = "1.4.0"
+source = "git+https://github.com/chantra/hyperx.git?branch=semver#69f17cf858573db42c2baaf0bfead54521de32f9"
+dependencies = [
+ "base64 0.13.1",
+ "bytes",
+ "http 0.2.12",
+ "httpdate",
+ "language-tags",
+ "mime",
+ "percent-encoding",
+ "unicase",
+]
+
+[[package]]
+name = "iana-time-zone"
+version = "0.1.65"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470"
+dependencies = [
+ "android_system_properties",
+ "core-foundation-sys",
+ "iana-time-zone-haiku",
+ "js-sys",
+ "log",
+ "wasm-bindgen",
+ "windows-core",
+]
+
+[[package]]
+name = "iana-time-zone-haiku"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
+dependencies = [
+ "cc",
+]
+
+[[package]]
+name = "icu_collections"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2984d1cd16c883d7935b9e07e44071dca8d917fd52ecc02c04d5fa0b5a3f191c"
+dependencies = [
+ "displaydoc",
+ "potential_utf",
+ "utf8_iter",
+ "yoke",
+ "zerofrom",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_locale_core"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92219b62b3e2b4d88ac5119f8904c10f8f61bf7e95b640d25ba3075e6cac2c29"
+dependencies = [
+ "displaydoc",
+ "litemap",
+ "tinystr",
+ "writeable",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_normalizer"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c56e5ee99d6e3d33bd91c5d85458b6005a22140021cc324cea84dd0e72cff3b4"
+dependencies = [
+ "icu_collections",
+ "icu_normalizer_data",
+ "icu_properties",
+ "icu_provider",
+ "smallvec",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_normalizer_data"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da3be0ae77ea334f4da67c12f149704f19f81d1adf7c51cf482943e84a2bad38"
+
+[[package]]
+name = "icu_properties"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bee3b67d0ea5c2cca5003417989af8996f8604e34fb9ddf96208a033901e70de"
+dependencies = [
+ "icu_collections",
+ "icu_locale_core",
+ "icu_properties_data",
+ "icu_provider",
+ "zerotrie",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_properties_data"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e2bbb201e0c04f7b4b3e14382af113e17ba4f63e2c9d2ee626b720cbce54a14"
+
+[[package]]
+name = "icu_provider"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "139c4cf31c8b5f33d7e199446eff9c1e02decfc2f0eec2c8d71f65befa45b421"
+dependencies = [
+ "displaydoc",
+ "icu_locale_core",
+ "writeable",
+ "yoke",
+ "zerofrom",
+ "zerotrie",
+ "zerovec",
+]
+
+[[package]]
+name = "id-arena"
+version = "2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954"
+
+[[package]]
+name = "idna"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de"
+dependencies = [
+ "idna_adapter",
+ "smallvec",
+ "utf8_iter",
+]
+
+[[package]]
+name = "idna_adapter"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
+dependencies = [
+ "icu_normalizer",
+ "icu_properties",
+]
+
+[[package]]
+name = "indexmap"
+version = "2.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017"
+dependencies = [
+ "equivalent",
+ "hashbrown 0.16.1",
+ "serde",
+ "serde_core",
+]
+
+[[package]]
+name = "inout"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01"
+dependencies = [
+ "block-padding",
+ "generic-array",
+]
+
+[[package]]
+name = "ipconfig"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d40460c0ce33d6ce4b0630ad68ff63d6661961c48b6dba35e5a4d81cfb48222"
+dependencies = [
+ "socket2",
+ "widestring",
+ "windows-registry",
+ "windows-result",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "ipnet"
+version = "2.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d98f6fed1fde3f8c21bc40a1abb88dd75e67924f9cffc3ef95607bad8017f8e2"
+
+[[package]]
+name = "iri-string"
+version = "0.7.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "25e659a4bb38e810ebc252e53b5814ff908a8c58c2a9ce2fae1bbec24cbf4e20"
+dependencies = [
+ "memchr",
+ "serde",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f42a60cbdf9a97f5d2305f08a87dc4e09308d1276d28c869c684d7777685682"
+
+[[package]]
+name = "jni"
+version = "0.21.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97"
+dependencies = [
+ "cesu8",
+ "cfg-if",
+ "combine",
+ "jni-sys 0.3.1",
+ "log",
+ "thiserror 1.0.69",
+ "walkdir",
+ "windows-sys 0.45.0",
+]
+
+[[package]]
+name = "jni-sys"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41a652e1f9b6e0275df1f15b32661cf0d4b78d4d87ddec5e0c3c20f097433258"
+dependencies = [
+ "jni-sys 0.4.1",
+]
+
+[[package]]
+name = "jni-sys"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c6377a88cb3910bee9b0fa88d4f42e1d2da8e79915598f65fb0c7ee14c878af2"
+dependencies = [
+ "jni-sys-macros",
+]
+
+[[package]]
+name = "jni-sys-macros"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38c0b942f458fe50cdac086d2f946512305e5631e720728f2a61aabcd47a6264"
+dependencies = [
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "jobserver"
+version = "0.1.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
+dependencies = [
+ "getrandom 0.3.4",
+ "libc",
+]
+
+[[package]]
+name = "js-sys"
+version = "0.3.94"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e04e2ef80ce82e13552136fabeef8a5ed1f985a96805761cbb9a2c34e7664d9"
+dependencies = [
+ "cfg-if",
+ "futures-util",
+ "once_cell",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "jsonwebtoken"
+version = "8.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6971da4d9c3aa03c3d8f3ff0f4155b534aad021292003895a469716b2a230378"
+dependencies = [
+ "base64 0.21.7",
+ "pem",
+ "ring 0.16.20",
+ "serde",
+ "serde_json",
+ "simple_asn1",
+]
+
+[[package]]
+name = "language-tags"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388"
+
+[[package]]
+name = "lapin"
+version = "4.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "39338badb3f992d800f6964501b056b575bdf142eb288202f973d218fe253b90"
+dependencies = [
+ "amq-protocol",
+ "async-rs",
+ "async-trait",
+ "backon",
+ "cfg-if",
+ "flume",
+ "futures-core",
+ "futures-io",
+ "tracing",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
+
+[[package]]
+name = "leb128fmt"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2"
+
+[[package]]
+name = "libc"
+version = "0.2.184"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48f5d2a454e16a5ea0f4ced81bd44e4cfc7bd3a507b61887c99fd3538b28e4af"
+
+[[package]]
+name = "linked-hash-map"
+version = "0.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
+
+[[package]]
+name = "linux-raw-sys"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53"
+
+[[package]]
+name = "litemap"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92daf443525c4cce67b150400bc2316076100ce0b3686209eb8cf3c31612e6f0"
+
+[[package]]
+name = "lock_api"
+version = "0.4.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965"
+dependencies = [
+ "scopeguard",
+]
+
+[[package]]
+name = "log"
+version = "0.4.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
+
+[[package]]
+name = "lru-cache"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c"
+dependencies = [
+ "linked-hash-map",
+]
+
+[[package]]
+name = "lru-slab"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
+
+[[package]]
+name = "matchers"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9"
+dependencies = [
+ "regex-automata",
+]
+
+[[package]]
+name = "md5"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae960838283323069879657ca3de837e9f7bbb4c7bf6ea7f1b290d5e9476d2e0"
+
+[[package]]
+name = "memchr"
+version = "2.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79"
+
+[[package]]
+name = "mime"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+
+[[package]]
+name = "minimal-lexical"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+
+[[package]]
+name = "mio"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50b7e5b27aa02a74bac8c3f23f448f8d87ff11f92d3aac1a6ed369ee08cc56c1"
+dependencies = [
+ "libc",
+ "wasi",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "moka"
+version = "0.12.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "957228ad12042ee839f93c8f257b62b4c0ab5eaae1d4fa60de53b27c9d7c5046"
+dependencies = [
+ "crossbeam-channel",
+ "crossbeam-epoch",
+ "crossbeam-utils",
+ "equivalent",
+ "parking_lot",
+ "portable-atomic",
+ "smallvec",
+ "tagptr",
+ "uuid",
+]
+
+[[package]]
+name = "nom"
+version = "7.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
+dependencies = [
+ "memchr",
+ "minimal-lexical",
+]
+
+[[package]]
+name = "nom"
+version = "8.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "nu-ansi-term"
+version = "0.50.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
+dependencies = [
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "num-bigint"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
+dependencies = [
+ "num-integer",
+ "num-traits",
+]
+
+[[package]]
+name = "num-conv"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c6673768db2d862beb9b39a78fdcb1a69439615d5794a1be50caa9bc92c81967"
+
+[[package]]
+name = "num-integer"
+version = "0.1.46"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
+dependencies = [
+ "num-traits",
+]
+
+[[package]]
+name = "num-traits"
+version = "0.2.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "oid-registry"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "12f40cff3dde1b6087cc5d5f5d4d65712f34016a03ed60e9c08dcc392736b5b7"
+dependencies = [
+ "asn1-rs",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.21.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9f7c3e4beb33f85d45ae3e3a1792185706c8e16d043238c593331cc7cd313b50"
+dependencies = [
+ "critical-section",
+ "portable-atomic",
+]
+
+[[package]]
+name = "openssl-probe"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe"
+
+[[package]]
+name = "p12-keystore"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ffb9bf5222606eb712d3bb30e01bc9420545b00859970897e70c682353a034f2"
+dependencies = [
+ "base64 0.22.1",
+ "cbc",
+ "cms",
+ "der",
+ "des",
+ "hex",
+ "hmac 0.12.1",
+ "pkcs12",
+ "pkcs5",
+ "rand 0.10.0",
+ "rc2",
+ "sha1",
+ "sha2 0.10.9",
+ "thiserror 2.0.18",
+ "x509-parser",
+]
+
+[[package]]
+name = "parking"
+version = "2.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba"
+
+[[package]]
+name = "parking_lot"
+version = "0.12.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a"
+dependencies = [
+ "lock_api",
+ "parking_lot_core",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.9.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "windows-link",
+]
+
+[[package]]
+name = "pbkdf2"
+version = "0.12.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2"
+dependencies = [
+ "digest 0.10.7",
+ "hmac 0.12.1",
+]
+
+[[package]]
+name = "pem"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8"
+dependencies = [
+ "base64 0.13.1",
+]
+
+[[package]]
+name = "pem-rfc7468"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412"
+dependencies = [
+ "base64ct",
+]
+
+[[package]]
+name = "percent-encoding"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd"
+
+[[package]]
+name = "piper"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c835479a4443ded371d6c535cbfd8d31ad92c5d23ae9770a61bc155e4992a3c1"
+dependencies = [
+ "atomic-waker",
+ "fastrand",
+ "futures-io",
+]
+
+[[package]]
+name = "pkcs12"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "695b3df3d3cc1015f12d70235e35b6b79befc5fa7a9b95b951eab1dd07c9efc2"
+dependencies = [
+ "cms",
+ "const-oid 0.9.6",
+ "der",
+ "digest 0.10.7",
+ "spki",
+ "x509-cert",
+ "zeroize",
+]
+
+[[package]]
+name = "pkcs5"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e847e2c91a18bfa887dd028ec33f2fe6f25db77db3619024764914affe8b69a6"
+dependencies = [
+ "aes",
+ "cbc",
+ "der",
+ "pbkdf2",
+ "scrypt",
+ "sha2 0.10.9",
+ "spki",
+]
+
+[[package]]
+name = "portable-atomic"
+version = "1.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49"
+
+[[package]]
+name = "potential_utf"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0103b1cef7ec0cf76490e969665504990193874ea05c85ff9bab8b911d0a0564"
+dependencies = [
+ "zerovec",
+]
+
+[[package]]
+name = "powerfmt"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9"
+dependencies = [
+ "zerocopy",
+]
+
+[[package]]
+name = "prettyplease"
+version = "0.2.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b"
+dependencies = [
+ "proc-macro2",
+ "syn",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.106"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quinn"
+version = "0.11.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20"
+dependencies = [
+ "bytes",
+ "cfg_aliases",
+ "pin-project-lite",
+ "quinn-proto",
+ "quinn-udp",
+ "rustc-hash",
+ "rustls",
+ "socket2",
+ "thiserror 2.0.18",
+ "tokio",
+ "tracing",
+ "web-time",
+]
+
+[[package]]
+name = "quinn-proto"
+version = "0.11.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "434b42fec591c96ef50e21e886936e66d3cc3f737104fdb9b737c40ffb94c098"
+dependencies = [
+ "aws-lc-rs",
+ "bytes",
+ "getrandom 0.3.4",
+ "lru-slab",
+ "rand 0.9.2",
+ "ring 0.17.14",
+ "rustc-hash",
+ "rustls",
+ "rustls-pki-types",
+ "slab",
+ "thiserror 2.0.18",
+ "tinyvec",
+ "tracing",
+ "web-time",
+]
+
+[[package]]
+name = "quinn-udp"
+version = "0.5.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd"
+dependencies = [
+ "cfg_aliases",
+ "libc",
+ "once_cell",
+ "socket2",
+ "tracing",
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "r-efi"
+version = "5.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
+
+[[package]]
+name = "r-efi"
+version = "6.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf"
+
+[[package]]
+name = "rand"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
+dependencies = [
+ "rand_chacha",
+ "rand_core 0.9.5",
+]
+
+[[package]]
+name = "rand"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc266eb313df6c5c09c1c7b1fbe2510961e5bcd3add930c1e31f7ed9da0feff8"
+dependencies = [
+ "chacha20",
+ "getrandom 0.4.2",
+ "rand_core 0.10.0",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
+dependencies = [
+ "ppv-lite86",
+ "rand_core 0.9.5",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.9.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c"
+dependencies = [
+ "getrandom 0.3.4",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c8d0fd677905edcbeedbf2edb6494d676f0e98d54d5cf9bda0b061cb8fb8aba"
+
+[[package]]
+name = "rc2"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62c64daa8e9438b84aaae55010a93f396f8e60e3911590fcba770d04643fc1dd"
+dependencies = [
+ "cipher",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.5.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "regex"
+version = "1.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-automata",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.4.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a"
+
+[[package]]
+name = "reqwest"
+version = "0.13.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ab3f43e3283ab1488b624b44b0e988d0acea0b3214e694730a055cb6b2efa801"
+dependencies = [
+ "base64 0.22.1",
+ "bytes",
+ "futures-core",
+ "http 1.4.0",
+ "http-body",
+ "http-body-util",
+ "hyper",
+ "hyper-rustls",
+ "hyper-util",
+ "js-sys",
+ "log",
+ "percent-encoding",
+ "pin-project-lite",
+ "quinn",
+ "rustls",
+ "rustls-pki-types",
+ "rustls-platform-verifier",
+ "sync_wrapper",
+ "tokio",
+ "tokio-rustls",
+ "tower",
+ "tower-http",
+ "tower-service",
+ "url",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "web-sys",
+]
+
+[[package]]
+name = "resolv-conf"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e061d1b48cb8d38042de4ae0a7a6401009d6143dc80d2e2d6f31f0bdd6470c7"
+
+[[package]]
+name = "ring"
+version = "0.16.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc"
+dependencies = [
+ "cc",
+ "libc",
+ "once_cell",
+ "spin 0.5.2",
+ "untrusted 0.7.1",
+ "web-sys",
+ "winapi",
+]
+
+[[package]]
+name = "ring"
+version = "0.17.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7"
+dependencies = [
+ "cc",
+ "cfg-if",
+ "getrandom 0.2.17",
+ "libc",
+ "untrusted 0.9.0",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "rustc-hash"
+version = "2.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94300abf3f1ae2e2b8ffb7b58043de3d399c73fa6f4b73826402a5c457614dbe"
+
+[[package]]
+name = "rusticata-macros"
+version = "4.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632"
+dependencies = [
+ "nom 7.1.3",
+]
+
+[[package]]
+name = "rustix"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190"
+dependencies = [
+ "bitflags",
+ "errno",
+ "libc",
+ "linux-raw-sys",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "rustls"
+version = "0.23.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "758025cb5fccfd3bc2fd74708fd4682be41d99e5dff73c377c0646c6012c73a4"
+dependencies = [
+ "aws-lc-rs",
+ "once_cell",
+ "rustls-pki-types",
+ "rustls-webpki",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "rustls-connector"
+version = "0.22.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f510f2d983baf4a45354ae8ca5abf5a6cdb3c47244ea22f705499d6d9c09a912"
+dependencies = [
+ "futures-io",
+ "futures-rustls",
+ "log",
+ "rustls",
+ "rustls-native-certs",
+ "rustls-pki-types",
+ "rustls-webpki",
+]
+
+[[package]]
+name = "rustls-native-certs"
+version = "0.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63"
+dependencies = [
+ "openssl-probe",
+ "rustls-pki-types",
+ "schannel",
+ "security-framework",
+]
+
+[[package]]
+name = "rustls-pki-types"
+version = "1.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd"
+dependencies = [
+ "web-time",
+ "zeroize",
+]
+
+[[package]]
+name = "rustls-platform-verifier"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d99feebc72bae7ab76ba994bb5e121b8d83d910ca40b36e0921f53becc41784"
+dependencies = [
+ "core-foundation",
+ "core-foundation-sys",
+ "jni",
+ "log",
+ "once_cell",
+ "rustls",
+ "rustls-native-certs",
+ "rustls-platform-verifier-android",
+ "rustls-webpki",
+ "security-framework",
+ "security-framework-sys",
+ "webpki-root-certs",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "rustls-platform-verifier-android"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f"
+
+[[package]]
+name = "rustls-webpki"
+version = "0.103.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df33b2b81ac578cabaf06b89b0631153a3f416b0a886e8a7a1707fb51abbd1ef"
+dependencies = [
+ "aws-lc-rs",
+ "ring 0.17.14",
+ "rustls-pki-types",
+ "untrusted 0.9.0",
+]
+
+[[package]]
+name = "rustversion"
+version = "1.0.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
+
+[[package]]
+name = "salsa20"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213"
+dependencies = [
+ "cipher",
+]
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "schannel"
+version = "0.1.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91c1b7e4904c873ef0710c1f407dde2e6287de2bebc1bbbf7d430bb7cbffd939"
+dependencies = [
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "scopeguard"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+
+[[package]]
+name = "scrypt"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0516a385866c09368f0b5bcd1caff3366aace790fcd46e2bb032697bb172fd1f"
+dependencies = [
+ "pbkdf2",
+ "salsa20",
+ "sha2 0.10.9",
+]
+
+[[package]]
+name = "security-framework"
+version = "3.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d"
+dependencies = [
+ "bitflags",
+ "core-foundation",
+ "core-foundation-sys",
+ "libc",
+ "security-framework-sys",
+]
+
+[[package]]
+name = "security-framework-sys"
+version = "2.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ce2691df843ecc5d231c0b14ece2acc3efb62c0a398c7e1d875f3983ce020e3"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "semver"
+version = "1.0.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
+
+[[package]]
+name = "serde"
+version = "1.0.228"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
+dependencies = [
+ "serde_core",
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_core"
+version = "1.0.228"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.228"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.149"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86"
+dependencies = [
+ "itoa",
+ "memchr",
+ "serde",
+ "serde_core",
+ "zmij",
+]
+
+[[package]]
+name = "sha1"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
+dependencies = [
+ "cfg-if",
+ "cpufeatures 0.2.17",
+ "digest 0.10.7",
+]
+
+[[package]]
+name = "sha2"
+version = "0.10.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
+dependencies = [
+ "cfg-if",
+ "cpufeatures 0.2.17",
+ "digest 0.10.7",
+]
+
+[[package]]
+name = "sha2"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "446ba717509524cb3f22f17ecc096f10f4822d76ab5c0b9822c5f9c284e825f4"
+dependencies = [
+ "cfg-if",
+ "cpufeatures 0.3.0",
+ "digest 0.11.2",
+]
+
+[[package]]
+name = "sharded-slab"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
+dependencies = [
+ "lazy_static",
+]
+
+[[package]]
+name = "shlex"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
+
+[[package]]
+name = "simple_asn1"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d585997b0ac10be3c5ee635f1bab02d512760d14b7c468801ac8a01d9ae5f1d"
+dependencies = [
+ "num-bigint",
+ "num-traits",
+ "thiserror 2.0.18",
+ "time",
+]
+
+[[package]]
+name = "slab"
+version = "0.4.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5"
+
+[[package]]
+name = "smallvec"
+version = "1.15.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
+
+[[package]]
+name = "socket2"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e"
+dependencies = [
+ "libc",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "spin"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
+
+[[package]]
+name = "spin"
+version = "0.9.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
+dependencies = [
+ "lock_api",
+]
+
+[[package]]
+name = "spki"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
+dependencies = [
+ "base64ct",
+ "der",
+]
+
+[[package]]
+name = "stable_deref_trait"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596"
+
+[[package]]
+name = "subtle"
+version = "2.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
+
+[[package]]
+name = "syn"
+version = "2.0.117"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "sync_wrapper"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263"
+dependencies = [
+ "futures-core",
+]
+
+[[package]]
+name = "synstructure"
+version = "0.13.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tagptr"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417"
+
+[[package]]
+name = "tcp-stream"
+version = "0.34.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff68da304b44cbdfcb3c084ef27d8ddb8bdfba1dab36b1469513c6fe1e1c2b58"
+dependencies = [
+ "async-rs",
+ "cfg-if",
+ "futures-io",
+ "p12-keystore",
+ "rustls-connector",
+]
+
+[[package]]
+name = "tempfile"
+version = "3.27.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32497e9a4c7b38532efcdebeef879707aa9f794296a4f0244f6f69e9bc8574bd"
+dependencies = [
+ "fastrand",
+ "getrandom 0.4.2",
+ "once_cell",
+ "rustix",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "thiserror"
+version = "1.0.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
+dependencies = [
+ "thiserror-impl 1.0.69",
+]
+
+[[package]]
+name = "thiserror"
+version = "2.0.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4"
+dependencies = [
+ "thiserror-impl 2.0.18",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "2.0.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "thread_local"
+version = "1.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "tickborg"
+version = "0.1.0"
+dependencies = [
+ "async-trait",
+ "brace-expand",
+ "chrono",
+ "either",
+ "fs2",
+ "futures",
+ "futures-util",
+ "hex",
+ "hmac 0.13.0",
+ "http 1.4.0",
+ "http-body-util",
+ "hubcaps",
+ "hyper",
+ "hyper-util",
+ "lapin",
+ "lru-cache",
+ "md5",
+ "mime",
+ "nom 8.0.0",
+ "parking_lot",
+ "regex",
+ "rustls-pki-types",
+ "serde",
+ "serde_json",
+ "sha2 0.11.0",
+ "tempfile",
+ "tokio",
+ "tokio-stream",
+ "tracing",
+ "tracing-subscriber",
+ "uuid",
+]
+
+[[package]]
+name = "tickborg-simple-build"
+version = "0.1.0"
+dependencies = [
+ "log",
+ "tickborg",
+]
+
+[[package]]
+name = "time"
+version = "0.3.47"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c"
+dependencies = [
+ "deranged",
+ "itoa",
+ "num-conv",
+ "powerfmt",
+ "serde_core",
+ "time-core",
+ "time-macros",
+]
+
+[[package]]
+name = "time-core"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca"
+
+[[package]]
+name = "time-macros"
+version = "0.2.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e70e4c5a0e0a8a4823ad65dfe1a6930e4f4d756dcd9dd7939022b5e8c501215"
+dependencies = [
+ "num-conv",
+ "time-core",
+]
+
+[[package]]
+name = "tinystr"
+version = "0.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c8323304221c2a851516f22236c5722a72eaa19749016521d6dff0824447d96d"
+dependencies = [
+ "displaydoc",
+ "zerovec",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e61e67053d25a4e82c844e8424039d9745781b3fc4f32b8d55ed50f5f667ef3"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
+
+[[package]]
+name = "tokio"
+version = "1.50.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d"
+dependencies = [
+ "bytes",
+ "libc",
+ "mio",
+ "pin-project-lite",
+ "socket2",
+ "tokio-macros",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "tokio-macros"
+version = "2.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tokio-rustls"
+version = "0.26.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61"
+dependencies = [
+ "rustls",
+ "tokio",
+]
+
+[[package]]
+name = "tokio-stream"
+version = "0.1.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70"
+dependencies = [
+ "futures-core",
+ "pin-project-lite",
+ "tokio",
+]
+
+[[package]]
+name = "tokio-util"
+version = "0.7.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "futures-sink",
+ "pin-project-lite",
+ "tokio",
+]
+
+[[package]]
+name = "tower"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4"
+dependencies = [
+ "futures-core",
+ "futures-util",
+ "pin-project-lite",
+ "sync_wrapper",
+ "tokio",
+ "tower-layer",
+ "tower-service",
+]
+
+[[package]]
+name = "tower-http"
+version = "0.6.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
+dependencies = [
+ "bitflags",
+ "bytes",
+ "futures-util",
+ "http 1.4.0",
+ "http-body",
+ "iri-string",
+ "pin-project-lite",
+ "tower",
+ "tower-layer",
+ "tower-service",
+]
+
+[[package]]
+name = "tower-layer"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
+
+[[package]]
+name = "tower-service"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
+
+[[package]]
+name = "tracing"
+version = "0.1.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100"
+dependencies = [
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a"
+dependencies = [
+ "once_cell",
+ "valuable",
+]
+
+[[package]]
+name = "tracing-log"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
+dependencies = [
+ "log",
+ "once_cell",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-serde"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1"
+dependencies = [
+ "serde",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-subscriber"
+version = "0.3.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb7f578e5945fb242538965c2d0b04418d38ec25c79d160cd279bf0731c8d319"
+dependencies = [
+ "matchers",
+ "nu-ansi-term",
+ "once_cell",
+ "regex-automata",
+ "serde",
+ "serde_json",
+ "sharded-slab",
+ "smallvec",
+ "thread_local",
+ "tracing",
+ "tracing-core",
+ "tracing-log",
+ "tracing-serde",
+]
+
+[[package]]
+name = "try-lock"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
+
+[[package]]
+name = "typenum"
+version = "1.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb"
+
+[[package]]
+name = "unicase"
+version = "2.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dbc4bc3a9f746d862c45cb89d705aa10f187bb96c76001afab07a0d35ce60142"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
+
+[[package]]
+name = "untrusted"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
+
+[[package]]
+name = "untrusted"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
+
+[[package]]
+name = "url"
+version = "2.5.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "percent-encoding",
+ "serde",
+]
+
+[[package]]
+name = "utf8_iter"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
+
+[[package]]
+name = "uuid"
+version = "1.23.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ac8b6f42ead25368cf5b098aeb3dc8a1a2c05a3eee8a9a1a68c640edbfc79d9"
+dependencies = [
+ "getrandom 0.4.2",
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "valuable"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
+
+[[package]]
+name = "version_check"
+version = "0.9.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
+
+[[package]]
+name = "walkdir"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
+dependencies = [
+ "same-file",
+ "winapi-util",
+]
+
+[[package]]
+name = "want"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
+dependencies = [
+ "try-lock",
+]
+
+[[package]]
+name = "wasi"
+version = "0.11.1+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
+
+[[package]]
+name = "wasip2"
+version = "1.0.2+wasi-0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5"
+dependencies = [
+ "wit-bindgen",
+]
+
+[[package]]
+name = "wasip3"
+version = "0.4.0+wasi-0.3.0-rc-2026-01-06"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5"
+dependencies = [
+ "wit-bindgen",
+]
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.117"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0551fc1bb415591e3372d0bc4780db7e587d84e2a7e79da121051c5c4b89d0b0"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+ "rustversion",
+ "wasm-bindgen-macro",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-futures"
+version = "0.4.67"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "03623de6905b7206edd0a75f69f747f134b7f0a2323392d664448bf2d3c5d87e"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.117"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7fbdf9a35adf44786aecd5ff89b4563a90325f9da0923236f6104e603c7e86be"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.117"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dca9693ef2bab6d4e6707234500350d8dad079eb508dca05530c85dc3a529ff2"
+dependencies = [
+ "bumpalo",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.117"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "39129a682a6d2d841b6c429d0c51e5cb0ed1a03829d8b3d1e69a011e62cb3d3b"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "wasm-encoder"
+version = "0.244.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319"
+dependencies = [
+ "leb128fmt",
+ "wasmparser",
+]
+
+[[package]]
+name = "wasm-metadata"
+version = "0.244.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909"
+dependencies = [
+ "anyhow",
+ "indexmap",
+ "wasm-encoder",
+ "wasmparser",
+]
+
+[[package]]
+name = "wasmparser"
+version = "0.244.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe"
+dependencies = [
+ "bitflags",
+ "hashbrown 0.15.5",
+ "indexmap",
+ "semver",
+]
+
+[[package]]
+name = "web-sys"
+version = "0.3.94"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cd70027e39b12f0849461e08ffc50b9cd7688d942c1c8e3c7b22273236b4dd0a"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "web-time"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "webpki-root-certs"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "804f18a4ac2676ffb4e8b5b5fa9ae38af06df08162314f96a68d2a363e21a8ca"
+dependencies = [
+ "rustls-pki-types",
+]
+
+[[package]]
+name = "widestring"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72069c3113ab32ab29e5584db3c6ec55d416895e60715417b5b883a357c3e471"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
+dependencies = [
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "windows-core"
+version = "0.62.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb"
+dependencies = [
+ "windows-implement",
+ "windows-interface",
+ "windows-link",
+ "windows-result",
+ "windows-strings",
+]
+
+[[package]]
+name = "windows-implement"
+version = "0.60.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "windows-interface"
+version = "0.59.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "windows-link"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
+
+[[package]]
+name = "windows-registry"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720"
+dependencies = [
+ "windows-link",
+ "windows-result",
+ "windows-strings",
+]
+
+[[package]]
+name = "windows-result"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5"
+dependencies = [
+ "windows-link",
+]
+
+[[package]]
+name = "windows-strings"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091"
+dependencies = [
+ "windows-link",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.45.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0"
+dependencies = [
+ "windows-targets 0.42.2",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.60.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
+dependencies = [
+ "windows-targets 0.53.5",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.61.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
+dependencies = [
+ "windows-link",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071"
+dependencies = [
+ "windows_aarch64_gnullvm 0.42.2",
+ "windows_aarch64_msvc 0.42.2",
+ "windows_i686_gnu 0.42.2",
+ "windows_i686_msvc 0.42.2",
+ "windows_x86_64_gnu 0.42.2",
+ "windows_x86_64_gnullvm 0.42.2",
+ "windows_x86_64_msvc 0.42.2",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
+dependencies = [
+ "windows_aarch64_gnullvm 0.52.6",
+ "windows_aarch64_msvc 0.52.6",
+ "windows_i686_gnu 0.52.6",
+ "windows_i686_gnullvm 0.52.6",
+ "windows_i686_msvc 0.52.6",
+ "windows_x86_64_gnu 0.52.6",
+ "windows_x86_64_gnullvm 0.52.6",
+ "windows_x86_64_msvc 0.52.6",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.53.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3"
+dependencies = [
+ "windows-link",
+ "windows_aarch64_gnullvm 0.53.1",
+ "windows_aarch64_msvc 0.53.1",
+ "windows_i686_gnu 0.53.1",
+ "windows_i686_gnullvm 0.53.1",
+ "windows_i686_msvc 0.53.1",
+ "windows_x86_64_gnu 0.53.1",
+ "windows_x86_64_gnullvm 0.53.1",
+ "windows_x86_64_msvc 0.53.1",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8"
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.42.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650"
+
+[[package]]
+name = "wit-bindgen"
+version = "0.51.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5"
+dependencies = [
+ "wit-bindgen-rust-macro",
+]
+
+[[package]]
+name = "wit-bindgen-core"
+version = "0.51.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc"
+dependencies = [
+ "anyhow",
+ "heck",
+ "wit-parser",
+]
+
+[[package]]
+name = "wit-bindgen-rust"
+version = "0.51.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21"
+dependencies = [
+ "anyhow",
+ "heck",
+ "indexmap",
+ "prettyplease",
+ "syn",
+ "wasm-metadata",
+ "wit-bindgen-core",
+ "wit-component",
+]
+
+[[package]]
+name = "wit-bindgen-rust-macro"
+version = "0.51.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a"
+dependencies = [
+ "anyhow",
+ "prettyplease",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wit-bindgen-core",
+ "wit-bindgen-rust",
+]
+
+[[package]]
+name = "wit-component"
+version = "0.244.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2"
+dependencies = [
+ "anyhow",
+ "bitflags",
+ "indexmap",
+ "log",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "wasm-encoder",
+ "wasm-metadata",
+ "wasmparser",
+ "wit-parser",
+]
+
+[[package]]
+name = "wit-parser"
+version = "0.244.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736"
+dependencies = [
+ "anyhow",
+ "id-arena",
+ "indexmap",
+ "log",
+ "semver",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "unicode-xid",
+ "wasmparser",
+]
+
+[[package]]
+name = "writeable"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9"
+
+[[package]]
+name = "x509-cert"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1301e935010a701ae5f8655edc0ad17c44bad3ac5ce8c39185f75453b720ae94"
+dependencies = [
+ "const-oid 0.9.6",
+ "der",
+ "spki",
+]
+
+[[package]]
+name = "x509-parser"
+version = "0.18.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d43b0f71ce057da06bc0851b23ee24f3f86190b07203dd8f567d0b706a185202"
+dependencies = [
+ "asn1-rs",
+ "data-encoding",
+ "der-parser",
+ "lazy_static",
+ "nom 7.1.3",
+ "oid-registry",
+ "rusticata-macros",
+ "thiserror 2.0.18",
+ "time",
+]
+
+[[package]]
+name = "yoke"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abe8c5fda708d9ca3df187cae8bfb9ceda00dd96231bed36e445a1a48e66f9ca"
+dependencies = [
+ "stable_deref_trait",
+ "yoke-derive",
+ "zerofrom",
+]
+
+[[package]]
+name = "yoke-derive"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "de844c262c8848816172cef550288e7dc6c7b7814b4ee56b3e1553f275f1858e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "zerocopy"
+version = "0.8.48"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eed437bf9d6692032087e337407a86f04cd8d6a16a37199ed57949d415bd68e9"
+dependencies = [
+ "zerocopy-derive",
+]
+
+[[package]]
+name = "zerocopy-derive"
+version = "0.8.48"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70e3cd084b1788766f53af483dd21f93881ff30d7320490ec3ef7526d203bad4"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "zerofrom"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69faa1f2a1ea75661980b013019ed6687ed0e83d069bc1114e2cc74c6c04c4df"
+dependencies = [
+ "zerofrom-derive",
+]
+
+[[package]]
+name = "zerofrom-derive"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11532158c46691caf0f2593ea8358fed6bbf68a0315e80aae9bd41fbade684a1"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "zeroize"
+version = "1.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
+
+[[package]]
+name = "zerotrie"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f9152d31db0792fa83f70fb2f83148effb5c1f5b8c7686c3459e361d9bc20bf"
+dependencies = [
+ "displaydoc",
+ "yoke",
+ "zerofrom",
+]
+
+[[package]]
+name = "zerovec"
+version = "0.11.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "90f911cbc359ab6af17377d242225f4d75119aec87ea711a880987b18cd7b239"
+dependencies = [
+ "yoke",
+ "zerofrom",
+ "zerovec-derive",
+]
+
+[[package]]
+name = "zerovec-derive"
+version = "0.11.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "625dc425cab0dca6dc3c3319506e6593dcb08a9f387ea3b284dbd52a92c40555"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "zmij"
+version = "1.0.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa"
diff --git a/ofborg/Cargo.toml b/ofborg/Cargo.toml
new file mode 100644
index 0000000000..87e2967da1
--- /dev/null
+++ b/ofborg/Cargo.toml
@@ -0,0 +1,13 @@
+[workspace]
+members = [
+ "tickborg",
+ "tickborg-simple-build"
+]
+resolver = "2"
+
+[profile.release]
+debug = true
+
+[patch.crates-io]
+#hubcaps = { path = "../hubcaps" }
+#amq-proto = { path = "rust-amq-proto" }
diff --git a/ofborg/LICENSE b/ofborg/LICENSE
new file mode 100644
index 0000000000..14bb7669c9
--- /dev/null
+++ b/ofborg/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2017 Graham Christensen
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/ofborg/README.md b/ofborg/README.md
new file mode 100644
index 0000000000..812fe78f2f
--- /dev/null
+++ b/ofborg/README.md
@@ -0,0 +1,107 @@
+# tickborg
+
+Distributed CI bot for the Project Tick monorepo, adapted from [ofborg](https://github.com/NixOS/ofborg).
+
+## Overview
+
+tickborg is a RabbitMQ-based distributed CI system that:
+- Automatically detects changed projects in PRs
+- Builds affected sub-projects using their native build systems (CMake, Meson, Autotools, Cargo, Gradle, Make)
+- Posts build results as GitHub check runs
+- Supports multi-platform builds (Linux, macOS, Windows, FreeBSD)
+
+## Automatic Building
+
+PRs automatically trigger builds for affected projects based on:
+- **File path detection**: Changed files are mapped to sub-projects
+- **Conventional Commits**: Commit scopes like `feat(meshmc):` trigger builds for the named project
+
+Example commit titles and the builds they will start:
+
+| Message | Automatic Build |
+|---|---|
+| `feat(meshmc): add chunk loading` | `meshmc` |
+| `cmark: fix buffer overflow` | `cmark` |
+| `fix(neozip): handle empty archives` | `neozip` |
+| `chore(ci): update workflow` | _(CI changes only)_ |
+
+If the title of a PR begins with `WIP:` or contains `[WIP]` anywhere, its
+projects are not built automatically.
+
+## Commands
+
+The comment parser is line-based. Commentary can be interwoven with bot
+instructions.
+
+1. To trigger the bot, the line _must_ start with `@tickbot` (case insensitive).
+2. To use multiple commands, separate them on different lines.
+
+### build
+
+```
+@tickbot build meshmc neozip cmark
+```
+
+This will build the specified projects using their configured build systems.
+
+### test
+
+```
+@tickbot test meshmc
+```
+
+This will run the test suite for the specified projects.
+
+### eval
+
+```
+@tickbot eval
+```
+
+Triggers a full evaluation of the PR — detects changed projects and labels the PR.
+
+## Supported Platforms
+
+| Platform | Runner |
+|---|---|
+| `x86_64-linux` | `ubuntu-latest` |
+| `aarch64-linux` | `ubuntu-24.04-arm` |
+| `x86_64-darwin` | `macos-15` |
+| `aarch64-darwin` | `macos-15` |
+| `x86_64-windows` | `windows-2025` |
+| `aarch64-windows` | `windows-2025` |
+| `x86_64-freebsd` | `ubuntu-latest` (VM) |
+
+## Sub-Projects
+
+| Project | Build System | Path |
+|---|---|---|
+| mnv | Autotools | `mnv/` |
+| cgit | Make | `cgit/` |
+| cmark | CMake | `cmark/` |
+| neozip | CMake | `neozip/` |
+| genqrcode | CMake | `genqrcode/` |
+| json4cpp | CMake | `json4cpp/` |
+| tomlplusplus | Meson | `tomlplusplus/` |
+| libnbtplusplus | CMake | `libnbtplusplus/` |
+| meshmc | CMake | `meshmc/` |
+| forgewrapper | Gradle | `forgewrapper/` |
+| corebinutils | Make | `corebinutils/` |
+
+## Hacking
+
+```shell
+$ git clone https://github.com/project-tick/Project-Tick/
+$ cd Project-Tick/ofborg
+$ cd tickborg
+$ cargo build
+$ cargo check
+$ cargo test
+```
+
+Make sure to format with `cargo fmt` and lint with `cargo clippy`.
+
+## Configuration
+
+See [`example.config.json`](./example.config.json) for a minimal configuration
+and [`config.public.json`](./config.public.json) for the production config.
diff --git a/ofborg/config.public.json b/ofborg/config.public.json
new file mode 100644
index 0000000000..b88b41fd05
--- /dev/null
+++ b/ofborg/config.public.json
@@ -0,0 +1,33 @@
+{
+ "github_webhook_receiver": {
+ "listen": "[::1]:9899",
+ "webhook_secret_file": "/run/secrets/tickborg/github-webhook-secret",
+ "rabbitmq": {
+ "host": "localhost",
+ "ssl": false,
+ "username": "tickborg-github-webhook",
+ "password_file": "/run/secrets/tickborg/github-webhook-amqp-password",
+ "virtualhost": "tickborg"
+ }
+ },
+ "feedback": {
+ "full_logs": true
+ },
+ "log_storage": {
+ "path": "/var/lib/nginx/tickborg/logs/"
+ },
+ "runner": {
+ "repos": [
+ "project-tick/Project-Tick"
+ ],
+ "disable_trusted_users": false,
+ "trusted_users": []
+ },
+ "checkout": {
+ "root": "/var/lib/tickborg/.build-test"
+ },
+ "build": {
+ "system": "x86_64-linux",
+ "build_timeout_seconds": 3600
+ }
+}
diff --git a/ofborg/default.nix b/ofborg/default.nix
new file mode 100644
index 0000000000..65f558ee36
--- /dev/null
+++ b/ofborg/default.nix
@@ -0,0 +1,6 @@
+(import
+ (fetchTarball {
+ url = "https://github.com/edolstra/flake-compat/archive/0f9255e01c2351cc7d116c072cb317785dd33b33.tar.gz";
+ sha256 = "0m9grvfsbwmvgwaxvdzv6cmyvjnlww004gfxjvcl806ndqaxzy4j";
+ })
+ { src = ./.; }).defaultNix.packages.${builtins.currentSystem}
diff --git a/ofborg/doc/sample.dot b/ofborg/doc/sample.dot
new file mode 100644
index 0000000000..128255660a
--- /dev/null
+++ b/ofborg/doc/sample.dot
@@ -0,0 +1,18 @@
+digraph ClassDiagram {
+ "PR approved to build,\n`@grahamcofborg sample 10`" -> "does the PR change stdenv?";
+ "does the PR change stdenv?" -> "refuse, won't do\nmass rebuilds" [label="yes"];
+ "does the PR change stdenv?" -> "stdenv already built?" [label="no"];
+ "stdenv already built?" -> "Comment:\n(stdenv not available, will retry later.)" [label="no"];
+ "Comment:\n(stdenv not available, will retry later.)" ->
+ "put the build job back to the delayed build queue";
+ "put the build job back to the delayed build queue" ->
+ "5 min later...";
+ "5 min later..." -> "stdenv built yet?";
+ "stdenv built yet?" -> "put the build job back to the delayed build queue"[label="no"];
+ "stdenv built yet?" -> "proceed with build"[label="yes"];
+ "stdenv already built?" -> "proceed with build" [label="yes"];
+ "proceed with build" -> "diff prev commit's drvs with current commit's drvs";
+ "diff prev commit's drvs with current commit's drvs" -> "select 10 drvs";
+ "select 10 drvs" -> "build them";
+ "build them" -> "comment on github issue with a build report";
+} \ No newline at end of file
diff --git a/ofborg/doc/sample.svg b/ofborg/doc/sample.svg
new file mode 100644
index 0000000000..3d61aeab28
--- /dev/null
+++ b/ofborg/doc/sample.svg
@@ -0,0 +1,157 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 2.36.0 (20140111.2315)
+ -->
+<!-- Title: ClassDiagram Pages: 1 -->
+<svg width="558pt" height="968pt"
+ viewBox="0.00 0.00 558.00 968.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 964)">
+<title>ClassDiagram</title>
+<polygon fill="white" stroke="none" points="-4,4 -4,-964 554,-964 554,4 -4,4"/>
+<!-- PR approved to build,\n`@grahamcofborg sample 10` -->
+<g id="node1" class="node"><title>PR approved to build,\n`@grahamcofborg sample 10`</title>
+<ellipse fill="none" stroke="black" cx="220" cy="-933" rx="128.54" ry="26.7407"/>
+<text text-anchor="middle" x="220" y="-936.8" font-family="Times,serif" font-size="14.00">PR approved to build,</text>
+<text text-anchor="middle" x="220" y="-921.8" font-family="Times,serif" font-size="14.00">`@grahamcofborg sample 10`</text>
+</g>
+<!-- does the PR change stdenv? -->
+<g id="node2" class="node"><title>does the PR change stdenv?</title>
+<ellipse fill="none" stroke="black" cx="220" cy="-850" rx="109.261" ry="18"/>
+<text text-anchor="middle" x="220" y="-846.3" font-family="Times,serif" font-size="14.00">does the PR change stdenv?</text>
+</g>
+<!-- PR approved to build,\n`@grahamcofborg sample 10`&#45;&gt;does the PR change stdenv? -->
+<g id="edge1" class="edge"><title>PR approved to build,\n`@grahamcofborg sample 10`&#45;&gt;does the PR change stdenv?</title>
+<path fill="none" stroke="black" d="M220,-905.925C220,-897.114 220,-887.254 220,-878.347"/>
+<polygon fill="black" stroke="black" points="223.5,-878.088 220,-868.088 216.5,-878.088 223.5,-878.088"/>
+</g>
+<!-- refuse, won&#39;t do\nmass rebuilds -->
+<g id="node3" class="node"><title>refuse, won&#39;t do\nmass rebuilds</title>
+<ellipse fill="none" stroke="black" cx="166" cy="-753" rx="73.3085" ry="26.7407"/>
+<text text-anchor="middle" x="166" y="-756.8" font-family="Times,serif" font-size="14.00">refuse, won&#39;t do</text>
+<text text-anchor="middle" x="166" y="-741.8" font-family="Times,serif" font-size="14.00">mass rebuilds</text>
+</g>
+<!-- does the PR change stdenv?&#45;&gt;refuse, won&#39;t do\nmass rebuilds -->
+<g id="edge2" class="edge"><title>does the PR change stdenv?&#45;&gt;refuse, won&#39;t do\nmass rebuilds</title>
+<path fill="none" stroke="black" d="M210.346,-832.016C203.462,-819.906 193.97,-803.207 185.534,-788.365"/>
+<polygon fill="black" stroke="black" points="188.434,-786.385 180.45,-779.421 182.349,-789.844 188.434,-786.385"/>
+<text text-anchor="middle" x="208.5" y="-802.3" font-family="Times,serif" font-size="14.00">yes</text>
+</g>
+<!-- stdenv already built? -->
+<g id="node4" class="node"><title>stdenv already built?</title>
+<ellipse fill="none" stroke="black" cx="342" cy="-753" rx="84.2388" ry="18"/>
+<text text-anchor="middle" x="342" y="-749.3" font-family="Times,serif" font-size="14.00">stdenv already built?</text>
+</g>
+<!-- does the PR change stdenv?&#45;&gt;stdenv already built? -->
+<g id="edge3" class="edge"><title>does the PR change stdenv?&#45;&gt;stdenv already built?</title>
+<path fill="none" stroke="black" d="M241.53,-832.235C261.304,-816.837 290.776,-793.888 312.691,-776.822"/>
+<polygon fill="black" stroke="black" points="314.945,-779.503 320.685,-770.598 310.644,-773.98 314.945,-779.503"/>
+<text text-anchor="middle" x="291" y="-802.3" font-family="Times,serif" font-size="14.00">no</text>
+</g>
+<!-- Comment:\n(stdenv not available, will retry later.) -->
+<g id="node5" class="node"><title>Comment:\n(stdenv not available, will retry later.)</title>
+<ellipse fill="none" stroke="black" cx="204" cy="-647" rx="158.088" ry="26.7407"/>
+<text text-anchor="middle" x="204" y="-650.8" font-family="Times,serif" font-size="14.00">Comment:</text>
+<text text-anchor="middle" x="204" y="-635.8" font-family="Times,serif" font-size="14.00">(stdenv not available, will retry later.)</text>
+</g>
+<!-- stdenv already built?&#45;&gt;Comment:\n(stdenv not available, will retry later.) -->
+<g id="edge4" class="edge"><title>stdenv already built?&#45;&gt;Comment:\n(stdenv not available, will retry later.)</title>
+<path fill="none" stroke="black" d="M320.133,-735.521C300.195,-720.495 270.199,-697.889 245.687,-679.416"/>
+<polygon fill="black" stroke="black" points="247.7,-676.55 237.607,-673.327 243.487,-682.141 247.7,-676.55"/>
+<text text-anchor="middle" x="288" y="-696.3" font-family="Times,serif" font-size="14.00">no</text>
+</g>
+<!-- proceed with build -->
+<g id="node9" class="node"><title>proceed with build</title>
+<ellipse fill="none" stroke="black" cx="358" cy="-314" rx="77.0154" ry="18"/>
+<text text-anchor="middle" x="358" y="-310.3" font-family="Times,serif" font-size="14.00">proceed with build</text>
+</g>
+<!-- stdenv already built?&#45;&gt;proceed with build -->
+<g id="edge10" class="edge"><title>stdenv already built?&#45;&gt;proceed with build</title>
+<path fill="none" stroke="black" d="M357.614,-735.048C373.874,-715.569 397,-681.905 397,-648 397,-648 397,-648 397,-401 397,-379.145 386.424,-356.73 376.287,-340.265"/>
+<polygon fill="black" stroke="black" points="379.152,-338.252 370.77,-331.772 373.282,-342.065 379.152,-338.252"/>
+<text text-anchor="middle" x="406.5" y="-516.3" font-family="Times,serif" font-size="14.00">yes</text>
+</g>
+<!-- put the build job back to the delayed build queue -->
+<g id="node6" class="node"><title>put the build job back to the delayed build queue</title>
+<ellipse fill="none" stroke="black" cx="184" cy="-564" rx="184.375" ry="18"/>
+<text text-anchor="middle" x="184" y="-560.3" font-family="Times,serif" font-size="14.00">put the build job back to the delayed build queue</text>
+</g>
+<!-- Comment:\n(stdenv not available, will retry later.)&#45;&gt;put the build job back to the delayed build queue -->
+<g id="edge5" class="edge"><title>Comment:\n(stdenv not available, will retry later.)&#45;&gt;put the build job back to the delayed build queue</title>
+<path fill="none" stroke="black" d="M197.562,-619.925C195.362,-611.017 192.898,-601.037 190.68,-592.053"/>
+<polygon fill="black" stroke="black" points="194.015,-590.958 188.219,-582.088 187.219,-592.636 194.015,-590.958"/>
+</g>
+<!-- 5 min later... -->
+<g id="node7" class="node"><title>5 min later...</title>
+<ellipse fill="none" stroke="black" cx="184" cy="-476" rx="57.2927" ry="18"/>
+<text text-anchor="middle" x="184" y="-472.3" font-family="Times,serif" font-size="14.00">5 min later...</text>
+</g>
+<!-- put the build job back to the delayed build queue&#45;&gt;5 min later... -->
+<g id="edge6" class="edge"><title>put the build job back to the delayed build queue&#45;&gt;5 min later...</title>
+<path fill="none" stroke="black" d="M184,-545.597C184,-533.746 184,-517.817 184,-504.292"/>
+<polygon fill="black" stroke="black" points="187.5,-504.084 184,-494.084 180.5,-504.084 187.5,-504.084"/>
+</g>
+<!-- stdenv built yet? -->
+<g id="node8" class="node"><title>stdenv built yet?</title>
+<ellipse fill="none" stroke="black" cx="277" cy="-402" rx="69.3156" ry="18"/>
+<text text-anchor="middle" x="277" y="-398.3" font-family="Times,serif" font-size="14.00">stdenv built yet?</text>
+</g>
+<!-- 5 min later...&#45;&gt;stdenv built yet? -->
+<g id="edge7" class="edge"><title>5 min later...&#45;&gt;stdenv built yet?</title>
+<path fill="none" stroke="black" d="M204.64,-459.021C217.433,-449.116 234.057,-436.246 248.144,-425.341"/>
+<polygon fill="black" stroke="black" points="250.332,-428.073 256.096,-419.183 246.047,-422.538 250.332,-428.073"/>
+</g>
+<!-- stdenv built yet?&#45;&gt;put the build job back to the delayed build queue -->
+<g id="edge8" class="edge"><title>stdenv built yet?&#45;&gt;put the build job back to the delayed build queue</title>
+<path fill="none" stroke="black" d="M274.426,-420.06C271.007,-439.101 263.73,-470.339 250,-494 240.193,-510.899 225.426,-526.864 212.345,-539.173"/>
+<polygon fill="black" stroke="black" points="209.916,-536.651 204.888,-545.977 214.634,-541.822 209.916,-536.651"/>
+<text text-anchor="middle" x="272" y="-472.3" font-family="Times,serif" font-size="14.00">no</text>
+</g>
+<!-- stdenv built yet?&#45;&gt;proceed with build -->
+<g id="edge9" class="edge"><title>stdenv built yet?&#45;&gt;proceed with build</title>
+<path fill="none" stroke="black" d="M292.618,-384.418C304.642,-371.652 321.445,-353.812 335.005,-339.415"/>
+<polygon fill="black" stroke="black" points="337.845,-341.503 342.154,-331.824 332.75,-336.704 337.845,-341.503"/>
+<text text-anchor="middle" x="333.5" y="-354.3" font-family="Times,serif" font-size="14.00">yes</text>
+</g>
+<!-- diff prev commit&#39;s drvs with current commit&#39;s drvs -->
+<g id="node10" class="node"><title>diff prev commit&#39;s drvs with current commit&#39;s drvs</title>
+<ellipse fill="none" stroke="black" cx="358" cy="-240" rx="192.075" ry="18"/>
+<text text-anchor="middle" x="358" y="-236.3" font-family="Times,serif" font-size="14.00">diff prev commit&#39;s drvs with current commit&#39;s drvs</text>
+</g>
+<!-- proceed with build&#45;&gt;diff prev commit&#39;s drvs with current commit&#39;s drvs -->
+<g id="edge11" class="edge"><title>proceed with build&#45;&gt;diff prev commit&#39;s drvs with current commit&#39;s drvs</title>
+<path fill="none" stroke="black" d="M358,-295.937C358,-287.807 358,-277.876 358,-268.705"/>
+<polygon fill="black" stroke="black" points="361.5,-268.441 358,-258.441 354.5,-268.441 361.5,-268.441"/>
+</g>
+<!-- select 10 drvs -->
+<g id="node11" class="node"><title>select 10 drvs</title>
+<ellipse fill="none" stroke="black" cx="358" cy="-166" rx="59.1941" ry="18"/>
+<text text-anchor="middle" x="358" y="-162.3" font-family="Times,serif" font-size="14.00">select 10 drvs</text>
+</g>
+<!-- diff prev commit&#39;s drvs with current commit&#39;s drvs&#45;&gt;select 10 drvs -->
+<g id="edge12" class="edge"><title>diff prev commit&#39;s drvs with current commit&#39;s drvs&#45;&gt;select 10 drvs</title>
+<path fill="none" stroke="black" d="M358,-221.937C358,-213.807 358,-203.876 358,-194.705"/>
+<polygon fill="black" stroke="black" points="361.5,-194.441 358,-184.441 354.5,-194.441 361.5,-194.441"/>
+</g>
+<!-- build them -->
+<g id="node12" class="node"><title>build them</title>
+<ellipse fill="none" stroke="black" cx="358" cy="-92" rx="49.0941" ry="18"/>
+<text text-anchor="middle" x="358" y="-88.3" font-family="Times,serif" font-size="14.00">build them</text>
+</g>
+<!-- select 10 drvs&#45;&gt;build them -->
+<g id="edge13" class="edge"><title>select 10 drvs&#45;&gt;build them</title>
+<path fill="none" stroke="black" d="M358,-147.937C358,-139.807 358,-129.876 358,-120.705"/>
+<polygon fill="black" stroke="black" points="361.5,-120.441 358,-110.441 354.5,-120.441 361.5,-120.441"/>
+</g>
+<!-- comment on github issue with a build report -->
+<g id="node13" class="node"><title>comment on github issue with a build report</title>
+<ellipse fill="none" stroke="black" cx="358" cy="-18" rx="168.478" ry="18"/>
+<text text-anchor="middle" x="358" y="-14.3" font-family="Times,serif" font-size="14.00">comment on github issue with a build report</text>
+</g>
+<!-- build them&#45;&gt;comment on github issue with a build report -->
+<g id="edge14" class="edge"><title>build them&#45;&gt;comment on github issue with a build report</title>
+<path fill="none" stroke="black" d="M358,-73.937C358,-65.8072 358,-55.8761 358,-46.7047"/>
+<polygon fill="black" stroke="black" points="361.5,-46.4406 358,-36.4407 354.5,-46.4407 361.5,-46.4406"/>
+</g>
+</g>
+</svg>
diff --git a/ofborg/example.config.json b/ofborg/example.config.json
new file mode 100644
index 0000000000..0344d169ea
--- /dev/null
+++ b/ofborg/example.config.json
@@ -0,0 +1,24 @@
+{
+ "runner": {
+ "identity": "...",
+ "repos": ["project-tick/Project-Tick"],
+ "trusted_users": []
+ },
+ "checkout": {
+ "root": "/home/user/.tickborg-checkout"
+ },
+ "build": {
+ "system": "x86_64-linux",
+ "build_timeout_seconds": 1800
+ },
+ "rabbitmq": {
+ "ssl": true,
+ "host": "events.tickborg.project-tick.net",
+ "virtualhost": "tickborg",
+ "username": "...",
+ "password": "..."
+ },
+ "feedback": {
+ "full_logs": true
+ }
+}
diff --git a/ofborg/flake.lock b/ofborg/flake.lock
new file mode 100644
index 0000000000..bcd8a472a6
--- /dev/null
+++ b/ofborg/flake.lock
@@ -0,0 +1,27 @@
+{
+ "nodes": {
+ "nixpkgs": {
+ "locked": {
+ "lastModified": 1775002709,
+ "narHash": "sha256-d3Yx83vSrN+2z/loBh4mJpyRqr9aAJqlke4TkpFmRJA=",
+ "owner": "nixos",
+ "repo": "nixpkgs",
+ "rev": "bcd464ccd2a1a7cd09aa2f8d4ffba83b761b1d0e",
+ "type": "github"
+ },
+ "original": {
+ "owner": "nixos",
+ "ref": "nixos-25.11",
+ "repo": "nixpkgs",
+ "type": "github"
+ }
+ },
+ "root": {
+ "inputs": {
+ "nixpkgs": "nixpkgs"
+ }
+ }
+ },
+ "root": "root",
+ "version": 7
+}
diff --git a/ofborg/flake.nix b/ofborg/flake.nix
new file mode 100644
index 0000000000..3b1bcf5202
--- /dev/null
+++ b/ofborg/flake.nix
@@ -0,0 +1,137 @@
+{
+ inputs = {
+ nixpkgs.url = "github:nixos/nixpkgs/nixos-25.11";
+ };
+
+ outputs =
+ {
+ self,
+ nixpkgs,
+ ...
+ }@inputs:
+ let
+ supportedSystems = [
+ "aarch64-darwin"
+ "x86_64-darwin"
+ "x86_64-linux"
+ "aarch64-linux"
+ ];
+ forAllSystems = f: nixpkgs.lib.genAttrs supportedSystems (system: f system);
+ in
+ {
+ devShell = forAllSystems (system: inputs.self.devShells.${system}.default);
+ devShells = forAllSystems (
+ system:
+ let
+ pkgs = import nixpkgs {
+ inherit system;
+ };
+ in
+ {
+ default = pkgs.mkShell {
+ name = "tickborg-dev";
+ nativeBuildInputs = with pkgs; [
+ bash
+ rustc
+ cargo
+ clippy
+ rustfmt
+ pkg-config
+ git
+ cmake
+ ];
+ buildInputs =
+ with pkgs;
+ lib.optionals stdenv.isDarwin [
+ darwin.Security
+ libiconv
+ ];
+
+ postHook = ''
+ checkPhase() (
+ cd "${builtins.toString ./.}/ofborg"
+ set -x
+ cargo fmt
+ git diff --exit-code
+ cargofmtexit=$?
+
+ cargo clippy
+ cargoclippyexit=$?
+
+ cargo build && cargo test
+ cargotestexit=$?
+
+ sum=$((cargofmtexit + cargoclippyexit + cargotestexit))
+ exit $sum
+ )
+ '';
+
+ RUSTFLAGS = "-D warnings";
+ RUST_BACKTRACE = "1";
+ RUST_LOG = "tickborg=debug";
+ };
+ }
+ );
+
+ packages = forAllSystems (
+ system:
+ let
+ pkgs = import nixpkgs { inherit system; };
+
+ pkg = pkgs.rustPlatform.buildRustPackage {
+ name = "tickborg";
+ src = pkgs.nix-gitignore.gitignoreSource [ ] ./.;
+
+ nativeBuildInputs = with pkgs; [
+ pkg-config
+ pkgs.rustPackages.clippy
+ ];
+
+ preBuild = ''
+ cargo clippy
+ '';
+
+ doCheck = false;
+ checkInputs = with pkgs; [ ];
+
+ cargoLock = {
+ lockFile = ./Cargo.lock;
+ outputHashes = {
+ "hubcaps-0.6.2" = "sha256-Vl4wQIKQVRxkpQxL8fL9rndAN3TKLV4OjgnZOpT6HRo=";
+ "hyperx-1.4.0" = "sha256-MW/KxxMYvj/DYVKrYa7rDKwrH6s8uQOCA0dR2W7GBeg=";
+ };
+ };
+ };
+
+ in
+ {
+ inherit pkg;
+ default = pkg;
+
+ tickborg = pkgs.runCommand "tickborg-rs" { src = pkg; } ''
+ mkdir -p $out/bin
+ for f in $(find $src -type f); do
+ bn=$(basename "$f")
+ ln -s "$f" "$out/bin/$bn"
+
+ # Cargo outputs bins with dashes; create underscore symlinks
+ if echo "$bn" | grep -q "-"; then
+ ln -s "$f" "$out/bin/$(echo "$bn" | tr '-' '_')"
+ fi
+ done
+
+ test -e $out/bin/builder
+ test -e $out/bin/github_comment_filter
+ test -e $out/bin/github_comment_poster
+ test -e $out/bin/github_webhook_receiver
+ test -e $out/bin/log_message_collector
+ test -e $out/bin/evaluation_filter
+ '';
+ }
+ );
+
+ hydraJobs = {
+ buildRs = forAllSystems (system: self.packages.${system}.tickborg);
+ };
+ };
+}
diff --git a/ofborg/ofborg-simple-build/Cargo.toml b/ofborg/ofborg-simple-build/Cargo.toml
new file mode 100644
index 0000000000..65d55f6e1a
--- /dev/null
+++ b/ofborg/ofborg-simple-build/Cargo.toml
@@ -0,0 +1,10 @@
+[package]
+name = "tickborg-simple-build"
+version = "0.1.0"
+authors = ["Project Tick Contributors"]
+edition = "2024"
+
+[dependencies]
+tickborg = { path = "../tickborg" }
+
+log = "0.4.25"
diff --git a/ofborg/ofborg/Cargo.toml b/ofborg/ofborg/Cargo.toml
new file mode 100644
index 0000000000..459c06459e
--- /dev/null
+++ b/ofborg/ofborg/Cargo.toml
@@ -0,0 +1,46 @@
+[package]
+name = "tickborg"
+version = "0.1.0"
+authors = ["Project Tick Contributors"]
+build = "build.rs"
+edition = "2024"
+description = "Distributed CI bot for Project Tick monorepo"
+license = "MIT"
+
+[dependencies]
+async-trait = "0.1.89"
+brace-expand = "0.1.0"
+chrono = { version = "0.4.38", default-features = false, features = [
+ "clock",
+ "std",
+] }
+either = "1.13.0"
+fs2 = "0.4.3"
+futures = "0.3.31"
+futures-util = "0.3.31"
+hex = "0.4.3"
+hmac = "0.13.0"
+http = "1"
+http-body-util = "0.1"
+#hubcaps = "0.6"
+# for Conclusion::Skipped which is in master
+hubcaps = { git = "https://github.com/ofborg/hubcaps.git", rev = "0d7466ef941a7a8e160c071e2846e56b90b6ea86" }
+hyper = { version = "1.0", features = ["full", "server", "http1"] }
+hyper-util = { version = "0.1", features = ["server", "tokio", "http1"] }
+lapin = "4.3.0"
+lru-cache = "0.1.2"
+md5 = "0.8.0"
+mime = "0.3"
+nom = "8"
+parking_lot = "0.12.4"
+regex = "1.11.1"
+rustls-pki-types = "1.14"
+serde = { version = "1.0.217", features = ["derive"] }
+serde_json = "1.0.135"
+sha2 = "0.11.0"
+tempfile = "3.15.0"
+tokio = { version = "1", features = ["rt-multi-thread", "net", "macros", "sync"] }
+tokio-stream = "0.1"
+tracing = "0.1.41"
+tracing-subscriber = { version = "0.3.19", features = ["json", "env-filter"] }
+uuid = { version = "1.12", features = ["v4"] }
diff --git a/ofborg/service.nix b/ofborg/service.nix
new file mode 100644
index 0000000000..8290925006
--- /dev/null
+++ b/ofborg/service.nix
@@ -0,0 +1,197 @@
{ config, lib, pkgs, ... }:

let
  cfg = config.services.tickborg;

  # FIX: `import ./flake.nix` evaluates to the raw flake attrset
  # ({ inputs, outputs, ... }) which has no `packages` attribute.
  # Evaluate the flake through flake-compat instead — the same
  # mechanism this repository's shell.nix uses.
  tickborg = (import
    (fetchTarball {
      url = "https://github.com/edolstra/flake-compat/archive/0f9255e01c2351cc7d116c072cb317785dd33b33.tar.gz";
      sha256 = "0m9grvfsbwmvgwaxvdzv6cmyvjnlww004gfxjvcl806ndqaxzy4j";
    })
    { src = ./.; }).defaultNix.packages.${pkgs.system}.tickborg;

  # systemd settings shared by every service (hardening, restart
  # policy, writable state/log dirs, Rust logging env).
  commonServiceConfig = {
    User = "tickborg";
    Group = "tickborg";
    PrivateTmp = true;
    ProtectSystem = "strict";
    ProtectHome = true;
    NoNewPrivileges = true;
    ReadWritePaths = [
      "/var/lib/tickborg"
      "/var/log/tickborg"
    ];
    WorkingDirectory = "/var/lib/tickborg";
    Restart = "always";
    RestartSec = 10;
    Environment = [
      "RUST_LOG=tickborg=info"
      "RUST_BACKTRACE=1"
    ];
  };

  # Unit attributes shared by every service; `extraConfig` supplies the
  # binary name and optional serviceConfig overrides.
  mkTickborgService = name: extraConfig: {
    enable = cfg.enable;
    after = [ "network-online.target" "rabbitmq.service" ];
    wants = [ "network-online.target" ];
    wantedBy = [ "multi-user.target" ];
    description = "Tickborg ${name}";

    # Toolchains the workers shell out to when building projects.
    path = with pkgs; [
      git
      bash
      cmake
      gnumake
      gcc
      pkg-config
      # For Meson
      meson
      ninja
      # For Autotools
      autoconf
      automake
      libtool
      # For Java/Gradle
      jdk17
      # For Cargo
      rustc
      cargo
    ];

    serviceConfig = commonServiceConfig // (extraConfig.serviceConfig or {});

    script = ''
      export HOME=/var/lib/tickborg
      git config --global user.email "tickborg@project-tick.dev"
      git config --global user.name "TickBorg"
      exec ${tickborg}/bin/${extraConfig.binary} ${cfg.configFile}
    '';
  };

in {
  options.services.tickborg = {
    enable = lib.mkEnableOption "Tickborg CI bot servisleri";

    configFile = lib.mkOption {
      type = lib.types.path;
      default = "/etc/tickborg/config.json";
      description = "Tickborg yapilandirma dosyasi yolu";
    };

    enableBuilder = lib.mkOption {
      type = lib.types.bool;
      default = true;
      description = "Builder servisini etkinlestir";
    };

    enableWebhook = lib.mkOption {
      type = lib.types.bool;
      default = true;
      description = "Webhook receiver servisini etkinlestir";
    };

    enableEvaluationFilter = lib.mkOption {
      type = lib.types.bool;
      default = true;
      description = "Evaluation filter servisini etkinlestir";
    };

    enableCommentFilter = lib.mkOption {
      type = lib.types.bool;
      default = true;
      description = "Comment filter servisini etkinlestir";
    };

    enableCommentPoster = lib.mkOption {
      type = lib.types.bool;
      default = true;
      description = "Comment poster servisini etkinlestir";
    };

    enableMassRebuilder = lib.mkOption {
      type = lib.types.bool;
      default = false;
      description = "Mass rebuilder servisini etkinlestir";
    };

    enableLogCollector = lib.mkOption {
      type = lib.types.bool;
      default = true;
      description = "Log collector servisini etkinlestir";
    };

    enableStats = lib.mkOption {
      type = lib.types.bool;
      default = true;
      description = "Stats (Prometheus metrics) servisini etkinlestir";
    };
  };

  config = lib.mkIf cfg.enable {
    users.users.tickborg = {
      description = "Tickborg CI Bot";
      home = "/var/lib/tickborg";
      createHome = true;
      group = "tickborg";
      isSystemUser = true;
    };
    users.groups.tickborg = {};

    systemd.tmpfiles.rules = [
      "d /var/lib/tickborg 0750 tickborg tickborg -"
      "d /var/lib/tickborg/checkout 0750 tickborg tickborg -"
      "d /var/log/tickborg 0750 tickborg tickborg -"
    ];

    # FIX: binary names must match the hyphenated file names under
    # tickborg/src/bin/ — cargo derives each bin name from its file
    # stem, so e.g. github-webhook-receiver.rs produces a binary named
    # "github-webhook-receiver", not "github_webhook_receiver".
    systemd.services = lib.mkMerge [
      (lib.mkIf cfg.enableWebhook {
        "tickborg-webhook-receiver" = mkTickborgService "Webhook Receiver" {
          binary = "github-webhook-receiver";
        };
      })

      (lib.mkIf cfg.enableEvaluationFilter {
        "tickborg-evaluation-filter" = mkTickborgService "Evaluation Filter" {
          binary = "evaluation-filter";
        };
      })

      (lib.mkIf cfg.enableCommentFilter {
        "tickborg-comment-filter" = mkTickborgService "Comment Filter" {
          binary = "github-comment-filter";
        };
      })

      (lib.mkIf cfg.enableCommentPoster {
        "tickborg-comment-poster" = mkTickborgService "Comment Poster" {
          binary = "github-comment-poster";
        };
      })

      (lib.mkIf cfg.enableBuilder {
        "tickborg-builder" = mkTickborgService "Builder" {
          binary = "builder";
          serviceConfig = {
            # The builder may use more resources than the other workers
            MemoryMax = "8G";
            CPUQuota = "400%";
          };
        };
      })

      (lib.mkIf cfg.enableMassRebuilder {
        "tickborg-mass-rebuilder" = mkTickborgService "Mass Rebuilder" {
          binary = "mass-rebuilder";
        };
      })

      (lib.mkIf cfg.enableLogCollector {
        "tickborg-log-collector" = mkTickborgService "Log Collector" {
          binary = "log-message-collector";
        };
      })

      (lib.mkIf cfg.enableStats {
        "tickborg-stats" = mkTickborgService "Stats" {
          binary = "stats";
        };
      })
    ];
  };
}
diff --git a/ofborg/shell.nix b/ofborg/shell.nix
new file mode 100644
index 0000000000..0a83ea3110
--- /dev/null
+++ b/ofborg/shell.nix
@@ -0,0 +1,6 @@
# Non-flake entry point: evaluates the flake via flake-compat and
# exposes its devShell, so plain `nix-shell` keeps working.
let
  flake-compat = fetchTarball {
    url = "https://github.com/edolstra/flake-compat/archive/0f9255e01c2351cc7d116c072cb317785dd33b33.tar.gz";
    sha256 = "0m9grvfsbwmvgwaxvdzv6cmyvjnlww004gfxjvcl806ndqaxzy4j";
  };
in
(import flake-compat { src = ./.; }).shellNix
diff --git a/ofborg/tickborg-simple-build/Cargo.toml b/ofborg/tickborg-simple-build/Cargo.toml
new file mode 100644
index 0000000000..65d55f6e1a
--- /dev/null
+++ b/ofborg/tickborg-simple-build/Cargo.toml
@@ -0,0 +1,10 @@
# Smoke-test binary crate: loads a tickborg config and runs a single
# build through the build executor (see src/main.rs).
[package]
name = "tickborg-simple-build"
version = "0.1.0"
authors = ["Project Tick Contributors"]
edition = "2024"

[dependencies]
# Core library crate providing config::load and the build executor.
tickborg = { path = "../tickborg" }

log = "0.4.25"
diff --git a/ofborg/tickborg-simple-build/src/main.rs b/ofborg/tickborg-simple-build/src/main.rs
new file mode 100644
index 0000000000..1d88bb1a05
--- /dev/null
+++ b/ofborg/tickborg-simple-build/src/main.rs
@@ -0,0 +1,36 @@
+extern crate log;
+
+use std::env;
+use std::path::Path;
+
+use tickborg::buildtool;
+use tickborg::config;
+
+fn main() {
+ tickborg::setup_log();
+
+ log::info!("Loading config...");
+ let cfg = config::load(env::args().nth(1).unwrap().as_ref());
+ let executor = cfg.build_executor();
+
+ log::info!("Running build...");
+ // Build the first known project as a smoke test
+ if let Some(project) = buildtool::known_projects().first() {
+ match executor.build_project(Path::new("./"), project) {
+ Ok(mut output) => {
+ use std::io::Read;
+ let mut buf = String::new();
+ output.read_to_string(&mut buf).ok();
+ print!("{buf}");
+ }
+ Err(mut output) => {
+ use std::io::Read;
+ let mut buf = String::new();
+ output.read_to_string(&mut buf).ok();
+ eprintln!("Build failed:\n{buf}");
+ }
+ }
+ } else {
+ log::error!("No projects configured");
+ }
+}
diff --git a/ofborg/tickborg/.gitignore b/ofborg/tickborg/.gitignore
new file mode 100644
index 0000000000..a587f156a2
--- /dev/null
+++ b/ofborg/tickborg/.gitignore
@@ -0,0 +1,6 @@
+target
+rust-amqp
+test-scratch
+*.bk
+rust-amq-proto
+private-key.pem
diff --git a/ofborg/tickborg/Cargo.toml b/ofborg/tickborg/Cargo.toml
new file mode 100644
index 0000000000..459c06459e
--- /dev/null
+++ b/ofborg/tickborg/Cargo.toml
@@ -0,0 +1,46 @@
# Manifest for the core tickborg crate (library plus the src/bin/*
# worker binaries).
[package]
name = "tickborg"
version = "0.1.0"
authors = ["Project Tick Contributors"]
# build.rs generates the metrics Event enum / MetricCollector code.
build = "build.rs"
edition = "2024"
description = "Distributed CI bot for Project Tick monorepo"
license = "MIT"

[dependencies]
async-trait = "0.1.89"
brace-expand = "0.1.0"
chrono = { version = "0.4.38", default-features = false, features = [
    "clock",
    "std",
] }
either = "1.13.0"
fs2 = "0.4.3"
futures = "0.3.31"
futures-util = "0.3.31"
hex = "0.4.3"
hmac = "0.13.0"
http = "1"
http-body-util = "0.1"
#hubcaps = "0.6"
# for Conclusion::Skipped which is in master
hubcaps = { git = "https://github.com/ofborg/hubcaps.git", rev = "0d7466ef941a7a8e160c071e2846e56b90b6ea86" }
hyper = { version = "1.0", features = ["full", "server", "http1"] }
hyper-util = { version = "0.1", features = ["server", "tokio", "http1"] }
lapin = "4.3.0"
lru-cache = "0.1.2"
md5 = "0.8.0"
mime = "0.3"
nom = "8"
parking_lot = "0.12.4"
regex = "1.11.1"
rustls-pki-types = "1.14"
serde = { version = "1.0.217", features = ["derive"] }
serde_json = "1.0.135"
sha2 = "0.11.0"
tempfile = "3.15.0"
tokio = { version = "1", features = ["rt-multi-thread", "net", "macros", "sync"] }
tokio-stream = "0.1"
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["json", "env-filter"] }
uuid = { version = "1.12", features = ["v4"] }
diff --git a/ofborg/tickborg/build.rs b/ofborg/tickborg/build.rs
new file mode 100644
index 0000000000..23c19933ed
--- /dev/null
+++ b/ofborg/tickborg/build.rs
@@ -0,0 +1,566 @@
+use std::env;
+use std::fs::File;
+use std::io::Write;
+use std::path::Path;
+
/// How a metric's value is recorded by the generated code: a Ticker
/// increments by 1 per event, a Counter adds a caller-supplied u64.
/// Both are rendered as Prometheus "counter" types.
enum MetricType {
    Ticker(Metric),
    Counter(Metric),
}
+
+impl MetricType {
+ fn collector_type(&self) -> String {
+ match self {
+ MetricType::Ticker(_) => String::from("u64"),
+ MetricType::Counter(_) => String::from("u64"),
+ }
+ }
+
+ fn enum_matcher_types(&self) -> String {
+ let fields = self.enum_field_types();
+
+ if !fields.is_empty() {
+ format!("{}({})", self.variant(), fields.join(", "))
+ } else {
+ self.variant()
+ }
+ }
+
+ fn variant(&self) -> String {
+ match self {
+ MetricType::Ticker(event) => event.variant.clone(),
+ MetricType::Counter(event) => event.variant.clone(),
+ }
+ }
+
+ fn metric_type(&self) -> String {
+ match self {
+ MetricType::Ticker(_) => String::from("counter"),
+ MetricType::Counter(_) => String::from("counter"),
+ }
+ }
+
+ fn metric_name(&self) -> String {
+ match self {
+ MetricType::Ticker(event) => event.metric_name.clone(),
+ MetricType::Counter(event) => event.metric_name.clone(),
+ }
+ }
+
+ fn description(&self) -> String {
+ match self {
+ MetricType::Ticker(event) => event.description.clone(),
+ MetricType::Counter(event) => event.description.clone(),
+ }
+ }
+
+ fn enum_index_types(&self) -> Vec<String> {
+ let event: &Metric = match self {
+ MetricType::Ticker(i_event) => i_event,
+ MetricType::Counter(i_event) => i_event,
+ };
+
+ let fields: Vec<String> = event
+ .fields
+ .iter()
+ .map(|(_fieldname, fieldtype)| fieldtype.clone())
+ .collect();
+
+ fields
+ }
+
+ fn enum_field_types(&self) -> Vec<String> {
+ let mut extra_fields: Vec<String> = vec![];
+
+ match self {
+ MetricType::Ticker(_) => {}
+ MetricType::Counter(_) => {
+ extra_fields = vec![self.collector_type()];
+ }
+ }
+
+ let mut fields: Vec<String> = self.enum_index_types();
+ fields.append(&mut extra_fields);
+
+ fields
+ }
+
+ fn enum_index_names(&self) -> Vec<String> {
+ let event: &Metric = match self {
+ MetricType::Ticker(i_event) => i_event,
+ MetricType::Counter(i_event) => i_event,
+ };
+
+ let fields: Vec<String> = event
+ .fields
+ .iter()
+ .map(|(fieldname, _fieldtype)| fieldname.clone())
+ .collect();
+
+ fields
+ }
+
+ fn enum_field_names(&self) -> Vec<String> {
+ let mut extra_fields: Vec<String> = vec![];
+
+ match self {
+ MetricType::Ticker(_) => {}
+ MetricType::Counter(_) => {
+ extra_fields = vec!["value".to_owned()];
+ }
+ }
+
+ let mut fields: Vec<String> = self.enum_index_names();
+ fields.append(&mut extra_fields);
+
+ fields
+ }
+
+ fn record_value(&self) -> String {
+ match self {
+ MetricType::Ticker(_) => String::from("1"),
+ MetricType::Counter(_) => String::from("value"),
+ }
+ }
+}
+
/// Static description of one generated metric.
struct Metric {
    // CamelCase name used as the `Event` enum variant.
    variant: String,
    fields: Vec<(String, String)>, // Vec because it is sorted
    // snake_case name used for storage fields and Prometheus output.
    metric_name: String,
    // Help text emitted on the `# HELP` line.
    description: String,
}
+
/// Split a CamelCase identifier into its capitalized words, e.g.
/// "JobReceived" -> ["Job", "Received"]. A name with no uppercase
/// letters comes back as a single part.
fn name_to_parts(name: &str) -> Vec<String> {
    let mut parts: Vec<String> = Vec::new();
    let mut current = String::new();
    for ch in name.chars() {
        // An uppercase letter starts a new word (unless nothing has
        // accumulated yet, as at the very beginning).
        if ch.is_uppercase() && !current.is_empty() {
            parts.push(std::mem::take(&mut current));
        }
        current.push(ch);
    }
    if !current.is_empty() {
        parts.push(current);
    }
    parts
}
+
+impl Metric {
+ pub fn ticker(name: &str, desc: &str, fields: Option<Vec<(&str, &str)>>) -> MetricType {
+ let parts = name_to_parts(name);
+
+ MetricType::Ticker(Metric {
+ variant: parts.iter().cloned().collect(),
+ fields: fields
+ .unwrap_or_default()
+ .iter()
+ .map(|(fieldname, fieldtype)| ((*fieldname).to_string(), (*fieldtype).to_string()))
+ .collect(),
+ metric_name: parts.join("_").to_lowercase(),
+ description: desc.to_owned(),
+ })
+ }
+
+ pub fn counter(name: &str, desc: &str, fields: Option<Vec<(&str, &str)>>) -> MetricType {
+ let parts = name_to_parts(name);
+
+ MetricType::Counter(Metric {
+ variant: parts.iter().cloned().collect(),
+ fields: fields
+ .unwrap_or_default()
+ .iter()
+ .map(|(fieldname, fieldtype)| ((*fieldname).to_string(), (*fieldtype).to_string()))
+ .collect(),
+ metric_name: parts.join("_").to_lowercase(),
+ description: desc.to_owned(),
+ })
+ }
+}
+
/// Central registry of every metric: build.rs generates the `Event`
/// enum variants, the collector storage and the Prometheus output from
/// this list, so adding a metric here is the single required change.
fn events() -> Vec<MetricType> {
    vec![
        Metric::ticker(
            "StatCollectorLegacyEvent",
            "Number of received legacy events",
            Some(vec![("event", "String")]),
        ),
        Metric::ticker(
            "StatCollectorBogusEvent",
            "Number of received unparseable events",
            None,
        ),
        Metric::ticker("JobReceived", "Number of received worker jobs", None),
        Metric::counter(
            "EvaluationDuration",
            "Amount of time spent running evaluations",
            Some(vec![("branch", "String")]),
        ),
        Metric::ticker(
            "EvaluationDurationCount",
            "Number of timed evaluations performed",
            Some(vec![("branch", "String")]),
        ),
        Metric::ticker(
            "TargetBranchFailsEvaluation",
            "Number of PR evaluations which failed because the target branch failed",
            Some(vec![("branch", "String")]),
        ),
        Metric::ticker(
            "JobDecodeSuccess",
            "Number of successfully decoded jobs",
            None,
        ),
        Metric::ticker(
            "JobDecodeFailure",
            "Number of jobs which failed to parse",
            None,
        ),
        Metric::ticker(
            "IssueAlreadyClosed",
            "Number of jobs for issues which are already closed",
            None,
        ),
        Metric::ticker(
            "IssueFetchFailed",
            "Number of failed fetches for GitHub issues",
            None,
        ),
        Metric::ticker(
            "TaskEvaluationCheckComplete",
            "Number of completed evaluation tasks",
            None,
        ),
        // NOTE(review): the block below is a set of candidate evaluator
        // counters kept commented out — presumably pending a future
        // change; confirm before deleting.
        /*
        Metric::counter(
            "TimeElapsed",
            "",
            None
        ),
        Metric::counter(
            "EnvironmentsAllocatedCount",
            "",
            None
        ),
        Metric::counter(
            "EnvironmentsAllocatedBytes",
            "",
            None
        ),
        Metric::counter(
            "ListElementsCount",
            "",
            None
        ),
        Metric::counter(
            "ListElementsBytes",
            "",
            None
        ),
        Metric::counter(
            "ListConcatenations",
            "",
            None
        ),
        Metric::counter(
            "ValuesAllocatedCount",
            "",
            None
        ),
        Metric::counter(
            "ValuesAllocatedBytes",
            "",
            None
        ),
        Metric::counter(
            "SetsAllocatedCount",
            "",
            None
        ),
        Metric::counter(
            "SetsAllocatedBytes",
            "",
            None
        ),
        Metric::counter(
            "RightBiasedUnions",
            "",
            None
        ),
        Metric::counter(
            "ValuesCopiedInRightBiasedUnions",
            "",
            None
        ),
        Metric::counter(
            "SymbolsInSymbolTable",
            "",
            None
        ),
        Metric::counter(
            "SizeOfSymbolTable",
            "",
            None
        ),
        Metric::counter(
            "NumberOfThunks",
            "",
            None
        ),
        Metric::counter(
            "NumberOfThunksAvoided",
            "",
            None
        ),
        Metric::counter(
            "NumberOfAttrLookups",
            "",
            None
        ),
        Metric::counter(
            "NumberOfPrimopCalls",
            "",
            None
        ),
        Metric::counter(
            "NumberOfFunctionCalls",
            "",
            None
        ),
        Metric::counter(
            "TotalAllocations",
            "",
            None
        ),
        Metric::counter(
            "CurrentBoehmHeapSizeBytes",
            "",
            None
        ),
        Metric::counter(
            "TotalBoehmHeapAllocationsBytes",
            "",
            None
        ),
        */
    ]
}
+
/// Generates $OUT_DIR/events.rs: an `Event` enum plus a
/// `MetricCollector` that stores, records and renders every metric
/// declared in events(). The output is assembled by raw string
/// concatenation, so the exact whitespace and escaping of the literals
/// below is significant to the generated code's formatting.
fn main() {
    let out_dir = env::var("OUT_DIR").unwrap();
    let dest_path = Path::new(&out_dir).join("events.rs");
    let mut f = File::create(dest_path).unwrap();

    println!("cargo:rerun-if-changed=build.rs");

    // Write the Event enum, which contains all possible event types
    f.write_all(
        b"
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::Mutex;
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
#[serde(rename_all=\"kebab-case\")]
pub enum Event {
",
    )
    .unwrap();

    let variants: Vec<String> = events()
        .iter()
        .map(|mtype| format!(" {}", mtype.enum_matcher_types()))
        .collect();

    f.write_all(variants.join(",\n").as_bytes()).unwrap();
    f.write_all(b"\n}\n\n").unwrap();

    // Map each Event variant to its snake_case metric name; payload
    // fields are wildcarded since only the variant matters.
    f.write_all(
        b"pub fn event_metric_name(event: &Event) -> String {
    match *event {
",
    )
    .unwrap();

    let variants: Vec<String> = events()
        .iter()
        .map(|mtype| {
            let fields: Vec<String> = mtype
                .enum_field_names()
                .iter()
                .map(|_| String::from("_"))
                .collect();

            let variant_match = if !fields.is_empty() {
                format!("{}({})", &mtype.variant(), fields.join(", "))
            } else {
                mtype.variant()
            };

            format!(
                " Event::{} => String::from(\"{}\")",
                &variant_match,
                &mtype.metric_name(),
            )
        })
        .collect();

    f.write_all(variants.join(",\n").as_bytes()).unwrap();
    f.write_all(b"}\n }").unwrap();

    // Create a struct to hold all the possible metrics
    f.write_all(
        b"
#[derive(Default, Clone)]
pub struct MetricCollector {
",
    )
    .unwrap();

    // One Arc<Mutex<HashMap>> field per metric, keyed by the metric's
    // label fields plus a trailing instance String.
    let variants: Vec<String> = events()
        .iter()
        .map(|mtype| {
            let mut fields: Vec<String> = mtype.enum_index_types();
            fields.push("String".to_owned()); // Instance
            let fields_str = {
                let s = fields.join(", ");
                // Multiple key fields become a tuple type.
                if fields.len() > 1 {
                    format!("({s})")
                } else {
                    s
                }
            };

            format!(
                " {}: Arc<Mutex<HashMap<{},{}>>>",
                mtype.metric_name(),
                fields_str,
                mtype.collector_type(),
            )
        })
        .collect();

    f.write_all(variants.join(",\n").as_bytes()).unwrap();
    f.write_all(b"\n}\n\n").unwrap();

    // Create a struct to hold all the possible metrics
    f.write_all(
        b"

impl MetricCollector {
    pub fn new() -> MetricCollector {
        Default::default()
    }

    pub fn record(&self, instance: String, event: Event) {
        match event {
",
    )
    .unwrap();

    // One match arm per metric: look up (labels..., instance) in the
    // metric's table and add record_value() (1 or the bound value).
    let variants: Vec<String> = events()
        .iter()
        .map(|mtype| {
            let fields: Vec<String> = mtype.enum_field_names();

            let variant_match = if !fields.is_empty() {
                format!("{}({})", &mtype.variant(), fields.join(", "))
            } else {
                mtype.variant()
            };

            let mut index_names: Vec<String> = mtype.enum_index_names();
            index_names.push("instance".to_owned());

            let mut index_fields = index_names.join(", ");
            if index_names.len() > 1 {
                index_fields = format!("({index_fields})");
            }

            format!(
                "
            Event::{} => {{
                let mut accum_table = self.{}
                    .lock()
                    .expect(\"Failed to unwrap metric mutex for {}\");
                let accum = accum_table
                    .entry({})
                    .or_insert(0);
                *accum += {};
            }}
            ",
                variant_match,
                &mtype.metric_name(),
                &mtype.metric_name(),
                index_fields,
                &mtype.record_value(),
            )
        })
        .collect();

    f.write_all(variants.join(",\n").as_bytes()).unwrap();
    f.write_all(b"\n }\n").unwrap();
    f.write_all(b"\n }\n").unwrap();

    // Render every metric table in Prometheus text exposition format.
    f.write_all(
        b"pub fn prometheus_output(&self) -> String {
        let mut output = String::new();
",
    )
    .unwrap();

    let variants: Vec<String> = events()
        .iter()
        .map(|mtype| {
            let mut index_fields: Vec<String> = mtype.enum_index_names();
            index_fields.push("instance".to_owned());
            let ref_index_fields: Vec<String> = index_fields.clone();

            let for_matcher = if index_fields.len() > 1 {
                format!("({})", ref_index_fields.join(", "))
            } else {
                ref_index_fields.join(", ")
            };

            // label="value" pairs for the metric's key fields.
            let key_value_pairs: Vec<String> = index_fields
                .iter()
                .map(|name| format!(" format!(\"{name}=\\\"{{}}\\\"\", {name})",))
                .collect();
            format!(
                "
        output.push_str(\"# HELP tickborg_{} {}\n\");
        output.push_str(\"# TYPE tickborg_{} {}\n\");

        let table = self.{}.lock()
            .expect(\"Failed to unwrap metric mutex for {}\");
        let values: Vec<String> = (*table)
            .iter()
            .map(|({}, value)| {{
                let kvs: Vec<String> = vec![
{}
                ];
                format!(\"tickborg_{}{{{{{{}}}}}} {{}}\", kvs.join(\",\"), value)
            }})
            .collect();
        output.push_str(&values.join(\"\n\"));
        output.push('\\n');
        ",
                &mtype.metric_name(),
                &mtype.description(),
                &mtype.metric_name(),
                &mtype.metric_type(),
                &mtype.metric_name(),
                &mtype.metric_name(),
                for_matcher,
                &key_value_pairs.join(",\n"),
                &mtype.metric_name(),
            )
        })
        .collect();

    f.write_all(variants.join("\n").as_bytes()).unwrap();
    f.write_all(b"output\n }").unwrap();
    f.write_all(b"\n}").unwrap();
}
diff --git a/ofborg/tickborg/src/acl.rs b/ofborg/tickborg/src/acl.rs
new file mode 100644
index 0000000000..2059b3e08f
--- /dev/null
+++ b/ofborg/tickborg/src/acl.rs
@@ -0,0 +1,59 @@
+use crate::systems::System;
+
/// Access-control list deciding which repos are served and which users
/// may build without restrictions.
pub struct Acl {
    // Lowercased user logins; None disables the trust check entirely.
    trusted_users: Option<Vec<String>>,
    // Repositories (full "owner/name" form) the bot will act on.
    repos: Vec<String>,
}
+
+impl Acl {
+ pub fn new(repos: Vec<String>, mut trusted_users: Option<Vec<String>>) -> Acl {
+ if let Some(ref mut users) = trusted_users {
+ for user in users.iter_mut() {
+ *user = user.to_lowercase();
+ }
+ }
+
+ Acl {
+ trusted_users,
+ repos,
+ }
+ }
+
+ pub fn is_repo_eligible(&self, name: &str) -> bool {
+ self.repos.contains(&name.to_lowercase())
+ }
+
+ pub fn build_job_architectures_for_user_repo(&self, user: &str, repo: &str) -> Vec<System> {
+ if self.can_build_unrestricted(user, repo) {
+ System::all_known_systems()
+ } else {
+ // Non-trusted users can only build on primary platforms
+ System::primary_systems()
+ }
+ }
+
+ pub fn build_job_destinations_for_user_repo(
+ &self,
+ user: &str,
+ repo: &str,
+ ) -> Vec<(Option<String>, Option<String>)> {
+ self.build_job_architectures_for_user_repo(user, repo)
+ .iter()
+ .map(|system| system.as_build_destination())
+ .collect()
+ }
+
+ pub fn can_build_unrestricted(&self, user: &str, repo: &str) -> bool {
+ if let Some(ref users) = self.trusted_users {
+ if repo.to_lowercase().starts_with("project-tick/") {
+ users.contains(&user.to_lowercase())
+ } else {
+ false
+ }
+ } else {
+ // If trusted_users is disabled (and thus None), everybody can build
+ // unrestricted
+ true
+ }
+ }
+}
diff --git a/ofborg/tickborg/src/asynccmd.rs b/ofborg/tickborg/src/asynccmd.rs
new file mode 100644
index 0000000000..52cd20da8b
--- /dev/null
+++ b/ofborg/tickborg/src/asynccmd.rs
@@ -0,0 +1,293 @@
+use std::collections::HashMap;
+use std::io::{self, BufRead, BufReader, Read};
+use std::process::{Child, Command, ExitStatus, Stdio};
+use std::sync::mpsc::{self, Receiver, SyncSender, sync_channel};
+use std::thread::{self, JoinHandle};
+
+use tracing::{debug, error, info};
+
// Specifically set to fall under 1/2 of the AMQP library's
// SyncSender limitation.
const OUT_CHANNEL_BUFFER_SIZE: usize = 30;

// The waiter channel should never be over 3 items: process, stderr,
// stdout, and thusly probably could be unbounded just fine, but what
// the heck.
const WAITER_CHANNEL_BUFFER_SIZE: usize = 10;

/// Builder wrapping a std::process::Command to be spawned with piped
/// stdout/stderr whose lines are forwarded over a channel.
pub struct AsyncCmd {
    command: Command,
}

/// Handle to a spawned command: `rx` yields interleaved stdout/stderr
/// lines, `waiter` resolves to the process's final exit status.
pub struct SpawnedAsyncCmd {
    waiter: JoinHandle<Option<Result<ExitStatus, io::Error>>>,
    rx: Receiver<String>,
}

// Identifies which of the three monitored tasks finished.
#[derive(Debug, Hash, PartialEq, Eq)]
enum WaitTarget {
    Stderr,
    Stdout,
    Child,
}

// Outcome reported by a monitor thread: a reader thread's join result,
// or the child process's exit status.
#[derive(Debug)]
enum WaitResult<T> {
    Thread(thread::Result<T>),
    Process(Result<ExitStatus, io::Error>),
}
+
+fn reader_tx<R: 'static + Read + Send>(read: R, tx: SyncSender<String>) -> thread::JoinHandle<()> {
+ let read = BufReader::new(read);
+
+ thread::spawn(move || {
+ for line in read.lines() {
+ let to_send: String = match line {
+ Ok(line) => line,
+ Err(e) => {
+ error!("Error reading data in reader_tx: {:?}", e);
+ "Non-UTF8 data omitted from the log.".to_owned()
+ }
+ };
+
+ if let Err(e) = tx.send(to_send) {
+ error!("Failed to send log line: {:?}", e);
+ }
+ }
+ })
+}
+
+fn spawn_join<T: Send + 'static>(
+ id: WaitTarget,
+ tx: SyncSender<(WaitTarget, WaitResult<T>)>,
+ waiting_on: thread::JoinHandle<T>,
+) -> thread::JoinHandle<()> {
+ thread::spawn(move || {
+ if let Err(e) = tx.send((id, WaitResult::Thread(waiting_on.join()))) {
+ error!("Failed to send message to the thread waiter: {:?}", e);
+ }
+ })
+}
+
+fn child_wait<T: Send + 'static>(
+ id: WaitTarget,
+ tx: SyncSender<(WaitTarget, WaitResult<T>)>,
+ mut waiting_on: Child,
+) -> thread::JoinHandle<()> {
+ thread::spawn(move || {
+ if let Err(e) = tx.send((id, WaitResult::Process(waiting_on.wait()))) {
+ error!("Failed to send message to the thread waiter: {:?}", e);
+ }
+ })
+}
+
impl AsyncCmd {
    /// Wrap an already-configured Command; its stdio settings are
    /// overridden at spawn time.
    pub fn new(cmd: Command) -> AsyncCmd {
        AsyncCmd { command: cmd }
    }

    /// Spawn the command with stdin closed and stdout/stderr piped,
    /// plus monitor threads: one reader per output stream feeding the
    /// shared line channel, one waiter for the process itself, and a
    /// head waiter joining all three and keeping the exit status.
    pub fn spawn(mut self) -> SpawnedAsyncCmd {
        let mut child = self
            .command
            .stdin(Stdio::null())
            .stderr(Stdio::piped())
            .stdout(Stdio::piped())
            .spawn()
            .unwrap();

        let (monitor_tx, monitor_rx) = sync_channel(WAITER_CHANNEL_BUFFER_SIZE);
        let (proc_tx, proc_rx) = sync_channel(OUT_CHANNEL_BUFFER_SIZE);

        let mut waiters: HashMap<WaitTarget, thread::JoinHandle<()>> = HashMap::with_capacity(3);
        waiters.insert(
            WaitTarget::Stderr,
            spawn_join(
                WaitTarget::Stderr,
                monitor_tx.clone(),
                reader_tx(child.stderr.take().unwrap(), proc_tx.clone()),
            ),
        );

        // The stdout reader gets the last proc_tx (moved, not cloned),
        // so the line channel closes once both readers are done.
        waiters.insert(
            WaitTarget::Stdout,
            spawn_join(
                WaitTarget::Stdout,
                monitor_tx.clone(),
                reader_tx(child.stdout.take().unwrap(), proc_tx),
            ),
        );

        // Likewise, the child waiter consumes the last monitor_tx.
        waiters.insert(
            WaitTarget::Child,
            child_wait(WaitTarget::Child, monitor_tx, child),
        );

        let head_waiter = thread::spawn(move || block_on_waiters(monitor_rx, waiters));

        SpawnedAsyncCmd {
            waiter: head_waiter,
            rx: proc_rx,
        }
    }
}
+
+impl SpawnedAsyncCmd {
+ pub fn lines(&mut self) -> mpsc::Iter<'_, String> {
+ self.rx.iter()
+ }
+
+ pub fn get_next_line(&mut self) -> Result<String, mpsc::RecvError> {
+ self.rx.recv()
+ }
+
+ pub fn wait(self) -> Result<ExitStatus, io::Error> {
+ self.waiter
+ .join()
+ .map_err(|_err| io::Error::other("Couldn't join thread."))
+ .and_then(|opt| {
+ opt.ok_or_else(|| io::Error::other("Thread didn't return an exit status."))
+ })
+ .and_then(|res| res)
+ }
+}
+
// FIXME: remove with rust/cargo update
#[allow(clippy::cognitive_complexity)]
/// Receive finish notifications for the three monitored tasks (stdout
/// reader, stderr reader, child process), joining each monitor thread
/// as it completes. Returns the child's exit status, or None if the
/// process notification never arrived.
fn block_on_waiters(
    monitor_rx: mpsc::Receiver<(WaitTarget, WaitResult<()>)>,
    mut waiters: HashMap<WaitTarget, thread::JoinHandle<()>>,
) -> Option<Result<ExitStatus, io::Error>> {
    let mut status = None;

    for (id, interior_result) in monitor_rx.iter() {
        match waiters.remove(&id) {
            Some(handle) => {
                info!("Received notice that {:?} finished", id);
                let waiter_result = handle.join();

                info!("waiter status: {:?}", waiter_result);
                info!("interior status: {:?}", interior_result);

                match interior_result {
                    WaitResult::Thread(t) => {
                        debug!("thread result: {:?}", t);
                    }
                    WaitResult::Process(t) => {
                        // Only the child notification carries a status.
                        status = Some(t);
                    }
                }
            }
            None => {
                error!(
                    "Received notice that {:?} finished, but it isn't being waited on?",
                    id
                );
            }
        }

        // All three tasks accounted for: stop instead of waiting for
        // the monitor channel to close.
        if waiters.is_empty() {
            debug!("Closing up the waiter receiver thread, no more waiters.");
            break;
        }
    }

    info!(
        "Out of the child waiter recv, with {:?} remaining waits",
        waiters.len()
    );

    status
}
+
#[cfg(test)]
mod tests {
    use super::AsyncCmd;
    use std::ffi::{OsStr, OsString};
    use std::os::unix::ffi::OsStrExt;
    use std::process::Command;

    // Happy path: one stdout line, successful exit.
    #[test]
    fn basic_echo_test() {
        let mut cmd = Command::new("/bin/sh");
        cmd.arg("-c");
        cmd.arg("echo hi");
        let acmd = AsyncCmd::new(cmd);

        let mut spawned = acmd.spawn();
        let lines: Vec<String> = spawned.lines().collect();
        assert_eq!(lines, vec!["hi"]);
        let exit_status = spawned.wait().unwrap();
        assert!(exit_status.success());
    }

    // stdout and stderr lines arrive interleaved in emission order.
    // NOTE: requires `stdbuf` (coreutils) and `bash` on PATH.
    #[test]
    fn basic_interpolation_test() {
        let mut cmd = Command::new("stdbuf");
        cmd.arg("-o0");
        cmd.arg("-e0");
        cmd.arg("bash");
        cmd.arg("-c");

        // The sleep 0's are to introduce delay between output to help
        // make it more predictably received in the right order
        cmd.arg("echo stdout; sleep 0.1; echo stderr >&2; sleep 0.1; echo stdout2; sleep 0.1; echo stderr2 >&2");
        let acmd = AsyncCmd::new(cmd);

        let mut spawned = acmd.spawn();
        let lines: Vec<String> = spawned.lines().collect();
        assert_eq!(lines, vec!["stdout", "stderr", "stdout2", "stderr2"]);
        let exit_status = spawned.wait().unwrap();
        assert!(exit_status.success());
    }

    // Many small concurrent writes on both streams: no lines lost.
    #[test]
    fn lots_of_small_ios_test() {
        let mut cmd = Command::new("/bin/sh");
        cmd.arg("-c");
        cmd.arg("for i in `seq 1 100`; do (seq 1 100)& (seq 1 100 >&2)& wait; wait; done");
        let acmd = AsyncCmd::new(cmd);

        let mut spawned = acmd.spawn();
        let lines: Vec<String> = spawned.lines().collect();
        assert_eq!(lines.len(), 20000);
        let thread_result = spawned.wait();
        let exit_status = thread_result.expect("Thread should exit correctly");
        assert!(exit_status.success());
    }

    // High-volume sequential output well beyond the channel buffer.
    #[test]
    fn lots_of_io_test() {
        let mut cmd = Command::new("/bin/sh");
        cmd.arg("-c");
        cmd.arg("seq 1 100000; seq 1 100000 >&2");
        let acmd = AsyncCmd::new(cmd);

        let mut spawned = acmd.spawn();
        let lines: Vec<String> = spawned.lines().collect();
        assert_eq!(lines.len(), 200000);
        let thread_result = spawned.wait();
        let exit_status = thread_result.expect("Thread should exit correctly");
        assert!(exit_status.success());
    }

    // Invalid UTF-8 output (0xff byte) becomes the placeholder line
    // instead of killing the stream.
    #[test]
    fn bad_utf8_test() {
        let mut echos = OsString::from("echo hi; echo ");
        echos.push(OsStr::from_bytes(&[0xffu8]));
        echos.push("; echo there;");

        let mut cmd = Command::new("/bin/sh");
        cmd.arg("-c");
        cmd.arg(echos);
        let acmd = AsyncCmd::new(cmd);

        let mut spawned = acmd.spawn();
        let lines: Vec<String> = spawned.lines().collect();
        assert_eq!(
            lines,
            vec!["hi", "Non-UTF8 data omitted from the log.", "there"]
        );
        let exit_status = spawned.wait().unwrap();
        assert!(exit_status.success());
    }
}
diff --git a/ofborg/tickborg/src/bin/build-faker.rs b/ofborg/tickborg/src/bin/build-faker.rs
new file mode 100644
index 0000000000..df8fcbfa50
--- /dev/null
+++ b/ofborg/tickborg/src/bin/build-faker.rs
@@ -0,0 +1,62 @@
use lapin::message::Delivery;
use std::env;
use std::error::Error;

use tickborg::commentparser;
use tickborg::config;
use tickborg::easylapin;
use tickborg::message::{Pr, Repo, buildjob};
use tickborg::notifyworker::NotificationReceiver;
use tickborg::worker;

/// Developer utility: publishes a canned BuildJob for
/// project-tick/Project-Tick PR #42 so a running builder can be
/// exercised without real GitHub traffic.
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    tickborg::setup_log();

    let arg = env::args().nth(1).expect("usage: build-faker <config>");
    let cfg = config::load(arg.as_ref());

    // Connects with the builder's RabbitMQ credentials; panics
    // (unwrap) if the config has no builder section.
    let conn = easylapin::from_config(&cfg.builder.unwrap().rabbitmq).await?;
    let chan = conn.create_channel().await?;

    let repo_msg = Repo {
        clone_url: "https://github.com/project-tick/Project-Tick.git".to_owned(),
        full_name: "project-tick/Project-Tick".to_owned(),
        owner: "project-tick".to_owned(),
        name: "Project-Tick".to_owned(),
    };

    let pr_msg = Pr {
        number: 42,
        head_sha: "6dd9f0265d52b946dd13daf996f30b64e4edb446".to_owned(),
        target_branch: Some("scratch".to_owned()),
    };

    // Routing key for the log backfill; lowercased when placed in the job.
    let logbackrk = "project-tick/Project-Tick.42".to_owned();

    let msg = buildjob::BuildJob {
        repo: repo_msg,
        pr: pr_msg,
        subset: Some(commentparser::Subset::Project),
        attrs: vec!["success".to_owned()],
        logs: Some((Some("logs".to_owned()), Some(logbackrk.to_lowercase()))),
        statusreport: Some((None, Some("scratch".to_owned()))),
        request_id: "bogus-request-id".to_owned(),
    };

    {
        // Fabricated delivery so the notification receiver has a
        // delivery context to work against.
        let deliver = Delivery::mock(0, "no-exchange".into(), "".into(), false, vec![]);
        let recv = easylapin::ChannelNotificationReceiver::new(chan.clone(), deliver);

        // NOTE(review): `1..2` iterates exactly once — if two publishes
        // were intended this should be `1..=2`; confirm.
        for _i in 1..2 {
            recv.tell(worker::publish_serde_action(
                None,
                Some("build-inputs-x86_64-darwin".to_owned()),
                &msg,
            ))
            .await;
        }
    }

    Ok(())
}
diff --git a/ofborg/tickborg/src/bin/builder.rs b/ofborg/tickborg/src/bin/builder.rs
new file mode 100644
index 0000000000..5930ae5696
--- /dev/null
+++ b/ofborg/tickborg/src/bin/builder.rs
@@ -0,0 +1,116 @@
+use std::env;
+use std::error::Error;
+use std::future::Future;
+use std::path::Path;
+use std::pin::Pin;
+
+use futures_util::future;
+use tracing::{error, info, warn};
+
+use tickborg::easyamqp::{self, ChannelExt, ConsumerExt};
+use tickborg::easylapin;
+use tickborg::{checkout, config, tasks};
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
+ tickborg::setup_log();
+
+ let arg = env::args()
+ .nth(1)
+ .unwrap_or_else(|| panic!("usage: {} <config>", std::env::args().next().unwrap()));
+ let cfg = config::load(arg.as_ref());
+
+ let Some(builder_cfg) = config::load(arg.as_ref()).builder else {
+ error!("No builder configuration found!");
+ panic!();
+ };
+
+ let conn = easylapin::from_config(&builder_cfg.rabbitmq).await?;
+ let mut handles: Vec<Pin<Box<dyn Future<Output = ()> + Send>>> = Vec::new();
+
+ for system in &cfg.build.system {
+ handles.push(self::create_handle(&conn, &cfg, system.to_string()).await?);
+ }
+
+ future::join_all(handles).await;
+
+ drop(conn); // Close connection.
+ info!("Closed the session... EOF");
+ Ok(())
+}
+
+#[allow(clippy::type_complexity)]
+async fn create_handle(
+ conn: &lapin::Connection,
+ cfg: &config::Config,
+ system: String,
+) -> Result<Pin<Box<dyn Future<Output = ()> + Send>>, Box<dyn Error>> {
+ let mut chan = conn.create_channel().await?;
+
+ let cloner = checkout::cached_cloner(Path::new(&cfg.checkout.root));
+ let build_executor = cfg.build_executor();
+
+ chan.declare_exchange(easyamqp::ExchangeConfig {
+ exchange: "build-jobs".to_owned(),
+ exchange_type: easyamqp::ExchangeType::Fanout,
+ passive: false,
+ durable: true,
+ auto_delete: false,
+ no_wait: false,
+ internal: false,
+ })
+ .await?;
+
+ let queue_name = if cfg.runner.build_all_jobs != Some(true) {
+ let queue_name = format!("build-inputs-{system}");
+ chan.declare_queue(easyamqp::QueueConfig {
+ queue: queue_name.clone(),
+ passive: false,
+ durable: true,
+ exclusive: false,
+ auto_delete: false,
+ no_wait: false,
+ })
+ .await?;
+ queue_name
+ } else {
+ warn!("Building all jobs, please don't use this unless you're");
+ warn!("developing and have Graham's permission!");
+ let queue_name = "".to_owned();
+ chan.declare_queue(easyamqp::QueueConfig {
+ queue: queue_name.clone(),
+ passive: false,
+ durable: false,
+ exclusive: true,
+ auto_delete: true,
+ no_wait: false,
+ })
+ .await?;
+ queue_name
+ };
+
+ chan.bind_queue(easyamqp::BindQueueConfig {
+ queue: queue_name.clone(),
+ exchange: "build-jobs".to_owned(),
+ routing_key: None,
+ no_wait: false,
+ })
+ .await?;
+
+ let handle = easylapin::NotifyChannel(chan)
+ .consume(
+ tasks::build::BuildWorker::new(cloner, build_executor, system, cfg.runner.identity.clone()),
+ easyamqp::ConsumeConfig {
+ queue: queue_name.clone(),
+ consumer_tag: format!("{}-builder", cfg.whoami()),
+ no_local: false,
+ no_ack: false,
+ no_wait: false,
+ exclusive: false,
+ },
+ )
+ .await?;
+
+ info!("Fetching jobs from {}", &queue_name);
+ Ok(handle)
+}
diff --git a/ofborg/tickborg/src/bin/evaluation-filter.rs b/ofborg/tickborg/src/bin/evaluation-filter.rs
new file mode 100644
index 0000000000..f6fba8b63e
--- /dev/null
+++ b/ofborg/tickborg/src/bin/evaluation-filter.rs
@@ -0,0 +1,88 @@
+use std::env;
+use std::error::Error;
+
+use tracing::{error, info};
+
+use tickborg::config;
+use tickborg::easyamqp::{self, ChannelExt, ConsumerExt};
+use tickborg::easylapin;
+use tickborg::tasks;
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
+ tickborg::setup_log();
+
+ let arg = env::args()
+ .nth(1)
+ .unwrap_or_else(|| panic!("usage: {} <config>", std::env::args().next().unwrap()));
+ let cfg = config::load(arg.as_ref());
+
+ let Some(filter_cfg) = config::load(arg.as_ref()).evaluation_filter else {
+ error!("No evaluation filter configuration found!");
+ panic!();
+ };
+
+ let conn = easylapin::from_config(&filter_cfg.rabbitmq).await?;
+ let mut chan = conn.create_channel().await?;
+
+ chan.declare_exchange(easyamqp::ExchangeConfig {
+ exchange: "github-events".to_owned(),
+ exchange_type: easyamqp::ExchangeType::Topic,
+ passive: false,
+ durable: true,
+ auto_delete: false,
+ no_wait: false,
+ internal: false,
+ })
+ .await?;
+
+ chan.declare_queue(easyamqp::QueueConfig {
+ queue: "mass-rebuild-check-jobs".to_owned(),
+ passive: false,
+ durable: true,
+ exclusive: false,
+ auto_delete: false,
+ no_wait: false,
+ })
+ .await?;
+
+ let queue_name = String::from("mass-rebuild-check-inputs");
+ chan.declare_queue(easyamqp::QueueConfig {
+ queue: queue_name.clone(),
+ passive: false,
+ durable: true,
+ exclusive: false,
+ auto_delete: false,
+ no_wait: false,
+ })
+ .await?;
+
+ chan.bind_queue(easyamqp::BindQueueConfig {
+ queue: queue_name.clone(),
+ exchange: "github-events".to_owned(),
+ routing_key: Some("pull_request.project-tick/*".to_owned()),
+ no_wait: false,
+ })
+ .await?;
+
+ let handle = easylapin::WorkerChannel(chan)
+ .consume(
+ tasks::evaluationfilter::EvaluationFilterWorker::new(cfg.acl()),
+ easyamqp::ConsumeConfig {
+ queue: queue_name.clone(),
+ consumer_tag: format!("{}-evaluation-filter", cfg.whoami()),
+ no_local: false,
+ no_ack: false,
+ no_wait: false,
+ exclusive: false,
+ },
+ )
+ .await?;
+
+ info!("Fetching jobs from {}", &queue_name);
+ handle.await;
+
+ drop(conn); // Close connection.
+ info!("Closed the session... EOF");
+ Ok(())
+}
diff --git a/ofborg/tickborg/src/bin/github-comment-filter.rs b/ofborg/tickborg/src/bin/github-comment-filter.rs
new file mode 100644
index 0000000000..5240fba8cb
--- /dev/null
+++ b/ofborg/tickborg/src/bin/github-comment-filter.rs
@@ -0,0 +1,114 @@
+use std::env;
+use std::error::Error;
+
+use tickborg::systems::System;
+use tracing::{error, info};
+
+use tickborg::config;
+use tickborg::easyamqp::{self, ChannelExt, ConsumerExt};
+use tickborg::easylapin;
+use tickborg::tasks;
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
+ tickborg::setup_log();
+
+ let arg = env::args()
+ .nth(1)
+ .unwrap_or_else(|| panic!("usage: {} <config>", std::env::args().next().unwrap()));
+ let cfg = config::load(arg.as_ref());
+
+ let Some(filter_cfg) = config::load(arg.as_ref()).github_comment_filter else {
+ error!("No comment filter configuration found!");
+ panic!();
+ };
+
+ let conn = easylapin::from_config(&filter_cfg.rabbitmq).await?;
+ let mut chan = conn.create_channel().await?;
+
+ chan.declare_exchange(easyamqp::ExchangeConfig {
+ exchange: "github-events".to_owned(),
+ exchange_type: easyamqp::ExchangeType::Topic,
+ passive: false,
+ durable: true,
+ auto_delete: false,
+ no_wait: false,
+ internal: false,
+ })
+ .await?;
+
+ chan.declare_exchange(easyamqp::ExchangeConfig {
+ exchange: "build-jobs".to_owned(),
+ exchange_type: easyamqp::ExchangeType::Fanout,
+ passive: false,
+ durable: true,
+ auto_delete: false,
+ no_wait: false,
+ internal: false,
+ })
+ .await?;
+
+ let queue_name = "build-inputs";
+ chan.declare_queue(easyamqp::QueueConfig {
+ queue: queue_name.to_owned(),
+ passive: false,
+ durable: true,
+ exclusive: false,
+ auto_delete: false,
+ no_wait: false,
+ })
+ .await?;
+
+ chan.bind_queue(easyamqp::BindQueueConfig {
+ queue: "build-inputs".to_owned(),
+ exchange: "github-events".to_owned(),
+ routing_key: Some("issue_comment.*".to_owned()),
+ no_wait: false,
+ })
+ .await?;
+
+ chan.declare_exchange(easyamqp::ExchangeConfig {
+ exchange: "build-results".to_owned(),
+ exchange_type: easyamqp::ExchangeType::Fanout,
+ passive: false,
+ durable: true,
+ auto_delete: false,
+ no_wait: false,
+ internal: false,
+ })
+ .await?;
+
+ // Create build job queues
+ for sys in System::all_known_systems().iter().map(System::to_string) {
+ chan.declare_queue(easyamqp::QueueConfig {
+ queue: format!("build-inputs-{sys}"),
+ passive: false,
+ durable: true,
+ exclusive: false,
+ auto_delete: false,
+ no_wait: false,
+ })
+ .await?;
+ }
+
+ let handle = easylapin::WorkerChannel(chan)
+ .consume(
+ tasks::githubcommentfilter::GitHubCommentWorker::new(cfg.acl(), cfg.github()),
+ easyamqp::ConsumeConfig {
+ queue: "build-inputs".to_owned(),
+ consumer_tag: format!("{}-github-comment-filter", cfg.whoami()),
+ no_local: false,
+ no_ack: false,
+ no_wait: false,
+ exclusive: false,
+ },
+ )
+ .await?;
+
+ info!("Fetching jobs from {}", &queue_name);
+ handle.await;
+
+ drop(conn); // Close connection.
+ info!("Closed the session... EOF");
+ Ok(())
+}
diff --git a/ofborg/tickborg/src/bin/github-comment-poster.rs b/ofborg/tickborg/src/bin/github-comment-poster.rs
new file mode 100644
index 0000000000..5c45d8f546
--- /dev/null
+++ b/ofborg/tickborg/src/bin/github-comment-poster.rs
@@ -0,0 +1,76 @@
+use std::env;
+use std::error::Error;
+
+use tracing::{error, info};
+
+use tickborg::config;
+use tickborg::easyamqp::{self, ChannelExt, ConsumerExt};
+use tickborg::easylapin;
+use tickborg::tasks;
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
+ tickborg::setup_log();
+
+ let arg = env::args()
+ .nth(1)
+ .unwrap_or_else(|| panic!("usage: {} <config>", std::env::args().next().unwrap()));
+ let cfg = config::load(arg.as_ref());
+
+ let Some(poster_cfg) = config::load(arg.as_ref()).github_comment_poster else {
+ error!("No comment poster configuration found!");
+ panic!();
+ };
+
+ let conn = easylapin::from_config(&poster_cfg.rabbitmq).await?;
+ let mut chan = conn.create_channel().await?;
+
+ chan.declare_exchange(easyamqp::ExchangeConfig {
+ exchange: "build-results".to_owned(),
+ exchange_type: easyamqp::ExchangeType::Fanout,
+ passive: false,
+ durable: true,
+ auto_delete: false,
+ no_wait: false,
+ internal: false,
+ })
+ .await?;
+
+ chan.declare_queue(easyamqp::QueueConfig {
+ queue: "build-results".to_owned(),
+ passive: false,
+ durable: true,
+ exclusive: false,
+ auto_delete: false,
+ no_wait: false,
+ })
+ .await?;
+
+ chan.bind_queue(easyamqp::BindQueueConfig {
+ queue: "build-results".to_owned(),
+ exchange: "build-results".to_owned(),
+ routing_key: None,
+ no_wait: false,
+ })
+ .await?;
+
+ let handle = easylapin::WorkerChannel(chan)
+ .consume(
+ tasks::githubcommentposter::GitHubCommentPoster::new(cfg.github_app_vendingmachine()),
+ easyamqp::ConsumeConfig {
+ queue: "build-results".to_owned(),
+ consumer_tag: format!("{}-github-comment-poster", cfg.whoami()),
+ no_local: false,
+ no_ack: false,
+ no_wait: false,
+ exclusive: false,
+ },
+ )
+ .await?;
+
+ handle.await;
+
+ drop(conn); // Close connection.
+ info!("Closed the session... EOF");
+ Ok(())
+}
diff --git a/ofborg/tickborg/src/bin/github-webhook-receiver.rs b/ofborg/tickborg/src/bin/github-webhook-receiver.rs
new file mode 100644
index 0000000000..910cd4b350
--- /dev/null
+++ b/ofborg/tickborg/src/bin/github-webhook-receiver.rs
@@ -0,0 +1,278 @@
+use std::env;
+use std::error::Error;
+use std::net::SocketAddr;
+use std::sync::Arc;
+
+use hmac::{Hmac, KeyInit as _, Mac};
+use http::{Method, StatusCode};
+use http_body_util::{BodyExt, Full};
+use hyper::body::{Bytes, Incoming};
+use hyper::server::conn::http1;
+use hyper::service::service_fn;
+use hyper::{Request, Response};
+use hyper_util::rt::TokioIo;
+use lapin::options::BasicPublishOptions;
+use lapin::{BasicProperties, Channel};
+use tickborg::ghevent::GenericWebhook;
+use tickborg::{config, easyamqp, easyamqp::ChannelExt, easylapin};
+use sha2::Sha256;
+use tokio::net::TcpListener;
+use tokio::sync::Mutex;
+use tracing::{error, info, warn};
+
+/// Prepares the the exchange we will write to, the queues that are bound to it
+/// and binds them.
+async fn setup_amqp(chan: &mut Channel) -> Result<(), Box<dyn Error + Send + Sync>> {
+ chan.declare_exchange(easyamqp::ExchangeConfig {
+ exchange: "github-events".to_owned(),
+ exchange_type: easyamqp::ExchangeType::Topic,
+ passive: false,
+ durable: true,
+ auto_delete: false,
+ no_wait: false,
+ internal: false,
+ })
+ .await?;
+
+ let queue_name = String::from("build-inputs");
+ chan.declare_queue(easyamqp::QueueConfig {
+ queue: queue_name.clone(),
+ passive: false,
+ durable: true,
+ exclusive: false,
+ auto_delete: false,
+ no_wait: false,
+ })
+ .await?;
+ chan.bind_queue(easyamqp::BindQueueConfig {
+ queue: queue_name.clone(),
+ exchange: "github-events".to_owned(),
+ routing_key: Some(String::from("issue_comment.*")),
+ no_wait: false,
+ })
+ .await?;
+
+ let queue_name = String::from("github-events-unknown");
+ chan.declare_queue(easyamqp::QueueConfig {
+ queue: queue_name.clone(),
+ passive: false,
+ durable: true,
+ exclusive: false,
+ auto_delete: false,
+ no_wait: false,
+ })
+ .await?;
+ chan.bind_queue(easyamqp::BindQueueConfig {
+ queue: queue_name.clone(),
+ exchange: "github-events".to_owned(),
+ routing_key: Some(String::from("unknown.*")),
+ no_wait: false,
+ })
+ .await?;
+
+ let queue_name = String::from("mass-rebuild-check-inputs");
+ chan.declare_queue(easyamqp::QueueConfig {
+ queue: queue_name.clone(),
+ passive: false,
+ durable: true,
+ exclusive: false,
+ auto_delete: false,
+ no_wait: false,
+ })
+ .await?;
+ chan.bind_queue(easyamqp::BindQueueConfig {
+ queue: queue_name.clone(),
+ exchange: "github-events".to_owned(),
+ routing_key: Some(String::from("pull_request.*")),
+ no_wait: false,
+ })
+ .await?;
+ Ok(())
+}
+
+fn response(status: StatusCode, body: &'static str) -> Response<Full<Bytes>> {
+ Response::builder()
+ .status(status)
+ .body(Full::new(Bytes::from(body)))
+ .unwrap()
+}
+
+fn empty_response(status: StatusCode) -> Response<Full<Bytes>> {
+ Response::builder()
+ .status(status)
+ .body(Full::new(Bytes::new()))
+ .unwrap()
+}
+
+async fn handle_request(
+ req: Request<Incoming>,
+ webhook_secret: Arc<String>,
+ chan: Arc<Mutex<Channel>>,
+) -> Result<Response<Full<Bytes>>, hyper::Error> {
+ // HTTP 405
+ if req.method() != Method::POST {
+ return Ok(empty_response(StatusCode::METHOD_NOT_ALLOWED));
+ }
+
+ // Get headers before consuming body
+ let sig_header = req
+ .headers()
+ .get("X-Hub-Signature-256")
+ .and_then(|v| v.to_str().ok())
+ .map(|s| s.to_string());
+ let event_type = req
+ .headers()
+ .get("X-Github-Event")
+ .and_then(|v| v.to_str().ok())
+ .map(|s| s.to_string());
+ let content_type = req
+ .headers()
+ .get("Content-Type")
+ .and_then(|v| v.to_str().ok())
+ .map(|s| s.to_string());
+
+ // Read body
+ let raw = match req.collect().await {
+ Ok(collected) => collected.to_bytes(),
+ Err(e) => {
+ warn!("Failed to read body from client: {e}");
+ return Ok(response(
+ StatusCode::INTERNAL_SERVER_ERROR,
+ "Failed to read body",
+ ));
+ }
+ };
+
+ // Validate signature
+ let Some(sig) = sig_header else {
+ return Ok(response(
+ StatusCode::BAD_REQUEST,
+ "Missing signature header",
+ ));
+ };
+ let mut components = sig.splitn(2, '=');
+ let Some(algo) = components.next() else {
+ return Ok(response(
+ StatusCode::BAD_REQUEST,
+ "Signature hash method missing",
+ ));
+ };
+ let Some(hash) = components.next() else {
+ return Ok(response(StatusCode::BAD_REQUEST, "Signature hash missing"));
+ };
+ let Ok(hash) = hex::decode(hash) else {
+ return Ok(response(
+ StatusCode::BAD_REQUEST,
+ "Invalid signature hash hex",
+ ));
+ };
+
+ if algo != "sha256" {
+ return Ok(response(
+ StatusCode::BAD_REQUEST,
+ "Invalid signature hash method",
+ ));
+ }
+
+ let Ok(mut mac) = Hmac::<Sha256>::new_from_slice(webhook_secret.as_bytes()) else {
+ error!("Unable to create HMAC from secret");
+ return Ok(response(
+ StatusCode::INTERNAL_SERVER_ERROR,
+ "Internal error",
+ ));
+ };
+ mac.update(&raw);
+ if mac.verify_slice(hash.as_slice()).is_err() {
+ return Ok(response(
+ StatusCode::BAD_REQUEST,
+ "Signature verification failed",
+ ));
+ }
+
+ // Parse body
+ let Some(ct) = content_type else {
+ return Ok(response(
+ StatusCode::BAD_REQUEST,
+ "No Content-Type header passed",
+ ));
+ };
+ if !ct.contains("application/json") {
+ return Ok(response(
+ StatusCode::BAD_REQUEST,
+ "Content-Type is not application/json. Webhook misconfigured?",
+ ));
+ }
+
+ let input = match serde_json::from_slice::<GenericWebhook>(&raw) {
+ Ok(i) => i,
+ Err(e) => {
+ error!("Invalid JSON received: {e}");
+ return Ok(response(StatusCode::BAD_REQUEST, "Invalid JSON"));
+ }
+ };
+
+ // Build routing key
+ let Some(event_type) = event_type else {
+ return Ok(response(StatusCode::BAD_REQUEST, "Missing event type"));
+ };
+ let routing_key = format!("{event_type}.{}", input.repository.full_name.to_lowercase());
+
+ // Publish message
+ let chan = chan.lock().await;
+ let _confirmation = chan
+ .basic_publish(
+ "github-events".into(),
+ routing_key.as_str().into(),
+ BasicPublishOptions::default(),
+ &raw,
+ BasicProperties::default()
+ .with_content_type("application/json".into())
+ .with_delivery_mode(2), // persistent
+ )
+ .await;
+
+ Ok(empty_response(StatusCode::NO_CONTENT))
+}
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error + Send + Sync>> {
+ tickborg::setup_log();
+
+ let arg = env::args()
+ .nth(1)
+ .unwrap_or_else(|| panic!("usage: {} <config>", std::env::args().next().unwrap()));
+ let Some(cfg) = config::load(arg.as_ref()).github_webhook_receiver else {
+ error!("No GitHub Webhook configuration found!");
+ panic!();
+ };
+
+ let webhook_secret = std::fs::read_to_string(cfg.webhook_secret_file)
+ .expect("Unable to read webhook secret file");
+ let webhook_secret = Arc::new(webhook_secret.trim().to_string());
+
+ let conn = easylapin::from_config(&cfg.rabbitmq).await?;
+ let mut chan = conn.create_channel().await?;
+ setup_amqp(&mut chan).await?;
+ let chan = Arc::new(Mutex::new(chan));
+
+ let addr: SocketAddr = cfg.listen.parse()?;
+ let listener = TcpListener::bind(addr).await?;
+ info!("Listening on {}", addr);
+
+ loop {
+ let (stream, _) = listener.accept().await?;
+ let io = TokioIo::new(stream);
+
+ let webhook_secret = webhook_secret.clone();
+ let chan = chan.clone();
+
+ tokio::task::spawn(async move {
+ let service =
+ service_fn(move |req| handle_request(req, webhook_secret.clone(), chan.clone()));
+
+ if let Err(err) = http1::Builder::new().serve_connection(io, service).await {
+ warn!("Error serving connection: {:?}", err);
+ }
+ });
+ }
+}
diff --git a/ofborg/tickborg/src/bin/log-message-collector.rs b/ofborg/tickborg/src/bin/log-message-collector.rs
new file mode 100644
index 0000000000..728a2d7f4d
--- /dev/null
+++ b/ofborg/tickborg/src/bin/log-message-collector.rs
@@ -0,0 +1,83 @@
+use std::env;
+use std::error::Error;
+use std::path::PathBuf;
+
+use tracing::{error, info};
+
+use tickborg::config;
+use tickborg::easyamqp::{self, ChannelExt, ConsumerExt};
+use tickborg::easylapin;
+use tickborg::tasks;
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
+ tickborg::setup_log();
+
+ let arg = env::args()
+ .nth(1)
+ .unwrap_or_else(|| panic!("usage: {} <config>", std::env::args().next().unwrap()));
+ let cfg = config::load(arg.as_ref());
+
+ let Some(collector_cfg) = config::load(arg.as_ref()).log_message_collector else {
+ error!("No log message collector configuration found!");
+ panic!();
+ };
+
+ let conn = easylapin::from_config(&collector_cfg.rabbitmq).await?;
+ let mut chan = conn.create_channel().await?;
+
+ chan.declare_exchange(easyamqp::ExchangeConfig {
+ exchange: "logs".to_owned(),
+ exchange_type: easyamqp::ExchangeType::Topic,
+ passive: false,
+ durable: true,
+ auto_delete: false,
+ no_wait: false,
+ internal: false,
+ })
+ .await?;
+
+ let queue_name = "logs".to_owned();
+ chan.declare_queue(easyamqp::QueueConfig {
+ queue: queue_name.clone(),
+ passive: false,
+ durable: false,
+ exclusive: true,
+ auto_delete: true,
+ no_wait: false,
+ })
+ .await?;
+
+ chan.bind_queue(easyamqp::BindQueueConfig {
+ queue: queue_name.clone(),
+ exchange: "logs".to_owned(),
+ routing_key: Some("*.*".to_owned()),
+ no_wait: false,
+ })
+ .await?;
+
+ // Regular channel, we want prefetching here.
+ let handle = chan
+ .consume(
+ tasks::log_message_collector::LogMessageCollector::new(
+ PathBuf::from(collector_cfg.logs_path),
+ 100,
+ ),
+ easyamqp::ConsumeConfig {
+ queue: queue_name.clone(),
+ consumer_tag: format!("{}-log-collector", cfg.whoami()),
+ no_local: false,
+ no_ack: false,
+ no_wait: false,
+ exclusive: false,
+ },
+ )
+ .await?;
+
+ info!("Fetching jobs from {}", &queue_name);
+ handle.await;
+
+ drop(conn); // Close connection.
+ info!("Closed the session... EOF");
+ Ok(())
+}
diff --git a/ofborg/tickborg/src/bin/logapi.rs b/ofborg/tickborg/src/bin/logapi.rs
new file mode 100644
index 0000000000..c1750b76e7
--- /dev/null
+++ b/ofborg/tickborg/src/bin/logapi.rs
@@ -0,0 +1,151 @@
+use std::net::SocketAddr;
+use std::{collections::HashMap, error::Error, path::PathBuf, sync::Arc};
+
+use http::{Method, StatusCode};
+use http_body_util::Full;
+use hyper::body::Bytes;
+use hyper::server::conn::http1;
+use hyper::service::service_fn;
+use hyper::{Request, Response};
+use hyper_util::rt::TokioIo;
+use tickborg::config;
+use tokio::net::TcpListener;
+use tracing::{error, info, warn};
+
+#[derive(serde::Serialize, Default)]
+struct Attempt {
+ metadata: Option<serde_json::Value>,
+ result: Option<serde_json::Value>,
+ log_url: Option<String>,
+}
+
+#[derive(serde::Serialize)]
+struct LogResponse {
+ attempts: HashMap<String, Attempt>,
+}
+
+#[derive(Clone)]
+struct LogApiConfig {
+ logs_path: String,
+ serve_root: String,
+}
+
+fn response(status: StatusCode, body: &'static str) -> Response<Full<Bytes>> {
+ Response::builder()
+ .status(status)
+ .body(Full::new(Bytes::from(body)))
+ .unwrap()
+}
+
+fn json_response(status: StatusCode, body: String) -> Response<Full<Bytes>> {
+ Response::builder()
+ .status(status)
+ .header("Content-Type", "application/json")
+ .body(Full::new(Bytes::from(body)))
+ .unwrap()
+}
+
+async fn handle_request(
+ req: Request<hyper::body::Incoming>,
+ cfg: Arc<LogApiConfig>,
+) -> Result<Response<Full<Bytes>>, hyper::Error> {
+ if req.method() != Method::GET {
+ return Ok(response(StatusCode::METHOD_NOT_ALLOWED, ""));
+ }
+
+ let uri = req.uri().path().to_string();
+ let Some(reqd) = uri.strip_prefix("/logs/").map(ToOwned::to_owned) else {
+ return Ok(response(StatusCode::NOT_FOUND, "invalid uri"));
+ };
+ let path: PathBuf = [&cfg.logs_path, &reqd].iter().collect();
+ let Ok(path) = std::fs::canonicalize(&path) else {
+ return Ok(response(StatusCode::NOT_FOUND, "absent"));
+ };
+ let Ok(iter) = std::fs::read_dir(path) else {
+ return Ok(response(StatusCode::NOT_FOUND, "non dir"));
+ };
+
+ let mut attempts = HashMap::<String, Attempt>::new();
+ for e in iter {
+ let Ok(e) = e else { continue };
+ let e_metadata = e.metadata();
+ if e_metadata.as_ref().map(|v| v.is_dir()).unwrap_or(true) {
+ return Ok(response(StatusCode::INTERNAL_SERVER_ERROR, "dir found"));
+ }
+
+ if e_metadata.as_ref().map(|v| v.is_file()).unwrap_or_default() {
+ let Ok(file_name) = e.file_name().into_string() else {
+ warn!("entry filename is not a utf-8 string: {:?}", e.file_name());
+ continue;
+ };
+
+ if file_name.ends_with(".metadata.json") || file_name.ends_with(".result.json") {
+ let Ok(file) = std::fs::File::open(e.path()) else {
+ warn!("could not open file: {file_name}");
+ continue;
+ };
+ let Ok(json) = serde_json::from_reader::<_, serde_json::Value>(file) else {
+ warn!("file is not a valid json file: {file_name}");
+ continue;
+ };
+ let Some(attempt_id) = json
+ .get("attempt_id")
+ .and_then(|v| v.as_str())
+ .map(ToOwned::to_owned)
+ else {
+ warn!("attempt_id not found in file: {file_name}");
+ continue;
+ };
+ let attempt_obj = attempts.entry(attempt_id).or_default();
+ if file_name.ends_with(".metadata.json") {
+ attempt_obj.metadata = Some(json);
+ } else {
+ attempt_obj.result = Some(json);
+ }
+ } else {
+ let attempt_obj = attempts.entry(file_name.clone()).or_default();
+ attempt_obj.log_url = Some(format!("{}/{reqd}/{file_name}", &cfg.serve_root));
+ }
+ }
+ }
+
+ let body = serde_json::to_string(&LogResponse { attempts }).unwrap_or_default();
+ Ok(json_response(StatusCode::OK, body))
+}
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error + Send + Sync>> {
+ tickborg::setup_log();
+
+ let arg = std::env::args()
+ .nth(1)
+ .unwrap_or_else(|| panic!("usage: {} <config>", std::env::args().next().unwrap()));
+ let Some(cfg) = config::load(arg.as_ref()).log_api_config else {
+ error!("No LogApi configuration found!");
+ panic!();
+ };
+
+ let api_cfg = Arc::new(LogApiConfig {
+ logs_path: cfg.logs_path,
+ serve_root: cfg.serve_root,
+ });
+
+ let addr: SocketAddr = cfg.listen.parse()?;
+ let listener = TcpListener::bind(addr).await?;
+ info!("Listening on {}", addr);
+
+ loop {
+ let (stream, _) = listener.accept().await?;
+ let io = TokioIo::new(stream);
+
+ let api_cfg = api_cfg.clone();
+
+ tokio::task::spawn(async move {
+ let service = service_fn(move |req| handle_request(req, api_cfg.clone()));
+
+ if let Err(err) = http1::Builder::new().serve_connection(io, service).await {
+ warn!("Error serving connection: {:?}", err);
+ }
+ });
+ }
+}
diff --git a/ofborg/tickborg/src/bin/mass-rebuilder.rs b/ofborg/tickborg/src/bin/mass-rebuilder.rs
new file mode 100644
index 0000000000..0d5fdb0127
--- /dev/null
+++ b/ofborg/tickborg/src/bin/mass-rebuilder.rs
@@ -0,0 +1,73 @@
+use std::env;
+use std::error::Error;
+use std::path::Path;
+
+use tracing::{error, info};
+
+use tickborg::checkout;
+use tickborg::config;
+use tickborg::easyamqp::{self, ChannelExt, ConsumerExt};
+use tickborg::easylapin;
+use tickborg::stats;
+use tickborg::tasks;
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
+ tickborg::setup_log();
+
+ let arg = env::args()
+ .nth(1)
+ .unwrap_or_else(|| panic!("usage: {} <config>", std::env::args().next().unwrap()));
+ let cfg = config::load(arg.as_ref());
+
+ let Some(rebuilder_cfg) = config::load(arg.as_ref()).mass_rebuilder else {
+ error!("No mass rebuilder configuration found!");
+ panic!();
+ };
+
+ let conn = easylapin::from_config(&rebuilder_cfg.rabbitmq).await?;
+ let mut chan = conn.create_channel().await?;
+
+ let root = Path::new(&cfg.checkout.root);
+ let cloner = checkout::cached_cloner(&root.join(cfg.runner.instance.to_string()));
+
+ let events = stats::RabbitMq::from_lapin(&cfg.whoami(), conn.create_channel().await?);
+
+ let queue_name = String::from("mass-rebuild-check-jobs");
+ chan.declare_queue(easyamqp::QueueConfig {
+ queue: queue_name.clone(),
+ passive: false,
+ durable: true,
+ exclusive: false,
+ auto_delete: false,
+ no_wait: false,
+ })
+ .await?;
+
+ let handle = easylapin::WorkerChannel(chan)
+ .consume(
+ tasks::evaluate::EvaluationWorker::new(
+ cloner,
+ cfg.github_app_vendingmachine(),
+ cfg.acl(),
+ cfg.runner.identity.clone(),
+ events,
+ ),
+ easyamqp::ConsumeConfig {
+ queue: queue_name.clone(),
+ consumer_tag: format!("{}-mass-rebuild-checker", cfg.whoami()),
+ no_local: false,
+ no_ack: false,
+ no_wait: false,
+ exclusive: false,
+ },
+ )
+ .await?;
+
+ info!("Fetching jobs from {}", queue_name);
+ handle.await;
+
+ drop(conn); // Close connection.
+ info!("Closed the session... EOF");
+ Ok(())
+}
diff --git a/ofborg/tickborg/src/bin/stats.rs b/ofborg/tickborg/src/bin/stats.rs
new file mode 100644
index 0000000000..cf2193e00f
--- /dev/null
+++ b/ofborg/tickborg/src/bin/stats.rs
@@ -0,0 +1,134 @@
+use std::env;
+use std::error::Error;
+use std::net::SocketAddr;
+use std::sync::Arc;
+
+use http::StatusCode;
+use http_body_util::Full;
+use hyper::body::Bytes;
+use hyper::server::conn::http1;
+use hyper::service::service_fn;
+use hyper::{Request, Response};
+use hyper_util::rt::TokioIo;
+use tokio::net::TcpListener;
+use tracing::{error, info, warn};
+
+use tickborg::easyamqp::{ChannelExt, ConsumerExt};
+use tickborg::{config, easyamqp, easylapin, stats, tasks};
+
+fn response(body: String) -> Response<Full<Bytes>> {
+ Response::builder()
+ .status(StatusCode::OK)
+ .body(Full::new(Bytes::from(body)))
+ .unwrap()
+}
+
+async fn run_http_server(
+ addr: SocketAddr,
+ metrics: Arc<stats::MetricCollector>,
+) -> Result<(), Box<dyn Error + Send + Sync>> {
+ let listener = TcpListener::bind(addr).await?;
+ info!("HTTP server listening on {}", addr);
+
+ loop {
+ let (stream, _) = listener.accept().await?;
+ let io = TokioIo::new(stream);
+
+ let metrics = metrics.clone();
+
+ tokio::task::spawn(async move {
+ let service = service_fn(move |_req: Request<hyper::body::Incoming>| {
+ let metrics = metrics.clone();
+ async move { Ok::<_, hyper::Error>(response(metrics.prometheus_output())) }
+ });
+
+ if let Err(err) = http1::Builder::new().serve_connection(io, service).await {
+ warn!("Error serving connection: {:?}", err);
+ }
+ });
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
+ tickborg::setup_log();
+
+ let arg = env::args()
+ .nth(1)
+ .unwrap_or_else(|| panic!("usage: {} <config>", std::env::args().next().unwrap()));
+ let cfg = config::load(arg.as_ref());
+
+ let Some(stats_cfg) = config::load(arg.as_ref()).stats else {
+ error!("No stats configuration found!");
+ panic!();
+ };
+
+ let conn = easylapin::from_config(&stats_cfg.rabbitmq).await?;
+
+ let mut chan = conn.create_channel().await?;
+
+ let events = stats::RabbitMq::from_lapin(&cfg.whoami(), conn.create_channel().await?);
+
+ let metrics = Arc::new(stats::MetricCollector::new());
+ let collector = tasks::statscollector::StatCollectorWorker::new(events, (*metrics).clone());
+
+ chan.declare_exchange(easyamqp::ExchangeConfig {
+ exchange: "stats".to_owned(),
+ exchange_type: easyamqp::ExchangeType::Fanout,
+ passive: false,
+ durable: true,
+ auto_delete: false,
+ no_wait: false,
+ internal: false,
+ })
+ .await?;
+
+ let queue_name = String::from("stats-events");
+ chan.declare_queue(easyamqp::QueueConfig {
+ queue: queue_name.clone(),
+ passive: false,
+ durable: true,
+ exclusive: false,
+ auto_delete: false,
+ no_wait: false,
+ })
+ .await?;
+
+ chan.bind_queue(easyamqp::BindQueueConfig {
+ queue: queue_name.clone(),
+ exchange: "stats".to_owned(),
+ routing_key: None,
+ no_wait: false,
+ })
+ .await?;
+
+ let handle = chan
+ .consume(
+ collector,
+ easyamqp::ConsumeConfig {
+ queue: "stats-events".to_owned(),
+ consumer_tag: format!("{}-prometheus-stats-collector", cfg.whoami()),
+ no_local: false,
+ no_ack: false,
+ no_wait: false,
+ exclusive: false,
+ },
+ )
+ .await?;
+
+ // Spawn HTTP server in a separate thread with its own tokio runtime
+ let metrics_clone = metrics.clone();
+ std::thread::spawn(async move || {
+ let addr: SocketAddr = "0.0.0.0:9898".parse().unwrap();
+ if let Err(e) = run_http_server(addr, metrics_clone).await {
+ error!("HTTP server error: {:?}", e);
+ }
+ });
+
+ info!("Fetching jobs from {}", &queue_name);
+ handle.await;
+
+ drop(conn); // Close connection.
+ info!("Closed the session... EOF");
+ Ok(())
+}
diff --git a/ofborg/tickborg/src/buildtool.rs b/ofborg/tickborg/src/buildtool.rs
new file mode 100644
index 0000000000..3171fac5a0
--- /dev/null
+++ b/ofborg/tickborg/src/buildtool.rs
@@ -0,0 +1,455 @@
+use crate::asynccmd::{AsyncCmd, SpawnedAsyncCmd};
+use crate::message::buildresult::BuildStatus;
+
+use std::fmt;
+use std::fs;
+use std::io::{BufRead, BufReader, Seek, SeekFrom};
+use std::path::Path;
+use std::process::{Command, Stdio};
+
+use tempfile::tempfile;
+
/// Identifies which build system a project uses.
#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub enum BuildSystem {
    /// CMake-based projects (built via `cmake --build`).
    CMake,
    /// Meson-based projects (built via `meson compile -C build`).
    Meson,
    /// Autoconf/automake projects (built via `make -j`).
    Autotools,
    /// Rust projects (built via `cargo build --release`).
    Cargo,
    /// JVM projects (built via `./gradlew build` or system `gradle`).
    Gradle,
    /// Plain Makefile projects (built via `make -j`).
    Make,
    /// Arbitrary shell command run via `sh -c <command>`.
    Custom { command: String },
}
+
+impl fmt::Display for BuildSystem {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ BuildSystem::CMake => write!(f, "cmake"),
+ BuildSystem::Meson => write!(f, "meson"),
+ BuildSystem::Autotools => write!(f, "autotools"),
+ BuildSystem::Cargo => write!(f, "cargo"),
+ BuildSystem::Gradle => write!(f, "gradle"),
+ BuildSystem::Make => write!(f, "make"),
+ BuildSystem::Custom { command } => write!(f, "custom({command})"),
+ }
+ }
+}
+
/// Project-specific build configuration.
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct ProjectBuildConfig {
    // Human-readable project name (also the lookup key in `find_project`).
    pub name: String,
    // Path of the project relative to the monorepo root.
    pub path: String,
    // Which build system drives this project.
    pub build_system: BuildSystem,
    // Per-project build timeout, in seconds.
    pub build_timeout_seconds: u16,
    // Extra configure-phase arguments.
    // NOTE(review): not consumed anywhere in this file — confirm intended use.
    pub configure_args: Vec<String>,
    // Extra arguments appended to the build command.
    pub build_args: Vec<String>,
    // Optional explicit test command (argv form); when `None`, a
    // build-system-specific default is used by `test_command`.
    pub test_command: Option<Vec<String>>,
}
+
/// The build executor — replaces ofborg's Nix struct.
#[derive(Clone, Debug)]
pub struct BuildExecutor {
    // Overall build timeout, in seconds.
    // NOTE(review): stored but not enforced within this file — confirm the
    // caller applies it.
    pub build_timeout: u16,
}
+
impl BuildExecutor {
    /// Create an executor; `build_timeout` is in seconds.
    pub fn new(build_timeout: u16) -> Self {
        Self { build_timeout }
    }

    /// Build a project using its configured build system.
    ///
    /// Runs the build synchronously. Returns `Ok(log)` when the command
    /// exits 0 and `Err(log)` otherwise, where `log` is a temp file holding
    /// the combined stdout+stderr, rewound to the start.
    pub fn build_project(
        &self,
        project_root: &Path,
        config: &ProjectBuildConfig,
    ) -> Result<fs::File, fs::File> {
        let project_dir = project_root.join(&config.path);
        let cmd = self.build_command(&project_dir, config);
        self.run(cmd, true)
    }

    /// Build a project asynchronously; the caller drives/waits on the
    /// returned `SpawnedAsyncCmd` (see `wait_for_build_status`).
    pub fn build_project_async(
        &self,
        project_root: &Path,
        config: &ProjectBuildConfig,
    ) -> SpawnedAsyncCmd {
        let project_dir = project_root.join(&config.path);
        let cmd = self.build_command(&project_dir, config);
        AsyncCmd::new(cmd).spawn()
    }

    /// Run tests for a project (explicit `test_command` if configured,
    /// otherwise the build system's default test invocation).
    pub fn test_project(
        &self,
        project_root: &Path,
        config: &ProjectBuildConfig,
    ) -> Result<fs::File, fs::File> {
        let project_dir = project_root.join(&config.path);
        let cmd = self.test_command(&project_dir, config);
        self.run(cmd, true)
    }

    /// Assemble the build `Command` for the project's build system.
    ///
    /// All commands run with `project_dir` as the working directory and have
    /// `config.build_args` appended.
    /// NOTE(review): `config.configure_args` is never used here; for CMake
    /// the `build/` directory is assumed to be already configured.
    fn build_command(&self, project_dir: &Path, config: &ProjectBuildConfig) -> Command {
        match &config.build_system {
            BuildSystem::CMake => {
                // Drives an existing configure tree: cmake --build <dir> --config Release
                let build_dir = project_dir.join("build");
                let mut cmd = Command::new("cmake");
                cmd.arg("--build").arg(&build_dir);
                cmd.args(["--config", "Release"]);
                for arg in &config.build_args {
                    cmd.arg(arg);
                }
                cmd.current_dir(project_dir);
                cmd
            }
            BuildSystem::Meson => {
                // meson compile -C build
                let mut cmd = Command::new("meson");
                cmd.arg("compile");
                cmd.args(["-C", "build"]);
                for arg in &config.build_args {
                    cmd.arg(arg);
                }
                cmd.current_dir(project_dir);
                cmd
            }
            BuildSystem::Autotools => {
                // make -j<ncpus>
                let mut cmd = Command::new("make");
                cmd.args(["-j", &num_cpus().to_string()]);
                for arg in &config.build_args {
                    cmd.arg(arg);
                }
                cmd.current_dir(project_dir);
                cmd
            }
            BuildSystem::Cargo => {
                // cargo build --release
                let mut cmd = Command::new("cargo");
                cmd.arg("build").arg("--release");
                for arg in &config.build_args {
                    cmd.arg(arg);
                }
                cmd.current_dir(project_dir);
                cmd
            }
            BuildSystem::Gradle => {
                // Prefer the project's wrapper script when present.
                let gradlew = project_dir.join("gradlew");
                let prog = if gradlew.exists() {
                    gradlew.to_string_lossy().to_string()
                } else {
                    "gradle".to_string()
                };
                let mut cmd = Command::new(prog);
                cmd.arg("build");
                for arg in &config.build_args {
                    cmd.arg(arg);
                }
                cmd.current_dir(project_dir);
                cmd
            }
            BuildSystem::Make => {
                // make -j<ncpus>
                let mut cmd = Command::new("make");
                cmd.args(["-j", &num_cpus().to_string()]);
                for arg in &config.build_args {
                    cmd.arg(arg);
                }
                cmd.current_dir(project_dir);
                cmd
            }
            BuildSystem::Custom { command } => {
                // sh -c <command> [build_args...]
                // NOTE(review): arguments after `-c <command>` become the
                // shell's positional parameters ($0, $1, ...), not part of
                // the command string — confirm that is the intended contract
                // for custom build_args.
                let mut cmd = Command::new("sh");
                cmd.args(["-c", command]);
                for arg in &config.build_args {
                    cmd.arg(arg);
                }
                cmd.current_dir(project_dir);
                cmd
            }
        }
    }

    /// Assemble the test `Command`: the project's explicit `test_command`
    /// (argv form, first element is the program) when set, otherwise a
    /// per-build-system default.
    fn test_command(&self, project_dir: &Path, config: &ProjectBuildConfig) -> Command {
        if let Some(ref test_cmd) = config.test_command {
            // NOTE(review): panics on an empty test_command vector.
            let mut cmd = Command::new(&test_cmd[0]);
            for arg in &test_cmd[1..] {
                cmd.arg(arg);
            }
            cmd.current_dir(project_dir);
            return cmd;
        }

        match &config.build_system {
            BuildSystem::CMake => {
                // ctest --test-dir build --output-on-failure
                let mut cmd = Command::new("ctest");
                cmd.arg("--test-dir").arg("build");
                cmd.args(["--output-on-failure"]);
                cmd.current_dir(project_dir);
                cmd
            }
            BuildSystem::Meson => {
                // meson test -C build
                let mut cmd = Command::new("meson");
                cmd.arg("test").args(["-C", "build"]);
                cmd.current_dir(project_dir);
                cmd
            }
            BuildSystem::Autotools | BuildSystem::Make => {
                // make check
                let mut cmd = Command::new("make");
                cmd.arg("check");
                cmd.current_dir(project_dir);
                cmd
            }
            BuildSystem::Cargo => {
                // cargo test --release
                let mut cmd = Command::new("cargo");
                cmd.arg("test").arg("--release");
                cmd.current_dir(project_dir);
                cmd
            }
            BuildSystem::Gradle => {
                // Prefer the project's wrapper script when present.
                let gradlew = project_dir.join("gradlew");
                let prog = if gradlew.exists() {
                    gradlew.to_string_lossy().to_string()
                } else {
                    "gradle".to_string()
                };
                let mut cmd = Command::new(prog);
                cmd.arg("test");
                cmd.current_dir(project_dir);
                cmd
            }
            BuildSystem::Custom { command } => {
                // Fall back to re-running the custom command for tests.
                let mut cmd = Command::new("sh");
                cmd.args(["-c", command]);
                cmd.current_dir(project_dir);
                cmd
            }
        }
    }

    /// Run `cmd` to completion, capturing stderr (and stdout when
    /// `keep_stdout`) into one temp file.
    ///
    /// Returns the rewound log file: `Ok` on exit status 0, `Err` otherwise.
    /// Panics if the temp file cannot be created or the command cannot be
    /// spawned.
    /// NOTE(review): `&self` (and thus `build_timeout`) is unused here — no
    /// timeout is applied to the child process.
    pub fn run(&self, mut cmd: Command, keep_stdout: bool) -> Result<fs::File, fs::File> {
        let stderr = tempfile().expect("Fetching a stderr tempfile");
        // A second handle to the same file, used to read the log back.
        let mut reader = stderr.try_clone().expect("Cloning stderr to the reader");

        // Interleave stdout into the same file, or discard it.
        let stdout: Stdio = if keep_stdout {
            Stdio::from(stderr.try_clone().expect("Cloning stderr for stdout"))
        } else {
            Stdio::null()
        };

        let status = cmd
            .stdout(stdout)
            .stderr(Stdio::from(stderr))
            .status()
            .expect("Running build command ...");

        // Rewind so the caller can read the log from the beginning.
        reader
            .seek(SeekFrom::Start(0))
            .expect("Seeking to Start(0)");

        if status.success() {
            Ok(reader)
        } else {
            Err(reader)
        }
    }
}
+
/// Best-effort CPU count for `make -j`; falls back to 4 when the host's
/// available parallelism cannot be determined.
fn num_cpus() -> usize {
    match std::thread::available_parallelism() {
        Ok(count) => count.get(),
        Err(_) => 4,
    }
}
+
/// Collect every line of `file` into a vector, stopping silently at the
/// first read/decode error (same semantics as `map_while(Result::ok)`).
pub fn lines_from_file(file: fs::File) -> Vec<String> {
    let mut collected = Vec::new();
    for line in BufReader::new(file).lines() {
        match line {
            Ok(text) => collected.push(text),
            Err(_) => break,
        }
    }
    collected
}
+
+pub fn wait_for_build_status(spawned: SpawnedAsyncCmd) -> BuildStatus {
+ match spawned.wait() {
+ Ok(s) => match s.code() {
+ Some(0) => BuildStatus::Success,
+ Some(_code) => BuildStatus::Failure,
+ None => BuildStatus::UnexpectedError {
+ err: "process terminated by signal".into(),
+ },
+ },
+ Err(err) => BuildStatus::UnexpectedError {
+ err: format!("failed on interior command {err}"),
+ },
+ }
+}
+
+/// Known projects in the Project Tick monorepo.
+pub fn known_projects() -> Vec<ProjectBuildConfig> {
+ vec![
+ ProjectBuildConfig {
+ name: "mnv".into(),
+ path: "mnv".into(),
+ build_system: BuildSystem::Autotools,
+ build_timeout_seconds: 1800,
+ configure_args: vec![],
+ build_args: vec![],
+ test_command: Some(vec!["make".into(), "check".into()]),
+ },
+ ProjectBuildConfig {
+ name: "cgit".into(),
+ path: "cgit".into(),
+ build_system: BuildSystem::Make,
+ build_timeout_seconds: 1800,
+ configure_args: vec![],
+ build_args: vec![],
+ test_command: None,
+ },
+ ProjectBuildConfig {
+ name: "cmark".into(),
+ path: "cmark".into(),
+ build_system: BuildSystem::CMake,
+ build_timeout_seconds: 1800,
+ configure_args: vec![],
+ build_args: vec![],
+ test_command: None,
+ },
+ ProjectBuildConfig {
+ name: "neozip".into(),
+ path: "neozip".into(),
+ build_system: BuildSystem::CMake,
+ build_timeout_seconds: 1800,
+ configure_args: vec![],
+ build_args: vec![],
+ test_command: None,
+ },
+ ProjectBuildConfig {
+ name: "genqrcode".into(),
+ path: "genqrcode".into(),
+ build_system: BuildSystem::CMake,
+ build_timeout_seconds: 1800,
+ configure_args: vec![],
+ build_args: vec![],
+ test_command: None,
+ },
+ ProjectBuildConfig {
+ name: "json4cpp".into(),
+ path: "json4cpp".into(),
+ build_system: BuildSystem::CMake,
+ build_timeout_seconds: 1800,
+ configure_args: vec![],
+ build_args: vec![],
+ test_command: None,
+ },
+ ProjectBuildConfig {
+ name: "tomlplusplus".into(),
+ path: "tomlplusplus".into(),
+ build_system: BuildSystem::Meson,
+ build_timeout_seconds: 1800,
+ configure_args: vec![],
+ build_args: vec![],
+ test_command: None,
+ },
+ ProjectBuildConfig {
+ name: "libnbtplusplus".into(),
+ path: "libnbtplusplus".into(),
+ build_system: BuildSystem::CMake,
+ build_timeout_seconds: 1800,
+ configure_args: vec![],
+ build_args: vec![],
+ test_command: None,
+ },
+ ProjectBuildConfig {
+ name: "meshmc".into(),
+ path: "meshmc".into(),
+ build_system: BuildSystem::CMake,
+ build_timeout_seconds: 3600,
+ configure_args: vec![],
+ build_args: vec!["--config".into(), "Release".into()],
+ test_command: None,
+ },
+ ProjectBuildConfig {
+ name: "forgewrapper".into(),
+ path: "forgewrapper".into(),
+ build_system: BuildSystem::Gradle,
+ build_timeout_seconds: 1800,
+ configure_args: vec![],
+ build_args: vec![],
+ test_command: None,
+ },
+ ProjectBuildConfig {
+ name: "corebinutils".into(),
+ path: "corebinutils".into(),
+ build_system: BuildSystem::Make,
+ build_timeout_seconds: 1800,
+ configure_args: vec![],
+ build_args: vec![],
+ test_command: None,
+ },
+ ]
+}
+
+/// Look up a project by name.
+pub fn find_project(name: &str) -> Option<ProjectBuildConfig> {
+ known_projects().into_iter().find(|p| p.name == name)
+}
+
+/// Detect which projects changed based on modified file paths.
+pub fn detect_changed_projects(changed_files: &[String]) -> Vec<String> {
+ let projects = known_projects();
+ let mut changed = Vec::new();
+
+ for project in &projects {
+ let prefix = format!("{}/", project.path);
+ if changed_files.iter().any(|f| f.starts_with(&prefix)) {
+ changed.push(project.name.clone());
+ }
+ }
+
+ changed.sort();
+ changed.dedup();
+ changed
+}
+
#[cfg(test)]
mod tests {
    // Unit tests for the pure helpers: change detection, project lookup,
    // and Display formatting. No subprocesses are spawned here.
    use super::*;

    #[test]
    fn test_detect_changed_projects() {
        // Files under mnv/ and meshmc/ map to those projects; top-level
        // files like README.md map to none. Result is sorted.
        let files = vec![
            "mnv/src/main.c".into(),
            "mnv/Makefile.am".into(),
            "meshmc/CMakeLists.txt".into(),
            "README.md".into(),
        ];
        let changed = detect_changed_projects(&files);
        assert_eq!(changed, vec!["meshmc", "mnv"]);
    }

    #[test]
    fn test_detect_changed_projects_none() {
        // Only top-level files: no project is considered changed.
        let files = vec!["README.md".into(), ".gitignore".into()];
        let changed = detect_changed_projects(&files);
        assert!(changed.is_empty());
    }

    #[test]
    fn test_find_project() {
        // Lookup by name, including the associated build system.
        assert!(find_project("meshmc").is_some());
        assert!(find_project("nonexistent").is_none());
        assert_eq!(
            find_project("meshmc").unwrap().build_system,
            BuildSystem::CMake
        );
        assert_eq!(
            find_project("forgewrapper").unwrap().build_system,
            BuildSystem::Gradle
        );
    }

    #[test]
    fn test_build_system_display() {
        // Display renders lowercase command-style names.
        assert_eq!(BuildSystem::CMake.to_string(), "cmake");
        assert_eq!(BuildSystem::Meson.to_string(), "meson");
        assert_eq!(BuildSystem::Cargo.to_string(), "cargo");
        assert_eq!(BuildSystem::Gradle.to_string(), "gradle");
    }
}
diff --git a/ofborg/tickborg/src/checkout.rs b/ofborg/tickborg/src/checkout.rs
new file mode 100644
index 0000000000..731e68d3f0
--- /dev/null
+++ b/ofborg/tickborg/src/checkout.rs
@@ -0,0 +1,340 @@
+use crate::clone::{self, GitClonable};
+
+use std::ffi::{OsStr, OsString};
+use std::fs;
+use std::io::Error;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Stdio};
+
+use tracing::info;
+
/// Factory for cached git clones; all cache state lives under `root`.
pub struct CachedCloner {
    // Root directory of the clone cache (see `CachedCloner::project`
    // for the on-disk layout).
    root: PathBuf,
}
+
+pub fn cached_cloner(path: &Path) -> CachedCloner {
+ CachedCloner {
+ root: path.to_path_buf(),
+ }
+}
+
/// One project's shared cache: a bare mirror clone plus lock files.
pub struct CachedProject {
    // `<cloner root>/repo/<md5(name)>` — holds `clone`, `clone.lock`,
    // and per-use-category checkout directories.
    root: PathBuf,
    // Upstream URL the cache mirrors.
    clone_url: String,
}
+
/// A per-use working checkout derived from a `CachedProject`.
pub struct CachedProjectCo {
    // `<project root>/<use_category>` — parent of this checkout.
    root: PathBuf,
    // Identifier of this checkout (e.g. a PR number); names the directory
    // and its lock file.
    id: String,
    // Upstream URL (same as the parent project's).
    clone_url: String,
    // Path of the shared cache clone, used as a `--reference-if-able`
    // object source to avoid re-downloading.
    local_reference: PathBuf,
}
+
+impl CachedCloner {
+ pub fn project(&self, name: &str, clone_url: String) -> CachedProject {
+ // <root>/repo/<hash>/clone
+ // <root>/repo/<hash>/clone.lock
+ // <root>/repo/<hash>/<type>/<id>
+ // <root>/repo/<hash>/<type>/<id>.lock
+
+ let mut new_root = self.root.clone();
+ new_root.push("repo");
+ new_root.push(format!("{:x}", md5::compute(name)));
+
+ CachedProject {
+ root: new_root,
+ clone_url,
+ }
+ }
+}
+
impl CachedProject {
    /// Produce a per-use checkout handle rooted at `<root>/<use_category>`,
    /// after making sure the shared cache clone exists and is fresh.
    pub fn clone_for(&self, use_category: String, id: String) -> Result<CachedProjectCo, Error> {
        self.prefetch_cache()?;

        let mut new_root = self.root.clone();
        new_root.push(use_category);

        Ok(CachedProjectCo {
            root: new_root,
            id,
            clone_url: self.clone_from(),
            // The cache clone serves as the object-sharing reference for
            // the checkout (see `extra_clone_args`).
            local_reference: self.clone_to(),
        })
    }

    /// Ensure the cache dir exists, clone on first use, then fetch.
    /// Returns the path of the cache clone.
    fn prefetch_cache(&self) -> Result<PathBuf, Error> {
        fs::create_dir_all(&self.root)?;

        self.clone_repo()?;
        self.fetch_repo()?;

        Ok(self.clone_to())
    }
}
+
impl CachedProjectCo {
    /// Check out the remote-tracking version of `git_ref`
    /// (i.e. `origin/<git_ref>`).
    pub fn checkout_origin_ref(&self, git_ref: &OsStr) -> Result<String, Error> {
        let mut pref = OsString::from("origin/");
        pref.push(git_ref);

        self.checkout_ref(&pref)
    }

    /// Clone/fetch/clean this working checkout, then check out `git_ref`.
    /// Returns the canonicalized absolute path of the checkout.
    ///
    /// NOTE(review): `canonicalize(...).unwrap()` and `to_str().unwrap()`
    /// panic on I/O errors or non-UTF-8 paths — consider propagating
    /// instead.
    pub fn checkout_ref(&self, git_ref: &OsStr) -> Result<String, Error> {
        fs::create_dir_all(&self.root)?;

        self.clone_repo()?;
        self.fetch_repo()?;
        self.clean()?;
        self.checkout(git_ref)?;

        // let build_dir = self.build_dir();

        let canonicalized = fs::canonicalize(self.clone_to()).unwrap();
        Ok(canonicalized.to_str().unwrap().to_string())
    }

    /// Fetch a GitHub pull request head into the local branch `pr`.
    pub fn fetch_pr(&self, pr_id: u64) -> Result<(), Error> {
        let mut lock = self.lock()?;

        info!("Fetching PR #{}", pr_id);
        let result = Command::new("git")
            .arg("fetch")
            .arg("origin")
            .arg(format!("+refs/pull/{pr_id}/head:pr"))
            .current_dir(self.clone_to())
            .stdout(Stdio::null())
            .status()?;

        lock.unlock();

        if result.success() {
            Ok(())
        } else {
            Err(Error::other("Failed to fetch PR"))
        }
    }

    /// True when `commit` resolves in this checkout (probed via
    /// `git show`). Panics if git itself cannot be run.
    pub fn commit_exists(&self, commit: &OsStr) -> bool {
        let mut lock = self.lock().expect("Failed to lock");

        info!("Checking if commit {:?} exists", commit);
        let result = Command::new("git")
            .arg("--no-pager")
            .arg("show")
            .arg(commit)
            .current_dir(self.clone_to())
            .stdout(Stdio::null())
            .status()
            .expect("git show <commit> failed");

        lock.unlock();

        result.success()
    }

    /// Merge `commit` into the current HEAD with a fixed, unsigned
    /// merge-commit message.
    pub fn merge_commit(&self, commit: &OsStr) -> Result<(), Error> {
        let mut lock = self.lock()?;

        info!("Merging commit {:?}", commit);
        let result = Command::new("git")
            .arg("merge")
            .arg("--no-gpg-sign")
            .arg("-m")
            .arg("Automatic merge for GrahamCOfBorg")
            .arg(commit)
            .current_dir(self.clone_to())
            .stdout(Stdio::null())
            .status()?;

        lock.unlock();

        if result.success() {
            Ok(())
        } else {
            Err(Error::other("Failed to merge"))
        }
    }

    /// Subject lines of commits reachable from `commit` but not HEAD
    /// (`git log HEAD..<commit>`).
    pub fn commit_messages_from_head(&self, commit: &str) -> Result<Vec<String>, Error> {
        let mut lock = self.lock()?;

        let result = Command::new("git")
            .arg("log")
            .arg("--format=format:%s")
            .arg(format!("HEAD..{commit}"))
            .current_dir(self.clone_to())
            .output()?;

        lock.unlock();

        if result.status.success() {
            Ok(String::from_utf8_lossy(&result.stdout)
                .lines()
                .map(|l| l.to_owned())
                .collect())
        } else {
            // NOTE(review): lowercasing git's stderr here loses information
            // (case-sensitive paths, ref names) — confirm this is intended.
            Err(Error::other(
                String::from_utf8_lossy(&result.stderr).to_lowercase(),
            ))
        }
    }

    /// Paths touched between the merge base and `commit`
    /// (`git diff --name-only HEAD...<commit>` — note the three-dot range).
    pub fn files_changed_from_head(&self, commit: &str) -> Result<Vec<String>, Error> {
        let mut lock = self.lock()?;

        let result = Command::new("git")
            .arg("diff")
            .arg("--name-only")
            .arg(format!("HEAD...{commit}"))
            .current_dir(self.clone_to())
            .output()?;

        lock.unlock();

        if result.status.success() {
            Ok(String::from_utf8_lossy(&result.stdout)
                .lines()
                .map(|l| l.to_owned())
                .collect())
        } else {
            // NOTE(review): same stderr-lowercasing caveat as above.
            Err(Error::other(
                String::from_utf8_lossy(&result.stderr).to_lowercase(),
            ))
        }
    }
}
+
+impl clone::GitClonable for CachedProjectCo {
+ fn clone_from(&self) -> String {
+ self.clone_url.clone()
+ }
+
+ fn clone_to(&self) -> PathBuf {
+ let mut clone_path = self.root.clone();
+ clone_path.push(&self.id);
+ clone_path
+ }
+
+ fn lock_path(&self) -> PathBuf {
+ let mut lock_path = self.root.clone();
+ lock_path.push(format!("{}.lock", self.id));
+ lock_path
+ }
+
+ fn extra_clone_args(&self) -> Vec<&OsStr> {
+ let local_ref = self.local_reference.as_ref();
+ vec![
+ OsStr::new("--shared"),
+ OsStr::new("--reference-if-able"),
+ local_ref,
+ ]
+ }
+}
+
+impl clone::GitClonable for CachedProject {
+ fn clone_from(&self) -> String {
+ self.clone_url.clone()
+ }
+
+ fn clone_to(&self) -> PathBuf {
+ let mut clone_path = self.root.clone();
+ clone_path.push("clone");
+ clone_path
+ }
+
+ fn lock_path(&self) -> PathBuf {
+ let mut clone_path = self.root.clone();
+ clone_path.push("clone.lock");
+ clone_path
+ }
+
+ fn extra_clone_args(&self) -> Vec<&OsStr> {
+ vec![OsStr::new("--bare")]
+ }
+}
+
#[cfg(test)]
mod tests {
    // Integration-style tests: build a throwaway git repo with a fake PR
    // (via test-srcs/make-pr.sh), then exercise the checkout helpers
    // against it.
    use super::*;
    use crate::test_scratch::TestScratch;
    use std::path::{Path, PathBuf};
    use std::process::{Command, Stdio};

    // Resolve `component` relative to the crate root.
    fn tpath(component: &str) -> PathBuf {
        Path::new(env!("CARGO_MANIFEST_DIR")).join(component)
    }

    // Run make-pr.sh to populate `bare` and `co` with a test repository
    // plus a PR ref; returns the PR head's commit hash (script stdout).
    fn make_pr_repo(bare: &Path, co: &Path) -> String {
        let output = Command::new("bash")
            .current_dir(tpath("./test-srcs"))
            .arg("./make-pr.sh")
            .arg(bare)
            .arg(co)
            .stdout(Stdio::piped())
            .output()
            .expect("building the test PR failed");

        let stderr =
            String::from_utf8(output.stderr).unwrap_or_else(|err| format!("warning: {err}"));
        println!("{stderr}");

        let hash = String::from_utf8(output.stdout).expect("Should just be a hash");
        hash.trim().to_owned()
    }

    #[test]
    pub fn test_commit_msg_list() {
        let workingdir = TestScratch::new_dir("test-test-commit-msg-list");

        let bare = TestScratch::new_dir("bare-commit-messages");
        let mk_co = TestScratch::new_dir("mk-commit-messages");
        let hash = make_pr_repo(&bare.path(), &mk_co.path());

        let cloner = cached_cloner(&workingdir.path());
        let project = cloner.project("commit-msg-list", bare.string());
        let working_co = project
            .clone_for("testing-commit-msgs".to_owned(), "123".to_owned())
            .expect("clone should work");
        working_co
            .checkout_origin_ref(OsStr::new("master"))
            .unwrap();

        // The PR created by make-pr.sh carries exactly this subject line.
        let expect: Vec<String> = vec!["check out this cool PR".to_owned()];

        assert_eq!(
            working_co
                .commit_messages_from_head(&hash)
                .expect("fetching messages should work",),
            expect
        );
    }

    #[test]
    pub fn test_files_changed_list() {
        let workingdir = TestScratch::new_dir("test-test-files-changed-list");

        let bare = TestScratch::new_dir("bare-files-changed");
        let mk_co = TestScratch::new_dir("mk-files-changed");
        let hash = make_pr_repo(&bare.path(), &mk_co.path());

        let cloner = cached_cloner(&workingdir.path());
        let project = cloner.project("commit-files-changed-list", bare.string());
        let working_co = project
            .clone_for("testing-files-changed".to_owned(), "123".to_owned())
            .expect("clone should work");
        working_co
            .checkout_origin_ref(OsStr::new("master"))
            .unwrap();

        // Paths touched by the PR, as produced by make-pr.sh.
        let expect: Vec<String> = vec!["default.nix".to_owned(), "hi another file".to_owned()];

        assert_eq!(
            working_co
                .files_changed_from_head(&hash)
                .expect("fetching files changed should work",),
            expect
        );
    }
}
diff --git a/ofborg/tickborg/src/clone.rs b/ofborg/tickborg/src/clone.rs
new file mode 100644
index 0000000000..0dcb71c2c5
--- /dev/null
+++ b/ofborg/tickborg/src/clone.rs
@@ -0,0 +1,173 @@
+use fs2::FileExt;
+
+use std::ffi::OsStr;
+use std::fs;
+use std::io::Error;
+use std::path::PathBuf;
+use std::process::{Command, Stdio};
+
+use tracing::{debug, info, warn};
+
/// Holds an exclusive advisory file lock (via fs2) for as long as the
/// inner `File` handle is alive.
pub struct Lock {
    // `Some(file)` while locked; dropping the file releases the OS lock.
    lock: Option<fs::File>,
}

impl Lock {
    /// Release the lock by dropping the underlying file handle.
    pub fn unlock(&mut self) {
        self.lock = None
    }
}
+
/// Shared git plumbing for anything that maintains a local clone.
///
/// Implementors provide the four path/URL accessors; the default methods
/// implement lock-guarded clone/fetch/clean/checkout by shelling out to
/// `git`. Every mutating operation takes the exclusive file lock at
/// `lock_path()` first.
pub trait GitClonable {
    /// URL (or path) to clone from.
    fn clone_from(&self) -> String;
    /// Local directory of the clone.
    fn clone_to(&self) -> PathBuf;
    /// Extra arguments passed to `git clone` (e.g. `--bare`, `--shared`).
    fn extra_clone_args(&self) -> Vec<&OsStr>;

    /// Path of the advisory lock file guarding this clone.
    fn lock_path(&self) -> PathBuf;

    /// Acquire an exclusive lock on `lock_path()`; blocks until available.
    fn lock(&self) -> Result<Lock, Error> {
        debug!("Locking {:?}", self.lock_path());

        match fs::File::create(self.lock_path()) {
            Err(e) => {
                warn!("Failed to create lock file {:?}: {}", self.lock_path(), e);
                Err(e)
            }
            Ok(lock) => match lock.lock_exclusive() {
                Err(e) => {
                    warn!(
                        "Failed to get exclusive lock on file {:?}: {}",
                        self.lock_path(),
                        e
                    );
                    Err(e)
                }
                Ok(_) => {
                    debug!("Got lock on {:?}", self.lock_path());
                    Ok(Lock { lock: Some(lock) })
                }
            },
        }
    }

    /// Perform the initial `git clone` if `clone_to()` does not exist yet;
    /// a present directory is taken as "already cloned".
    fn clone_repo(&self) -> Result<(), Error> {
        let mut lock = self.lock()?;

        if self.clone_to().is_dir() {
            debug!("Found dir at {:?}, initial clone is done", self.clone_to());
            return Ok(());
        }

        info!(
            "Initial cloning of {} to {:?}",
            self.clone_from(),
            self.clone_to()
        );

        let result = Command::new("git")
            .arg("clone")
            .args(self.extra_clone_args())
            .arg(self.clone_from())
            .arg(self.clone_to())
            .stdout(Stdio::null())
            .status()?;

        lock.unlock();

        if result.success() {
            Ok(())
        } else {
            Err(Error::other(format!(
                "Failed to clone from {:?} to {:?}",
                self.clone_from(),
                self.clone_to()
            )))
        }
    }

    /// `git fetch origin` inside the clone.
    fn fetch_repo(&self) -> Result<(), Error> {
        let mut lock = self.lock()?;

        info!("Fetching from origin in {:?}", self.clone_to());
        let result = Command::new("git")
            .arg("fetch")
            .arg("origin")
            .current_dir(self.clone_to())
            .stdout(Stdio::null())
            .status()?;

        lock.unlock();

        if result.success() {
            Ok(())
        } else {
            Err(Error::other("Failed to fetch"))
        }
    }

    /// Return the working tree to a pristine state: abort any in-progress
    /// am/merge, hard-reset, and remove untracked/ignored files.
    ///
    /// The `am --abort` / `merge --abort` steps are deliberately
    /// best-effort (they fail when nothing is in progress).
    /// NOTE(review): the exit statuses of `reset --hard` and `clean` are
    /// also ignored — only spawn failures propagate. Confirm a failed
    /// reset should not abort the pipeline.
    fn clean(&self) -> Result<(), Error> {
        let mut lock = self.lock()?;

        debug!("git am --abort");
        Command::new("git")
            .arg("am")
            .arg("--abort")
            .current_dir(self.clone_to())
            .stdout(Stdio::null())
            .stderr(Stdio::null())
            .status()?;

        debug!("git merge --abort");
        Command::new("git")
            .arg("merge")
            .arg("--abort")
            .current_dir(self.clone_to())
            .stdout(Stdio::null())
            .stderr(Stdio::null())
            .status()?;

        debug!("git reset --hard");
        Command::new("git")
            .arg("reset")
            .arg("--hard")
            .current_dir(self.clone_to())
            .stdout(Stdio::null())
            .status()?;

        debug!("git clean -x -d --force");
        Command::new("git")
            .arg("clean")
            .arg("-x")
            .arg("-d")
            .arg("--force")
            .current_dir(self.clone_to())
            .stdout(Stdio::null())
            .status()?;

        lock.unlock();

        Ok(())
    }

    /// `git checkout --force <git_ref>` inside the clone.
    fn checkout(&self, git_ref: &OsStr) -> Result<(), Error> {
        let mut lock = self.lock()?;

        debug!("git checkout {:?}", git_ref);
        let result = Command::new("git")
            .arg("checkout")
            // we don't care if its dirty
            .arg("--force")
            .arg(git_ref)
            .current_dir(self.clone_to())
            .stdout(Stdio::null())
            .status()?;

        lock.unlock();

        if result.success() {
            Ok(())
        } else {
            Err(Error::other("Failed to checkout"))
        }
    }
}
diff --git a/ofborg/tickborg/src/commentparser.rs b/ofborg/tickborg/src/commentparser.rs
new file mode 100644
index 0000000000..f255af85d8
--- /dev/null
+++ b/ofborg/tickborg/src/commentparser.rs
@@ -0,0 +1,289 @@
+use nom::IResult;
+use nom::Parser;
+use nom::branch::alt;
+use nom::bytes::complete::tag;
+use nom::bytes::complete::tag_no_case;
+use nom::character::complete::multispace0;
+use nom::character::complete::multispace1;
+use nom::combinator::map;
+use nom::multi::many1;
+use nom::sequence::preceded;
+use tracing::warn;
+
+pub fn parse(text: &str) -> Option<Vec<Instruction>> {
+ let instructions: Vec<Instruction> = text
+ .lines()
+ .flat_map(|s| match parse_line(s) {
+ Some(instructions) => instructions.into_iter(),
+ None => Vec::new().into_iter(),
+ })
+ .collect();
+
+ if instructions.is_empty() {
+ None
+ } else {
+ Some(instructions)
+ }
+}
+
/// Token-character predicate: anything that is not ASCII whitespace.
fn is_not_whitespace(c: char) -> bool {
    !char::is_ascii_whitespace(&c)
}
+
/// Parse one whitespace-delimited token, rejecting the bot mention itself.
///
/// Succeeds on a run of one or more non-whitespace characters, but fails
/// (with a Tag error) when that token is `@tickbot` (case-insensitive) —
/// so a following mention terminates an argument list instead of being
/// swallowed as an argument.
fn normal_token(input: &str) -> IResult<&str, String> {
    let (input, tokens) =
        many1(nom::character::complete::satisfy(is_not_whitespace)).parse(input)?;

    let s: String = tokens.into_iter().collect();
    if s.eq_ignore_ascii_case("@tickbot") {
        Err(nom::Err::Error(nom::error::Error::new(
            input,
            nom::error::ErrorKind::Tag,
        )))
    } else {
        Ok((input, s))
    }
}
+
/// Parse one command that follows a bot mention.
///
/// Grammar (after optional leading whitespace):
///   `build <target>...` -> Instruction::Build(Subset::Project, targets)
///   `test <target>...`  -> Instruction::Test(targets)
///   `eval`              -> Instruction::Eval
/// `build`/`test` require at least one target (`many1` of `normal_token`),
/// so a bare "build" fails to parse.
fn parse_command(input: &str) -> IResult<&str, Instruction> {
    alt((
        preceded(
            preceded(multispace0, tag("build")),
            preceded(
                multispace1,
                map(many1(preceded(multispace0, normal_token)), |targets| {
                    Instruction::Build(Subset::Project, targets)
                }),
            ),
        ),
        preceded(
            preceded(multispace0, tag("test")),
            preceded(
                multispace1,
                map(many1(preceded(multispace0, normal_token)), |targets| {
                    Instruction::Test(targets)
                }),
            ),
        ),
        preceded(multispace0, map(tag("eval"), |_| Instruction::Eval)),
    ))
    .parse(input)
}
+
+fn parse_line_impl(input: &str) -> IResult<&str, Option<Vec<Instruction>>> {
+ let (input, _) = multispace0.parse(input)?;
+
+ let result = map(
+ many1(preceded(
+ multispace0,
+ preceded(
+ tag_no_case("@tickbot"),
+ preceded(multispace1, parse_command),
+ ),
+ )),
+ |instructions| Some(instructions),
+ )
+ .parse(input);
+
+ match result {
+ Ok((rest, instructions)) => Ok((rest, instructions)),
+ Err(_e) => Ok((input, None)),
+ }
+}
+
+pub fn parse_line(text: &str) -> Option<Vec<Instruction>> {
+ match parse_line_impl(text) {
+ Ok((_, res)) => res,
+ Err(e) => {
+ warn!("Failed parsing string '{}': result was {:?}", text, e);
+ None
+ }
+ }
+}
+
/// A single bot command extracted from a comment.
#[derive(PartialEq, Eq, Debug, Clone)]
pub enum Instruction {
    /// Build the named targets within the given subset.
    Build(Subset, Vec<String>),
    /// Run tests for the named targets.
    Test(Vec<String>),
    /// Re-run evaluation.
    Eval,
}
+
/// Which portion of the tree a build targets. Currently only whole
/// projects are supported.
// NOTE(review): the `upper_case_acronyms` allow looks vestigial now that
// the only variant is `Project` — confirm it can be dropped.
#[allow(clippy::upper_case_acronyms)]
#[derive(serde::Serialize, serde::Deserialize, Debug, PartialEq, Eq, Clone)]
pub enum Subset {
    Project,
}
+
#[cfg(test)]
mod tests {
    // Exhaustive parser tests: empty input, non-command noise, multiple
    // commands per line/comment, embedded prose, and case handling.

    use super::*;

    #[test]
    fn parse_empty() {
        assert_eq!(None, parse(""));
    }

    #[test]
    fn valid_trailing_instruction() {
        // A mention on the last line still counts.
        assert_eq!(
            Some(vec![Instruction::Eval]),
            parse(
                "/cc @samet for ^^
@tickbot eval",
            )
        );
    }

    #[test]
    fn bogus_comment() {
        // Mentions must start the (whitespace-trimmed) line.
        assert_eq!(None, parse(":) :) :) @tickbot build hi"));
    }

    #[test]
    fn bogus_build_comment_empty_list() {
        // `build` with no targets is not an instruction.
        assert_eq!(None, parse("@tickbot build"));
    }

    #[test]
    fn eval_comment() {
        assert_eq!(Some(vec![Instruction::Eval]), parse("@tickbot eval"));
    }

    #[test]
    fn eval_and_build_comment() {
        // Two mentions on one line yield two instructions in order.
        assert_eq!(
            Some(vec![
                Instruction::Eval,
                Instruction::Build(Subset::Project, vec![String::from("meshmc")]),
            ]),
            parse("@tickbot eval @tickbot build meshmc")
        );
    }

    #[test]
    fn build_and_eval_and_build_comment() {
        // One instruction per line, collected in document order.
        assert_eq!(
            Some(vec![
                Instruction::Build(Subset::Project, vec![String::from("mnv")]),
                Instruction::Eval,
                Instruction::Build(Subset::Project, vec![String::from("meshmc")]),
            ]),
            parse(
                "
@tickbot build mnv
@tickbot eval
@tickbot build meshmc",
            )
        );
    }

    #[test]
    fn complex_comment_with_paragraphs() {
        // Prose between commands is ignored; only mention lines parse.
        assert_eq!(
            Some(vec![
                Instruction::Build(Subset::Project, vec![String::from("mnv")]),
                Instruction::Eval,
                Instruction::Build(Subset::Project, vec![String::from("meshmc")]),
            ]),
            parse(
                "
I like where you're going with this PR, so let's try it out!

@tickbot build mnv

I noticed though that the target branch was broken, which should be fixed. Let's eval again.

@tickbot eval

Also, just in case, let's try meshmc
@tickbot build meshmc",
            )
        );
    }

    #[test]
    fn build_and_eval_comment() {
        // A following mention terminates the build target list.
        assert_eq!(
            Some(vec![
                Instruction::Build(Subset::Project, vec![String::from("meshmc")]),
                Instruction::Eval,
            ]),
            parse("@tickbot build meshmc @tickbot eval")
        );
    }

    #[test]
    fn build_comment() {
        // Targets do not continue onto later lines.
        assert_eq!(
            Some(vec![Instruction::Build(
                Subset::Project,
                vec![String::from("meshmc"), String::from("mnv")]
            ),]),
            parse(
                "@tickbot build meshmc mnv

neozip",
            )
        );
    }

    #[test]
    fn test_comment() {
        assert_eq!(
            Some(vec![Instruction::Test(
                vec![
                    String::from("meshmc"),
                    String::from("mnv"),
                    String::from("neozip"),
                ]
            ),]),
            parse("@tickbot test meshmc mnv neozip")
        );
    }

    #[test]
    fn build_comment_newlines() {
        assert_eq!(
            Some(vec![Instruction::Build(
                Subset::Project,
                vec![
                    String::from("meshmc"),
                    String::from("mnv"),
                    String::from("neozip"),
                ]
            ),]),
            parse("@tickbot build meshmc mnv neozip")
        );
    }

    #[test]
    fn build_comment_case_insensitive_tag() {
        // The mention is case-insensitive...
        assert_eq!(
            Some(vec![Instruction::Build(
                Subset::Project,
                vec![
                    String::from("meshmc"),
                    String::from("mnv"),
                    String::from("neozip"),
                ]
            ),]),
            parse("@TickBot build meshmc mnv neozip")
        );
    }

    #[test]
    fn build_comment_lower_package_case_retained() {
        // ...but target names keep their original case.
        assert_eq!(
            Some(vec![Instruction::Build(
                Subset::Project,
                vec![
                    String::from("meshmc"),
                    String::from("mnv"),
                    String::from("json4cpp"),
                ]
            ),]),
            parse("@tickbot build meshmc mnv json4cpp")
        );
    }
}
diff --git a/ofborg/tickborg/src/commitstatus.rs b/ofborg/tickborg/src/commitstatus.rs
new file mode 100644
index 0000000000..6747f3b048
--- /dev/null
+++ b/ofborg/tickborg/src/commitstatus.rs
@@ -0,0 +1,103 @@
+use futures_util::future::TryFutureExt;
+use tracing::warn;
+
/// Pushes commit-status updates for one commit to GitHub.
pub struct CommitStatus {
    // Statuses endpoint bound to a specific repository.
    api: hubcaps::statuses::Statuses,
    // Commit the status is attached to.
    sha: String,
    // Status "context" (the per-check label GitHub displays).
    context: String,
    // Current description; mutable via `set_description`.
    description: String,
    // Target URL; empty string when unset (see `set_url`).
    url: String,
}
+
+impl CommitStatus {
+ pub fn new(
+ api: hubcaps::statuses::Statuses,
+ sha: String,
+ context: String,
+ description: String,
+ url: Option<String>,
+ ) -> CommitStatus {
+ let mut stat = CommitStatus {
+ api,
+ sha,
+ context,
+ description,
+ url: "".to_owned(),
+ };
+
+ stat.set_url(url);
+
+ stat
+ }
+
+ pub fn set_url(&mut self, url: Option<String>) {
+ self.url = url.unwrap_or_else(|| String::from(""))
+ }
+
+ pub async fn set_with_description(
+ &mut self,
+ description: &str,
+ state: hubcaps::statuses::State,
+ ) -> Result<(), CommitStatusError> {
+ self.set_description(description.to_owned());
+ self.set(state).await
+ }
+
+ pub fn set_description(&mut self, description: String) {
+ self.description = description;
+ }
+
+ pub async fn set(&self, state: hubcaps::statuses::State) -> Result<(), CommitStatusError> {
+ let desc = if self.description.len() >= 140 {
+ warn!(
+ "description is over 140 char; truncating: {:?}",
+ &self.description
+ );
+ self.description.chars().take(140).collect()
+ } else {
+ self.description.clone()
+ };
+ self.api
+ .create(
+ self.sha.as_ref(),
+ &hubcaps::statuses::StatusOptions::builder(state)
+ .context(self.context.clone())
+ .description(desc)
+ .target_url(self.url.clone())
+ .build(),
+ )
+ .map_ok(|_| ())
+ .map_err(|e| CommitStatusError::from(e))
+ .await?;
+ Ok(())
+ }
+}
+
/// Errors from posting a commit status, split so callers can react to the
/// recoverable GitHub cases specifically.
#[derive(Debug)]
pub enum CommitStatusError {
    /// 401 "Bad credentials" — the installation token has expired.
    ExpiredCreds(hubcaps::Error),
    /// 422 "No commit found for SHA: …" — the commit is gone/unknown.
    MissingSha(hubcaps::Error),
    /// Any other hubcaps error.
    Error(hubcaps::Error),
    /// Internal (non-GitHub) failure, described as text.
    InternalError(String),
}
+
impl From<hubcaps::Error> for CommitStatusError {
    /// Classify a hubcaps error by matching GitHub's status code and
    /// error-message text (GitHub has no structured error codes here,
    /// so string matching is the only available signal).
    fn from(e: hubcaps::Error) -> CommitStatusError {
        use http::status::StatusCode;
        use hubcaps::Error;
        match &e {
            Error::Fault { code, error }
                if code == &StatusCode::UNAUTHORIZED && error.message == "Bad credentials" =>
            {
                CommitStatusError::ExpiredCreds(e)
            }
            Error::Fault { code, error }
                if code == &StatusCode::UNPROCESSABLE_ENTITY
                    && error.message.starts_with("No commit found for SHA:") =>
            {
                CommitStatusError::MissingSha(e)
            }
            _otherwise => CommitStatusError::Error(e),
        }
    }
}
diff --git a/ofborg/tickborg/src/config.rs b/ofborg/tickborg/src/config.rs
new file mode 100644
index 0000000000..7d7475e3b6
--- /dev/null
+++ b/ofborg/tickborg/src/config.rs
@@ -0,0 +1,387 @@
+use crate::acl;
+use crate::buildtool::BuildExecutor;
+
+use std::collections::{HashMap, hash_map::Entry};
+use std::fmt;
+use std::fs::File;
+use std::io::Read;
+use std::marker::PhantomData;
+use std::path::{Path, PathBuf};
+
+use hubcaps::{Credentials, Github, InstallationTokenGenerator, JWTCredentials};
+use rustls_pki_types::pem::PemObject as _;
+use serde::de::{self, Deserializer};
+use tracing::{debug, error, info, warn};
+
+/// Main tickborg configuration
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct Config {
+ /// Configuration for the webhook receiver
+ pub github_webhook_receiver: Option<GithubWebhookConfig>,
+ /// Configuration for the logapi receiver
+ pub log_api_config: Option<LogApiConfig>,
+ /// Configuration for the evaluation filter
+ pub evaluation_filter: Option<EvaluationFilter>,
+ /// Configuration for the GitHub comment filter
+ pub github_comment_filter: Option<GithubCommentFilter>,
+ /// Configuration for the GitHub comment poster
+ pub github_comment_poster: Option<GithubCommentPoster>,
+ /// Configuration for the mass rebuilder
+ pub mass_rebuilder: Option<MassRebuilder>,
+ /// Configuration for the builder
+ pub builder: Option<Builder>,
+ /// Configuration for the log message collector
+ pub log_message_collector: Option<LogMessageCollector>,
+ /// Configuration for the stats server
+ pub stats: Option<Stats>,
+ /// Common runner settings (identity, repos, trusted users)
+ pub runner: RunnerConfig,
+ /// Checkout configuration (root directory for working copies)
+ pub checkout: CheckoutConfig,
+ /// Build settings (systems, timeout, extra environment)
+ pub build: BuildConfig,
+ /// Credentials for the GitHub App, if acting as one
+ pub github_app: Option<GithubAppConfig>,
+}
+
+/// Configuration for the webhook receiver
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct GithubWebhookConfig {
+ /// Listen host/port
+ pub listen: String,
+ /// Path to the GitHub webhook secret
+ pub webhook_secret_file: String,
+ /// RabbitMQ broker to connect to
+ pub rabbitmq: RabbitMqConfig,
+}
+
+/// Default for [`LogApiConfig::logs_path`].
+fn default_logs_path() -> String {
+ "/var/log/tickborg".into()
+}
+
+/// Default for [`LogApiConfig::serve_root`].
+fn default_serve_root() -> String {
+ "https://logs.tickborg.project-tick.net/logfile".into()
+}
+
+/// Configuration for logapi
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct LogApiConfig {
+ /// Listen host/port
+ pub listen: String,
+ /// Directory where logs are stored on disk
+ #[serde(default = "default_logs_path")]
+ pub logs_path: String,
+ /// Public base URL under which stored logs are served
+ #[serde(default = "default_serve_root")]
+ pub serve_root: String,
+}
+
+/// Configuration for the evaluation filter
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct EvaluationFilter {
+ /// RabbitMQ broker to connect to
+ pub rabbitmq: RabbitMqConfig,
+}
+
+/// Configuration for the GitHub comment filter
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct GithubCommentFilter {
+ /// RabbitMQ broker to connect to
+ pub rabbitmq: RabbitMqConfig,
+}
+
+/// Configuration for the GitHub comment poster
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct GithubCommentPoster {
+ /// RabbitMQ broker to connect to
+ pub rabbitmq: RabbitMqConfig,
+}
+
+/// Configuration for the mass rebuilder
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct MassRebuilder {
+ /// RabbitMQ broker to connect to
+ pub rabbitmq: RabbitMqConfig,
+}
+
+/// Configuration for the builder
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct Builder {
+ /// RabbitMQ broker to connect to
+ pub rabbitmq: RabbitMqConfig,
+}
+
+/// Configuration for the log message collector
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct LogMessageCollector {
+ /// RabbitMQ broker to connect to
+ pub rabbitmq: RabbitMqConfig,
+ /// Path where the logs reside
+ pub logs_path: String,
+}
+
+/// Configuration for the stats exporter
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct Stats {
+ /// RabbitMQ broker to connect to
+ pub rabbitmq: RabbitMqConfig,
+}
+
+/// Configures the connection to a RabbitMQ instance
+#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
+#[serde(deny_unknown_fields)]
+pub struct RabbitMqConfig {
+ /// Whether or not to use SSL (amqps vs. amqp)
+ pub ssl: bool,
+ /// Hostname to connect to
+ pub host: String,
+ /// Virtual host to use (defaults to /)
+ pub virtualhost: Option<String>,
+ /// Username to connect with
+ pub username: String,
+ /// File to read the user password from. Contents are automatically stripped
+ pub password_file: PathBuf,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct BuildConfig {
+ /// Systems this instance builds for; the config file may provide a
+ /// single string or a list (see `deserialize_one_or_many`)
+ #[serde(deserialize_with = "deserialize_one_or_many")]
+ pub system: Vec<String>,
+ /// Maximum wall-clock time for a single build, in seconds (must be
+ /// at least 300, enforced by `Config::build_executor`)
+ pub build_timeout_seconds: u16,
+ /// Additional environment variables for build commands
+ pub extra_env: Option<HashMap<String, String>>,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
+pub struct GithubAppConfig {
+ /// Numeric GitHub App ID, used when issuing JWTs
+ pub app_id: u64,
+ /// Path to the App's private key (PKCS#1 PEM)
+ pub private_key: PathBuf,
+ /// OAuth client ID of the App
+ pub oauth_client_id: String,
+ /// File containing the OAuth client secret
+ pub oauth_client_secret_file: PathBuf,
+}
+
+/// Default runner instance number.
+const fn default_instance() -> u8 {
+ 1
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct RunnerConfig {
+ /// Instance number of this runner (defaults to 1)
+ #[serde(default = "default_instance")]
+ pub instance: u8,
+ /// Identity string; combined with the build systems in `Config::whoami`
+ pub identity: String,
+ /// List of GitHub repos we feel responsible for
+ pub repos: Option<Vec<String>>,
+ /// Whether to use the `trusted_users` field or just allow everyone
+ #[serde(default = "Default::default")]
+ pub disable_trusted_users: bool,
+ /// List of users who are allowed to build on less sandboxed platforms
+ pub trusted_users: Option<Vec<String>>,
+
+ /// If true, will create its own queue attached to the build job
+ /// exchange. This means that builders with this enabled will
+ /// trigger duplicate replies to the request for this
+ /// architecture.
+ ///
+ /// This should only be turned on for development.
+ pub build_all_jobs: Option<bool>,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct CheckoutConfig {
+ /// Root directory under which checkouts are placed
+ pub root: String,
+}
+
+impl Config {
+ /// Identity of this worker: `"<identity>-<system1,system2,...>"`.
+ pub fn whoami(&self) -> String {
+ format!("{}-{}", self.runner.identity, self.build.system.join(","))
+ }
+
+ /// Build the ACL from `runner.repos` and `runner.trusted_users`.
+ ///
+ /// Panics if `runner.repos` is missing, or if trusted users are
+ /// enabled but `runner.trusted_users` is missing.
+ pub fn acl(&self) -> acl::Acl {
+ let repos = self
+ .runner
+ .repos
+ .clone()
+ .expect("fetching config's runner.repos");
+
+ // `disable_trusted_users` means "everyone is trusted", expressed
+ // as an absent allow-list.
+ let trusted_users = if self.runner.disable_trusted_users {
+ None
+ } else {
+ Some(
+ self.runner
+ .trusted_users
+ .clone()
+ .expect("fetching config's runner.trusted_users"),
+ )
+ };
+
+ acl::Acl::new(repos, trusted_users)
+ }
+
+ /// Create a GitHub client authenticated with the App's OAuth client
+ /// credentials (the secret is read from disk and trimmed).
+ ///
+ /// Panics if no GitHub App is configured or the secret file is unreadable.
+ pub fn github(&self) -> Github {
+ let token = std::fs::read_to_string(
+ self.github_app
+ .clone()
+ .expect("No GitHub app configured")
+ .oauth_client_secret_file,
+ )
+ .expect("Couldn't read from GitHub app token")
+ let token = token.trim();
+ Github::new(
+ "github.com/Project-Tick/tickborg",
+ Credentials::Client(
+ self.github_app
+ .clone()
+ .expect("No GitHub app configured")
+ .oauth_client_id,
+ token.to_owned(),
+ ),
+ )
+ .expect("Unable to create a github client instance")
+ }
+
+ /// Create a per-installation GitHub client factory with empty caches.
+ ///
+ /// Panics if no GitHub App is configured.
+ pub fn github_app_vendingmachine(&self) -> GithubAppVendingMachine {
+ GithubAppVendingMachine {
+ conf: self.github_app.clone().unwrap(),
+ id_cache: HashMap::new(),
+ client_cache: HashMap::new(),
+ }
+ }
+
+ /// Construct the build executor from `build.build_timeout_seconds`.
+ ///
+ /// Panics if the configured timeout is below the 300-second minimum.
+ pub fn build_executor(&self) -> BuildExecutor {
+ if self.build.build_timeout_seconds < 300 {
+ error!(?self.build.build_timeout_seconds, "Please set build_timeout_seconds to at least 300");
+ panic!();
+ }
+
+ BuildExecutor::new(
+ self.build.build_timeout_seconds,
+ )
+ }
+}
+
+impl RabbitMqConfig {
+ /// Build an AMQP(S) connection URI from this configuration.
+ ///
+ /// The password is read from `password_file` and trimmed, so a trailing
+ /// newline in the secret file does not end up inside the URI; this
+ /// matches the documented "contents are automatically stripped"
+ /// behaviour of `password_file` and the token handling in
+ /// `Config::github`.
+ ///
+ /// # Errors
+ /// Returns the underlying I/O error (after logging) if the password
+ /// file cannot be read.
+ pub fn as_uri(&self) -> Result<String, std::io::Error> {
+ let password = std::fs::read_to_string(&self.password_file).inspect_err(|_| {
+ error!(
+ "Unable to read RabbitMQ password file at {:?}",
+ self.password_file
+ );
+ })?;
+ let uri = format!(
+ "{}://{}:{}@{}/{}",
+ if self.ssl { "amqps" } else { "amqp" },
+ self.username,
+ password.trim(),
+ self.host,
+ self.virtualhost.clone().unwrap_or_else(|| "/".to_owned()),
+ );
+ Ok(uri)
+ }
+}
+
+/// Load a `Config` from the JSON file at `filename`.
+///
+/// Panics if the file cannot be opened, read, or parsed.
+pub fn load(filename: &Path) -> Config {
+ let mut file = File::open(filename).unwrap();
+ let mut contents = String::new();
+ file.read_to_string(&mut contents).unwrap();
+
+ let deserialized: Config = serde_json::from_str(&contents).unwrap();
+
+ deserialized
+}
+
+/// Hands out GitHub clients authenticated per App installation, caching
+/// both installation-ID lookups and the constructed clients.
+pub struct GithubAppVendingMachine {
+ conf: GithubAppConfig,
+ /// (owner, repo) -> installation ID; `None` caches failed lookups.
+ id_cache: HashMap<(String, String), Option<u64>>,
+ /// installation ID -> ready-to-use client.
+ client_cache: HashMap<u64, Github>,
+}
+
+impl GithubAppVendingMachine {
+ /// User-agent string sent with App-authenticated requests.
+ fn useragent(&self) -> &'static str {
+ "github.com/Project-Tick/tickborg (app)"
+ }
+
+ /// Build JWT credentials from the App's PKCS#1 PEM private key.
+ ///
+ /// Panics if the key file is unreadable or the credentials cannot be
+ /// constructed.
+ fn jwt(&self) -> JWTCredentials {
+ let pem = rustls_pki_types::PrivatePkcs1KeyDer::from_pem_file(&self.conf.private_key)
+ .expect("Unable to read private key");
+ let private_key_der = pem.secret_pkcs1_der().to_vec();
+ JWTCredentials::new(self.conf.app_id, private_key_der)
+ .expect("Unable to create JWTCredentials")
+ }
+
+ /// Look up (and cache) the App installation ID for `owner`/`repo`.
+ ///
+ /// Failed lookups are cached as `None`, so a repo without an
+ /// installation will not be queried again for the lifetime of this
+ /// vending machine.
+ async fn install_id_for_repo(&mut self, owner: &str, repo: &str) -> Option<u64> {
+ let useragent = self.useragent();
+ let jwt = self.jwt();
+
+ let key = (owner.to_owned(), repo.to_owned());
+
+ match self.id_cache.entry(key) {
+ Entry::Occupied(entry) => *entry.get(),
+ Entry::Vacant(entry) => {
+ info!("Looking up install ID for {}/{}", owner, repo);
+
+ // A throwaway JWT-authenticated client just for the lookup.
+ let lookup_gh = Github::new(useragent, Credentials::JWT(jwt)).unwrap();
+
+ let v = match lookup_gh.app().find_repo_installation(owner, repo).await {
+ Ok(install_id) => {
+ debug!("Received install ID {:?}", install_id);
+ Some(install_id.id)
+ }
+ Err(e) => {
+ warn!("Error during install ID lookup: {:?}", e);
+ None
+ }
+ };
+ *entry.insert(v)
+ }
+ }
+ }
+
+ /// Get a client authorized for the repo's installation, creating and
+ /// caching one if needed. Returns `None` if the App is not installed
+ /// on that repo (or the lookup failed).
+ pub async fn for_repo<'a>(&'a mut self, owner: &str, repo: &str) -> Option<&'a Github> {
+ let useragent = self.useragent();
+ let jwt = self.jwt();
+ let install_id = self.install_id_for_repo(owner, repo).await?;
+
+ Some(self.client_cache.entry(install_id).or_insert_with(|| {
+ Github::new(
+ useragent,
+ Credentials::InstallationToken(InstallationTokenGenerator::new(install_id, jwt)),
+ )
+ .expect("Unable to create a github client instance")
+ }))
+ }
+}
+
+/// Deserialize either a bare string or a list of strings into a
+/// `Vec<String>` (used for `BuildConfig::system`).
+// Copied from https://stackoverflow.com/a/43627388
+fn deserialize_one_or_many<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
+where
+ D: Deserializer<'de>,
+{
+ struct StringOrVec(PhantomData<Vec<String>>);
+
+ impl<'de> de::Visitor<'de> for StringOrVec {
+ type Value = Vec<String>;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("string or list of strings")
+ }
+
+ // A single string becomes a one-element vector.
+ fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
+ where
+ E: de::Error,
+ {
+ Ok(vec![value.to_owned()])
+ }
+
+ // A sequence is deserialized element-wise as usual.
+ fn visit_seq<S>(self, visitor: S) -> Result<Self::Value, S::Error>
+ where
+ S: de::SeqAccess<'de>,
+ {
+ serde::de::Deserialize::deserialize(de::value::SeqAccessDeserializer::new(visitor))
+ }
+ }
+
+ deserializer.deserialize_any(StringOrVec(PhantomData))
+}
diff --git a/ofborg/tickborg/src/easyamqp.rs b/ofborg/tickborg/src/easyamqp.rs
new file mode 100644
index 0000000000..2a84bb84ff
--- /dev/null
+++ b/ofborg/tickborg/src/easyamqp.rs
@@ -0,0 +1,287 @@
+/// Settings for starting an AMQP consumer (`basic.consume`); field
+/// documentation follows the AMQP 0-9-1 specification.
+pub struct ConsumeConfig {
+ /// Specifies the name of the queue to consume from.
+ pub queue: String,
+
+ /// Specifies the identifier for the consumer. The consumer tag is
+ /// local to a channel, so two clients can use the same consumer
+ /// tags. If this field is empty the server will generate a unique
+ /// tag.
+ ///
+ /// The client MUST NOT specify a tag that refers to an existing
+ /// consumer. Error code: not-allowed
+ pub consumer_tag: String,
+
+ /// If the no-local field is set the server will not send messages
+ /// to the connection that published them.
+ pub no_local: bool,
+
+ /// If this field is set the server does not expect
+ /// acknowledgements for messages. That is, when a message is
+ /// delivered to the client the server assumes the delivery will
+ /// succeed and immediately dequeues it. This functionality may
+ /// increase performance but at the cost of reliability. Messages
+ /// can get lost if a client dies before they are delivered to the
+ /// application.
+ pub no_ack: bool,
+
+ /// Request exclusive consumer access, meaning only this consumer
+ /// can access the queue.
+ ///
+ /// The client MAY NOT gain exclusive access to a queue that
+ /// already has active consumers. Error code: access-refused
+ pub exclusive: bool,
+
+ /// If set, the server will not respond to the method. The client
+ /// should not wait for a reply method. If the server could not
+ /// complete the method it will raise a channel or connection
+ /// exception.
+ pub no_wait: bool,
+}
+
+/// Settings for binding a queue to an exchange (`queue.bind`); field
+/// documentation follows the AMQP 0-9-1 specification.
+pub struct BindQueueConfig {
+ /// Specifies the name of the queue to bind.
+ ///
+ /// The client MUST either specify a queue name or have previously
+ /// declared a queue on the same channel Error code: not-found
+ ///
+ /// The client MUST NOT attempt to bind a queue that does not
+ /// exist. Error code: not-found
+ pub queue: String,
+
+ /// Name of the exchange to bind to.
+ ///
+ /// A client MUST NOT be allowed to bind a queue to a non-existent
+ /// exchange. Error code: not-found
+ ///
+ /// The server MUST accept a blank exchange name to mean the
+ /// default exchange.
+ pub exchange: String,
+
+ /// Specifies the routing key for the binding. The routing key is
+ /// used for routing messages depending on the exchange
+ /// configuration. Not all exchanges use a routing key - refer to
+ /// the specific exchange documentation. If the queue name is
+ /// empty, the server uses the last queue declared on the channel.
+ /// If the routing key is also empty, the server uses this queue
+ /// name for the routing key as well. If the queue name is
+ /// provided but the routing key is empty, the server does the
+ /// binding with that empty routing key. The meaning of empty
+ /// routing keys depends on the exchange implementation.
+ ///
+ /// If a message queue binds to a direct exchange using routing
+ /// key K and a publisher sends the exchange a message with
+ /// routing key R, then the message MUST be passed to the message
+ /// queue if K = R.
+ pub routing_key: Option<String>,
+
+ /// If set, the server will not respond to the method. The client
+ /// should not wait for a reply method. If the server could not
+ /// complete the method it will raise a channel or connection
+ /// exception.
+ pub no_wait: bool,
+}
+
+/// The four standard AMQP exchange types, plus an escape hatch for
+/// server-specific custom types.
+pub enum ExchangeType {
+ Topic,
+ Headers,
+ Fanout,
+ Direct,
+ Custom(String),
+}
+
+// Render the exchange type as the lowercase string used on the wire.
+impl From<ExchangeType> for String {
+ fn from(exchange_type: ExchangeType) -> String {
+ match exchange_type {
+ ExchangeType::Topic => "topic".to_owned(),
+ ExchangeType::Headers => "headers".to_owned(),
+ ExchangeType::Fanout => "fanout".to_owned(),
+ ExchangeType::Direct => "direct".to_owned(),
+ ExchangeType::Custom(x) => x,
+ }
+ }
+}
+
+/// Settings for declaring an exchange (`exchange.declare`); field
+/// documentation follows the AMQP 0-9-1 specification.
+pub struct ExchangeConfig {
+ /// Exchange names starting with "amq." are reserved for
+ /// pre-declared and standardised exchanges. The client MAY
+ /// declare an exchange starting with "amq." if the passive option
+ /// is set, or the exchange already exists. Error code:
+ /// access-refused
+ ///
+ /// The exchange name consists of a non-empty sequence of these
+ /// characters: letters, digits, hyphen, underscore, period, or
+ /// colon. Error code: precondition-failed
+ pub exchange: String,
+
+ /// Each exchange belongs to one of a set of exchange types
+ /// implemented by the server. The exchange types define the
+ /// functionality of the exchange - i.e. how messages are routed
+ /// through it. It is not valid or meaningful to attempt to change
+ /// the type of an existing exchange.
+ ///
+ /// Exchanges cannot be redeclared with different types. The
+ /// client MUST not attempt to redeclare an existing exchange with
+ /// a different type than used in the original Exchange.Declare
+ /// method. Error code: not-allowed
+ ///
+ /// The client MUST NOT attempt to declare an exchange with a type
+ /// that the server does not support. Error code: command-invalid
+ pub exchange_type: ExchangeType,
+
+ /// If set, the server will reply with Declare-Ok if the exchange
+ /// already exists with the same name, and raise an error if not.
+ /// The client can use this to check whether an exchange exists
+ /// without modifying the server state. When set, all other method
+ /// fields except name and no-wait are ignored. A declare with
+ /// both passive and no-wait has no effect. Arguments are compared
+ /// for semantic equivalence.
+ ///
+ /// If set, and the exchange does not already exist, the server
+ /// MUST raise a channel exception with reply code 404 (not
+ /// found).
+ ///
+ /// If not set and the exchange exists, the server MUST check that
+ /// the existing exchange has the same values for type, durable,
+ /// and arguments fields. The server MUST respond with Declare-Ok
+ /// if the requested exchange matches these fields, and MUST raise
+ /// a channel exception if not.
+ pub passive: bool,
+
+ /// If set when creating a new exchange, the exchange will be
+ /// marked as durable. Durable exchanges remain active when a
+ /// server restarts. Non-durable exchanges (transient exchanges)
+ /// are purged if/when a server restarts.
+ ///
+ /// The server MUST support both durable and transient exchanges.
+ pub durable: bool,
+
+ /// If set, the exchange is deleted when all queues have finished
+ /// using it.
+ ///
+ /// The server SHOULD allow for a reasonable delay between the
+ /// point when it determines that an exchange is not being used
+ /// (or no longer used), and the point when it deletes the
+ /// exchange. At the least it must allow a client to create an
+ /// exchange and then bind a queue to it, with a small but
+ /// non-zero delay between these two actions.
+ ///
+ /// The server MUST ignore the auto-delete field if the exchange
+ /// already exists.
+ pub auto_delete: bool,
+
+ /// If set, the exchange may not be used directly by publishers,
+ /// but only when bound to other exchanges. Internal exchanges are
+ /// used to construct wiring that is not visible to applications.
+ pub internal: bool,
+
+ /// If set, the server will not respond to the method. The client
+ /// should not wait for a reply method. If the server could not
+ /// complete the method it will raise a channel or connection
+ /// exception.
+ pub no_wait: bool,
+}
+
+/// Settings for declaring a queue (`queue.declare`); field documentation
+/// follows the AMQP 0-9-1 specification.
+pub struct QueueConfig {
+ /// The queue name MAY be empty, in which case the server MUST
+ /// create a new queue with a unique generated name and return
+ /// this to the client in the Declare-Ok method.
+ ///
+ /// Queue names starting with "amq." are reserved for pre-declared
+ /// and standardised queues. The client MAY declare a queue
+ /// starting with "amq." if the passive option is set, or the
+ /// queue already exists. Error code: access-refused
+ ///
+ /// The queue name can be empty, or a sequence of these
+ /// characters: letters, digits, hyphen, underscore, period, or
+ /// colon. Error code: precondition-failed
+ pub queue: String,
+
+ /// If set, the server will reply with Declare-Ok if the queue
+ /// already exists with the same name, and raise an error if not.
+ /// The client can use this to check whether a queue exists
+ /// without modifying the server state. When set, all other
+ /// method fields except name and no-wait are ignored. A declare
+ /// with both passive and no-wait has no effect. Arguments are
+ /// compared for semantic equivalence.
+ ///
+ /// The client MAY ask the server to assert that a queue exists
+ /// without creating the queue if not. If the queue does not
+ /// exist, the server treats this as a failure. Error code:
+ /// not-found
+ ///
+ /// If not set and the queue exists, the server MUST check that
+ /// the existing queue has the same values for durable, exclusive,
+ /// auto-delete, and arguments fields. The server MUST respond
+ /// with Declare-Ok if the requested queue matches these fields,
+ /// and MUST raise a channel exception if not.
+ pub passive: bool,
+
+ /// If set when creating a new queue, the queue will be marked as
+ /// durable. Durable queues remain active when a server restarts.
+ /// Non-durable queues (transient queues) are purged if/when a
+ /// server restarts. Note that durable queues do not necessarily
+ /// hold persistent messages, although it does not make sense to
+ /// send persistent messages to a transient queue.
+ ///
+ /// The server MUST recreate the durable queue after a restart.
+ ///
+ /// The server MUST support both durable and transient queues.
+ pub durable: bool,
+
+ /// Exclusive queues may only be accessed by the current
+ /// connection, and are deleted when that connection closes.
+ /// Passive declaration of an exclusive queue by other connections
+ /// are not allowed.
+ ///
+ /// The server MUST support both exclusive (private) and
+ /// non-exclusive (shared) queues.
+ /// The client MAY NOT attempt to use a queue that was declared as
+ /// exclusive by another still-open connection. Error code:
+ /// resource-locked
+ pub exclusive: bool,
+
+ /// If set, the queue is deleted when all consumers have finished
+ /// using it. The last consumer can be cancelled either explicitly
+ /// or because its channel is closed. If there was no consumer
+ /// ever on the queue, it won't be deleted. Applications can
+ /// explicitly delete auto-delete queues using the Delete method
+ /// as normal.
+ ///
+ /// The server MUST ignore the auto-delete field if the queue
+ /// already exists.
+ pub auto_delete: bool,
+
+ /// If set, the server will not respond to the method. The client
+ /// should not wait for a reply method. If the server could not
+ /// complete the method it will raise a channel or connection
+ /// exception.
+ pub no_wait: bool,
+}
+
+/// Backend-agnostic channel operations: declaring exchanges and queues,
+/// and binding queues to exchanges.
+pub trait ChannelExt {
+ /// Error type of the underlying AMQP implementation.
+ type Error;
+
+ fn declare_exchange(
+ &mut self,
+ config: ExchangeConfig,
+ ) -> impl std::future::Future<Output = Result<(), Self::Error>>;
+ fn declare_queue(
+ &mut self,
+ config: QueueConfig,
+ ) -> impl std::future::Future<Output = Result<(), Self::Error>>;
+ fn bind_queue(
+ &mut self,
+ config: BindQueueConfig,
+ ) -> impl std::future::Future<Output = Result<(), Self::Error>>;
+}
+
+/// Attach a consumer callback `C` to a channel-like object, yielding a
+/// handle that drives message consumption.
+pub trait ConsumerExt<'a, C> {
+ /// Error type of the underlying AMQP implementation.
+ type Error;
+ /// Value returned on success, typically a future driving the consumer.
+ type Handle;
+
+ fn consume(
+ self,
+ callback: C,
+ config: ConsumeConfig,
+ ) -> impl std::future::Future<Output = Result<Self::Handle, Self::Error>>;
+}
diff --git a/ofborg/tickborg/src/easylapin.rs b/ofborg/tickborg/src/easylapin.rs
new file mode 100644
index 0000000000..56d90cad15
--- /dev/null
+++ b/ofborg/tickborg/src/easylapin.rs
@@ -0,0 +1,251 @@
+use std::pin::Pin;
+use std::sync::Arc;
+
+use crate::config::RabbitMqConfig;
+use crate::easyamqp::{
+ BindQueueConfig, ChannelExt, ConsumeConfig, ConsumerExt, ExchangeConfig, ExchangeType,
+ QueueConfig,
+};
+use crate::notifyworker::{NotificationReceiver, SimpleNotifyWorker};
+use crate::tickborg;
+use crate::worker::{Action, SimpleWorker};
+
+use lapin::message::Delivery;
+use lapin::options::{
+ BasicAckOptions, BasicConsumeOptions, BasicNackOptions, BasicPublishOptions, BasicQosOptions,
+ ExchangeDeclareOptions, QueueBindOptions, QueueDeclareOptions,
+};
+use lapin::types::FieldTable;
+use lapin::{BasicProperties, Channel, Connection, ConnectionProperties, ExchangeKind};
+use tokio_stream::StreamExt;
+use tracing::{debug, trace};
+
+/// Connect to RabbitMQ using the settings in `cfg`, advertising the
+/// tickborg version as a client property.
+pub async fn from_config(cfg: &RabbitMqConfig) -> Result<Connection, lapin::Error> {
+ let opts = ConnectionProperties::default()
+ .with_client_property("tickborg_version".into(), tickborg::VERSION.into());
+ Connection::connect(&cfg.as_uri()?, opts).await
+}
+
+// lapin implementation of the easyamqp channel operations.
+impl ChannelExt for Channel {
+ type Error = lapin::Error;
+
+ /// Declare an exchange with the options from `config`.
+ ///
+ /// Only `Topic` and `Fanout` exchange types are mapped; any other
+ /// type panics.
+ async fn declare_exchange(&mut self, config: ExchangeConfig) -> Result<(), Self::Error> {
+ let opts = ExchangeDeclareOptions {
+ passive: config.passive,
+ durable: config.durable,
+ auto_delete: config.auto_delete,
+ internal: config.internal,
+ nowait: config.no_wait,
+ };
+
+ let kind = match config.exchange_type {
+ ExchangeType::Topic => ExchangeKind::Topic,
+ ExchangeType::Fanout => ExchangeKind::Fanout,
+ _ => panic!("exchange kind"),
+ };
+ self.exchange_declare(config.exchange.into(), kind, opts, FieldTable::default())
+ .await?;
+ Ok(())
+ }
+
+ /// Declare a queue with the options from `config`.
+ async fn declare_queue(&mut self, config: QueueConfig) -> Result<(), Self::Error> {
+ let opts = QueueDeclareOptions {
+ passive: config.passive,
+ durable: config.durable,
+ exclusive: config.exclusive,
+ auto_delete: config.auto_delete,
+ nowait: config.no_wait,
+ };
+
+ self.queue_declare(config.queue.into(), opts, FieldTable::default())
+ .await?;
+ Ok(())
+ }
+
+ /// Bind a queue to an exchange; a missing routing key binds with the
+ /// empty routing key.
+ async fn bind_queue(&mut self, config: BindQueueConfig) -> Result<(), Self::Error> {
+ let opts = QueueBindOptions {
+ nowait: config.no_wait,
+ };
+
+ self.queue_bind(
+ config.queue.into(),
+ config.exchange.into(),
+ config.routing_key.unwrap_or_else(|| "".into()).into(),
+ opts,
+ FieldTable::default(),
+ )
+ .await?;
+ Ok(())
+ }
+}
+
+// Drive a SimpleWorker from a lapin channel: each delivery is parsed into
+// a job, the worker's actions are applied, then the loop continues until
+// the consumer stream ends or yields an error.
+//
+// Note: only `queue` and `consumer_tag` from ConsumeConfig are honoured;
+// the remaining flags are ignored (`BasicConsumeOptions::default()`).
+impl<'a, W: SimpleWorker + 'a> ConsumerExt<'a, W> for Channel {
+ type Error = lapin::Error;
+ type Handle = Pin<Box<dyn Future<Output = ()> + 'a>>;
+
+ async fn consume(
+ self,
+ mut worker: W,
+ config: ConsumeConfig,
+ ) -> Result<Self::Handle, Self::Error> {
+ let mut consumer = self
+ .basic_consume(
+ config.queue.into(),
+ config.consumer_tag.into(),
+ BasicConsumeOptions::default(),
+ FieldTable::default(),
+ )
+ .await?;
+ Ok(Box::pin(async move {
+ while let Some(Ok(deliver)) = consumer.next().await {
+ debug!(?deliver.delivery_tag, "consumed delivery");
+ let content_type = deliver.properties.content_type();
+ // Panics (via expect) if the worker cannot parse the message.
+ let job = worker
+ .msg_to_job(
+ deliver.routing_key.as_str(),
+ &content_type.as_ref().map(|s| s.to_string()),
+ &deliver.data,
+ )
+ .await
+ .expect("worker unexpected message consumed");
+
+ // Ack/nack/publish exactly as the worker instructs.
+ for action in worker.consumer(&job).await {
+ action_deliver(&self, &deliver, action)
+ .await
+ .expect("action deliver failure");
+ }
+ debug!(?deliver.delivery_tag, "done");
+ }
+ }))
+ }
+}
+
+/// Same as a regular channel, but with prefetch limited to one
+/// unacknowledged message, used for services with multiple instances so
+/// work is dispatched fairly between them.
+pub struct WorkerChannel(pub Channel);
+
+impl<'a, W: SimpleWorker + 'a> ConsumerExt<'a, W> for WorkerChannel {
+ type Error = lapin::Error;
+ type Handle = Pin<Box<dyn Future<Output = ()> + 'a>>;
+
+ /// Set QoS prefetch to 1, then delegate to the plain `Channel` impl.
+ async fn consume(self, worker: W, config: ConsumeConfig) -> Result<Self::Handle, Self::Error> {
+ self.0.basic_qos(1, BasicQosOptions::default()).await?;
+ self.0.consume(worker, config).await
+ }
+}
+
+/// Pairs a channel with one delivery so that a notify worker can report
+/// `Action`s (ack/nack/publish) back for that specific delivery.
+pub struct ChannelNotificationReceiver {
+ channel: lapin::Channel,
+ deliver: Delivery,
+}
+
+impl ChannelNotificationReceiver {
+ pub fn new(channel: lapin::Channel, deliver: Delivery) -> Self {
+ ChannelNotificationReceiver { channel, deliver }
+ }
+}
+
+#[async_trait::async_trait]
+impl NotificationReceiver for ChannelNotificationReceiver {
+ /// Apply the action to this receiver's delivery; panics if the AMQP
+ /// operation fails.
+ async fn tell(&self, action: Action) {
+ action_deliver(&self.channel, &self.deliver, action)
+ .await
+ .expect("action deliver failure");
+ }
+}
+
+// FIXME the consumer trait for SimpleWorker and SimpleNotifyWorker conflict,
+// but one could probably be implemented in terms of the other instead.
+/// Channel wrapper for `SimpleNotifyWorker`s, which report progress through
+/// a `NotificationReceiver` instead of returning a list of actions.
+pub struct NotifyChannel(pub Channel);
+
+impl<'a, W: SimpleNotifyWorker + 'a + Send> ConsumerExt<'a, W> for NotifyChannel {
+ type Error = lapin::Error;
+ type Handle = Pin<Box<dyn Future<Output = ()> + 'a + Send>>;
+
+ /// Set QoS prefetch to 1, then hand each delivery to the worker along
+ /// with a receiver it can use to ack/nack/publish as it goes.
+ ///
+ /// As with the plain `Channel` impl, only `queue` and `consumer_tag`
+ /// from `ConsumeConfig` are honoured.
+ async fn consume(self, worker: W, config: ConsumeConfig) -> Result<Self::Handle, Self::Error> {
+ self.0.basic_qos(1, BasicQosOptions::default()).await?;
+
+ let mut consumer = self
+ .0
+ .basic_consume(
+ config.queue.into(),
+ config.consumer_tag.into(),
+ BasicConsumeOptions::default(),
+ FieldTable::default(),
+ )
+ .await?;
+ let chan = self.0;
+ Ok(Box::pin(async move {
+ while let Some(Ok(deliver)) = consumer.next().await {
+ let delivery_tag = deliver.delivery_tag;
+ debug!(?delivery_tag, "consumed delivery");
+ // The receiver owns the delivery; the worker drives acking.
+ let receiver = ChannelNotificationReceiver {
+ channel: chan.clone(),
+ deliver,
+ };
+
+ let content_type = receiver.deliver.properties.content_type();
+ // Panics (via expect) if the worker cannot parse the message.
+ let job = worker
+ .msg_to_job(
+ receiver.deliver.routing_key.as_str(),
+ &content_type.as_ref().map(|s| s.to_string()),
+ &receiver.deliver.data,
+ )
+ .expect("worker unexpected message consumed");
+
+ worker.consumer(job, Arc::new(receiver)).await;
+ debug!(?delivery_tag, "done");
+ }
+ }))
+ }
+}
+
+/// Translate a worker `Action` into the corresponding AMQP operation on
+/// `chan` for the given delivery: ack, nack (with or without requeue),
+/// or publishing a follow-up message.
+async fn action_deliver(
+ chan: &Channel,
+ deliver: &Delivery,
+ action: Action,
+) -> Result<(), lapin::Error> {
+ match action {
+ Action::Ack => {
+ debug!(?deliver.delivery_tag, "action ack");
+ chan.basic_ack(deliver.delivery_tag, BasicAckOptions::default())
+ .await
+ }
+ Action::NackRequeue => {
+ debug!(?deliver.delivery_tag, "action nack requeue");
+ let opts = BasicNackOptions {
+ requeue: true,
+ ..Default::default()
+ };
+ chan.basic_nack(deliver.delivery_tag, opts).await
+ }
+ Action::NackDump => {
+ debug!(?deliver.delivery_tag, "action nack dump");
+ // Default options: no requeue, so the message is dropped or
+ // dead-lettered by the broker.
+ chan.basic_nack(deliver.delivery_tag, BasicNackOptions::default())
+ .await
+ }
+ Action::Publish(msg) => {
+ // Missing exchange/routing key mean the AMQP defaults ("").
+ let exch = msg.exchange.as_deref().unwrap_or("");
+ let key = msg.routing_key.as_deref().unwrap_or("");
+ trace!(?exch, ?key, "action publish");
+
+ let mut props = BasicProperties::default().with_delivery_mode(2); // persistent.
+
+ if let Some(s) = msg.content_type.as_deref() {
+ props = props.with_content_type(s.into());
+ }
+
+ // The second await resolves the publisher confirmation,
+ // which is discarded here.
+ let _confirmaton = chan
+ .basic_publish(
+ exch.into(),
+ key.into(),
+ BasicPublishOptions::default(),
+ &msg.content,
+ props,
+ )
+ .await?
+ .await?;
+ Ok(())
+ }
+ }
+}
diff --git a/ofborg/tickborg/src/evalchecker.rs b/ofborg/tickborg/src/evalchecker.rs
new file mode 100644
index 0000000000..ac1b4f8d39
--- /dev/null
+++ b/ofborg/tickborg/src/evalchecker.rs
@@ -0,0 +1,62 @@
+use std::fs::File;
+use std::io::Write;
+use std::path::Path;
+use std::process::Command;
+
+/// A generic check that can be run against a checkout
+pub struct EvalChecker {
+ /// Human-readable name of the check.
+ name: String,
+ /// Program to execute.
+ command: String,
+ /// Arguments passed to the program.
+ args: Vec<String>,
+}
+
+impl EvalChecker {
+ pub fn new(name: &str, command: &str, args: Vec<String>) -> EvalChecker {
+ EvalChecker {
+ name: name.to_owned(),
+ command: command.to_owned(),
+ args,
+ }
+ }
+
+ pub fn name(&self) -> &str {
+ &self.name
+ }
+
+ /// Run the check in `path`, capturing stdout then stderr into a file.
+ ///
+ /// Returns `Ok(file)` if the command exited successfully, `Err(file)`
+ /// if it failed or could not be started; either way the file contains
+ /// the captured output (or the spawn error message).
+ pub fn execute(&self, path: &Path) -> Result<File, File> {
+ let output = Command::new(&self.command)
+ .args(&self.args)
+ .current_dir(path)
+ .output();
+
+ // NOTE(review): `into_temp_path()` yields a TempPath that is
+ // dropped at the end of this statement, deleting the file; it is
+ // then recreated at the same path below and never cleaned up.
+ // Confirm whether leaking the file is intended.
+ let tmp = tempfile::NamedTempFile::new().expect("Failed to create temp file");
+ let tmp_path = tmp.into_temp_path().to_path_buf();
+
+ match output {
+ Ok(result) => {
+ let mut f = File::create(&tmp_path).expect("Failed to create output file");
+ f.write_all(&result.stdout).ok();
+ f.write_all(&result.stderr).ok();
+ drop(f);
+ // Reopen read-only for the caller.
+ let file = File::open(&tmp_path).expect("Failed to open output file");
+ if result.status.success() {
+ Ok(file)
+ } else {
+ Err(file)
+ }
+ }
+ Err(e) => {
+ let mut f = File::create(&tmp_path).expect("Failed to create output file");
+ write!(f, "Failed to execute {}: {}", self.command, e).ok();
+ drop(f);
+ Err(File::open(&tmp_path).expect("Failed to open output file"))
+ }
+ }
+ }
+
+ /// The check rendered as a single command line, for display.
+ pub fn cli_cmd(&self) -> String {
+ let mut cli = vec![self.command.clone()];
+ cli.append(&mut self.args.clone());
+ cli.join(" ")
+ }
+}
diff --git a/ofborg/tickborg/src/files.rs b/ofborg/tickborg/src/files.rs
new file mode 100644
index 0000000000..9e329d83e5
--- /dev/null
+++ b/ofborg/tickborg/src/files.rs
@@ -0,0 +1,8 @@
+use std::fs::File;
+use std::io::Read;
+
+/// Read the whole file into a `String`, replacing invalid UTF-8 with
+/// U+FFFD. Panics if reading fails.
+pub fn file_to_str(f: &mut File) -> String {
+ let mut buffer = Vec::new();
+ f.read_to_end(&mut buffer).expect("Reading eval output");
+ String::from(String::from_utf8_lossy(&buffer))
+}
diff --git a/ofborg/tickborg/src/ghevent/common.rs b/ofborg/tickborg/src/ghevent/common.rs
new file mode 100644
index 0000000000..2079280b9c
--- /dev/null
+++ b/ofborg/tickborg/src/ghevent/common.rs
@@ -0,0 +1,31 @@
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct Comment {
+ pub body: String,
+ pub user: User,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct User {
+ pub login: String,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct Repository {
+ pub owner: User,
+ pub name: String,
+ pub full_name: String,
+ pub clone_url: String,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct Issue {
+ pub number: u64,
+}
+
+/// A generic webhook that we received with minimal verification, only for handling in the GitHub
+/// webhook receiver.
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct GenericWebhook {
+ /// The repository the event originated from
+ pub repository: Repository,
+}
diff --git a/ofborg/tickborg/src/ghevent/issuecomment.rs b/ofborg/tickborg/src/ghevent/issuecomment.rs
new file mode 100644
index 0000000000..32fe136722
--- /dev/null
+++ b/ofborg/tickborg/src/ghevent/issuecomment.rs
@@ -0,0 +1,19 @@
+use crate::ghevent::{Comment, Issue, Repository};
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct IssueComment {
+ pub action: IssueCommentAction,
+ pub comment: Comment,
+ pub repository: Repository,
+ pub issue: Issue,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug, PartialEq, Eq)]
+#[serde(rename_all = "snake_case")]
+pub enum IssueCommentAction {
+ Created,
+ Pinned,
+ Unpinned,
+ Edited,
+ Deleted,
+}
diff --git a/ofborg/tickborg/src/ghevent/mod.rs b/ofborg/tickborg/src/ghevent/mod.rs
new file mode 100644
index 0000000000..243758800a
--- /dev/null
+++ b/ofborg/tickborg/src/ghevent/mod.rs
@@ -0,0 +1,9 @@
+mod common;
+mod issuecomment;
+mod pullrequestevent;
+
+pub use self::common::{Comment, GenericWebhook, Issue, Repository, User};
+pub use self::issuecomment::{IssueComment, IssueCommentAction};
+pub use self::pullrequestevent::{
+ PullRequest, PullRequestAction, PullRequestEvent, PullRequestState,
+};
diff --git a/ofborg/tickborg/src/ghevent/pullrequestevent.rs b/ofborg/tickborg/src/ghevent/pullrequestevent.rs
new file mode 100644
index 0000000000..3f25201fe9
--- /dev/null
+++ b/ofborg/tickborg/src/ghevent/pullrequestevent.rs
@@ -0,0 +1,81 @@
+use crate::ghevent::Repository;
+
+#[derive(serde::Serialize, serde::Deserialize)]
+pub struct PullRequestEvent {
+ pub action: PullRequestAction,
+ pub number: u64,
+ pub repository: Repository,
+ pub pull_request: PullRequest,
+ pub changes: Option<PullRequestChanges>,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct PullRequestChanges {
+ pub base: Option<BaseChange>,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct BaseChange {
+ #[serde(rename = "ref")]
+ pub git_ref: ChangeWas,
+ pub sha: ChangeWas,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug, PartialEq, Eq)]
+pub struct ChangeWas {
+ pub from: String,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug, PartialEq, Eq)]
+#[serde(rename_all = "snake_case")]
+pub enum PullRequestState {
+ Open,
+ Closed,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug, PartialEq, Eq)]
+#[serde(rename_all = "snake_case")]
+pub enum PullRequestAction {
+ Edited,
+ Opened,
+ Reopened,
+ Synchronize,
+ #[serde(other)]
+ Unknown,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct PullRequestRef {
+ #[serde(rename = "ref")]
+ pub git_ref: String,
+ pub sha: String,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct PullRequest {
+ pub state: PullRequestState,
+ pub base: PullRequestRef,
+ pub head: PullRequestRef,
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use serde_json;
+
+ #[test]
+ fn test_parse_changed_base() {
+ let data = include_str!("../../test-srcs/events/pr-changed-base.json");
+
+ let pr: PullRequestEvent = serde_json::from_str(data).expect("Should properly deserialize");
+ assert_eq!(pr.action, PullRequestAction::Edited);
+ }
+
+ #[test]
+ fn test_parse_unknown_action() {
+ let data = include_str!("../../test-srcs/events/pr-converted-to-draft.json");
+
+ let pr: PullRequestEvent = serde_json::from_str(data).expect("Should properly deserialize");
+ assert_eq!(pr.action, PullRequestAction::Unknown);
+ }
+}
diff --git a/ofborg/tickborg/src/lib.rs b/ofborg/tickborg/src/lib.rs
new file mode 100644
index 0000000000..313ff34ebd
--- /dev/null
+++ b/ofborg/tickborg/src/lib.rs
@@ -0,0 +1,103 @@
+#![recursion_limit = "512"]
+// Replacing .map(|arch| arch.to_string())
+// with .map(systems::System::to_string)
+//
+// seems much less clear and I just don't like it :)
+#![allow(clippy::redundant_closure)]
+
+use std::env;
+
+use tracing_subscriber::EnvFilter;
+use tracing_subscriber::prelude::*;
+
+pub mod acl;
+pub mod asynccmd;
+pub mod buildtool;
+pub mod checkout;
+pub mod clone;
+pub mod commentparser;
+pub mod commitstatus;
+pub mod config;
+pub mod easyamqp;
+pub mod easylapin;
+pub mod evalchecker;
+pub mod files;
+pub mod ghevent;
+pub mod locks;
+pub mod message;
+pub mod notifyworker;
+pub mod stats;
+pub mod systems;
+pub mod tagger;
+pub mod tasks;
+pub mod test_scratch;
+pub mod worker;
+pub mod writetoline;
+
+pub mod tickborg {
+ pub use crate::acl;
+ pub use crate::asynccmd;
+ pub use crate::buildtool;
+ pub use crate::checkout;
+ pub use crate::clone;
+ pub use crate::commentparser;
+ pub use crate::commitstatus;
+ pub use crate::config;
+ pub use crate::easyamqp;
+ pub use crate::evalchecker;
+ pub use crate::files;
+ pub use crate::ghevent;
+ pub use crate::locks;
+ pub use crate::message;
+ pub use crate::notifyworker;
+ pub use crate::stats;
+ pub use crate::systems;
+ pub use crate::tagger;
+ pub use crate::tasks;
+ pub use crate::test_scratch;
+ pub use crate::worker;
+ pub use crate::writetoline;
+
+ pub const VERSION: &str = env!("CARGO_PKG_VERSION");
+
+ pub fn partition_result<A, B>(results: Vec<Result<A, B>>) -> (Vec<A>, Vec<B>) {
+ let mut ok = Vec::new();
+ let mut err = Vec::new();
+ for result in results.into_iter() {
+ match result {
+ Ok(x) => {
+ ok.push(x);
+ }
+ Err(x) => {
+ err.push(x);
+ }
+ }
+ }
+
+ (ok, err)
+ }
+}
+
+pub fn setup_log() {
+ let filter_layer = EnvFilter::try_from_default_env()
+ .or_else(|_| EnvFilter::try_new("info"))
+ .unwrap();
+
+ let log_json = env::var("RUST_LOG_JSON").is_ok_and(|s| s == "1");
+
+ if log_json {
+ let fmt_layer = tracing_subscriber::fmt::layer().json();
+ tracing_subscriber::registry()
+ .with(filter_layer)
+ .with(fmt_layer)
+ .init();
+ } else {
+ let fmt_layer = tracing_subscriber::fmt::layer();
+ tracing_subscriber::registry()
+ .with(filter_layer)
+ .with(fmt_layer)
+ .init();
+ }
+
+ tracing::info!("Logging configured");
+}
diff --git a/ofborg/tickborg/src/locks.rs b/ofborg/tickborg/src/locks.rs
new file mode 100644
index 0000000000..d1d2ee4788
--- /dev/null
+++ b/ofborg/tickborg/src/locks.rs
@@ -0,0 +1,25 @@
+use fs2::FileExt;
+
+use std::fs;
+use std::io::Error;
+use std::path::PathBuf;
+
+pub trait Lockable {
+ fn lock_path(&self) -> PathBuf;
+
+ fn lock(&self) -> Result<Lock, Error> {
+ let lock = fs::File::create(self.lock_path())?;
+ lock.lock_exclusive()?;
+ Ok(Lock { lock: Some(lock) })
+ }
+}
+
+pub struct Lock {
+ lock: Option<fs::File>,
+}
+
+impl Lock {
+ pub fn unlock(&mut self) {
+ self.lock = None
+ }
+}
diff --git a/ofborg/tickborg/src/maintainers.nix b/ofborg/tickborg/src/maintainers.nix
new file mode 100644
index 0000000000..85dc8d85b4
--- /dev/null
+++ b/ofborg/tickborg/src/maintainers.nix
@@ -0,0 +1,118 @@
+{ changedattrsjson, changedpathsjson }:
+let
+ pkgs = import ./. {};
+
+ changedattrs = builtins.fromJSON (builtins.readFile changedattrsjson);
+ changedpaths = builtins.fromJSON (builtins.readFile changedpathsjson);
+
+ anyMatchingFile = filename:
+ let
+ matching = builtins.filter
+ (changed: pkgs.lib.strings.hasSuffix changed filename)
+ changedpaths;
+ in (builtins.length matching) > 0;
+
+ anyMatchingFiles = files:
+ (builtins.length (builtins.filter anyMatchingFile files)) > 0;
+
+ enrichedAttrs = builtins.map
+ (path: {
+ path = path;
+ name = builtins.concatStringsSep "." path;
+ })
+ changedattrs;
+
+ validPackageAttributes = builtins.filter
+ (pkg:
+ if (pkgs.lib.attrsets.hasAttrByPath pkg.path pkgs)
+ then (if (builtins.tryEval (pkgs.lib.attrsets.attrByPath pkg.path null pkgs)).success
+ then true
+ else builtins.trace "Failed to access ${pkg.name} even though it exists" false)
+ else builtins.trace "Failed to locate ${pkg.name}." false
+ )
+ enrichedAttrs;
+
+ attrsWithPackages = builtins.map
+ (pkg: pkg // { package = pkgs.lib.attrsets.attrByPath pkg.path null pkgs; })
+ validPackageAttributes;
+
+ attrsWithMaintainers = builtins.map
+ (pkg: pkg // { maintainers = (pkg.package.meta or {}).maintainers or []; })
+ attrsWithPackages;
+
+ attrsWeCanPing = builtins.filter
+ (pkg: if (builtins.length pkg.maintainers) > 0
+ then true
+ else builtins.trace "Package has no maintainers: ${pkg.name}" false
+ )
+ attrsWithMaintainers;
+
+ relevantFilenames = drv:
+ (pkgs.lib.lists.unique
+ (builtins.map
+ (pos: pos.file)
+ (builtins.filter (x: x != null)
+ [
+ (builtins.unsafeGetAttrPos "maintainers" (drv.meta or {}))
+ (builtins.unsafeGetAttrPos "src" drv)
+ # broken because name is always set by stdenv:
+ # # A hack to make `nix-env -qa` and `nix search` ignore broken packages.
+ # # TODO(@oxij): remove this assert when something like NixOS/nix#1771 gets merged into nix.
+ # name = assert validity.handled; name + lib.optionalString
+ #(builtins.unsafeGetAttrPos "name" drv)
+ (builtins.unsafeGetAttrPos "pname" drv)
+ (builtins.unsafeGetAttrPos "version" drv)
+
+ # Use ".meta.position" for cases when most of the package is
+ # defined in a "common" section and the only place where
+ # a reference to the file with the derivation is the "pos"
+ # attribute.
+ #
+ # ".meta.position" has the following form:
+ # "pkgs/tools/package-management/nix/default.nix:155"
+ # We transform it to the following:
+ # { file = "pkgs/tools/package-management/nix/default.nix"; }
+ { file = pkgs.lib.head (pkgs.lib.splitString ":" (drv.meta.position or "")); }
+ ]
+ )));
+
+ attrsWithFilenames = builtins.map
+ (pkg: pkg // { filenames = relevantFilenames pkg.package; })
+ attrsWithMaintainers;
+
+ attrsWithModifiedFiles = builtins.filter
+ (pkg: anyMatchingFiles pkg.filenames)
+ attrsWithFilenames;
+
+ listToPing = pkgs.lib.lists.flatten
+ (builtins.map
+ (pkg:
+ builtins.map (maintainer: {
+ handle = pkgs.lib.toLower maintainer.github;
+ packageName = pkg.name;
+ dueToFiles = pkg.filenames;
+ })
+ pkg.maintainers
+ )
+ attrsWithModifiedFiles);
+
+ byMaintainer = pkgs.lib.lists.foldr
+ (ping: collector: collector // { "${ping.handle}" = [ { inherit (ping) packageName dueToFiles; } ] ++ (collector."${ping.handle}" or []); })
+ {}
+ listToPing;
+
+ textForPackages = packages:
+ pkgs.lib.strings.concatStringsSep ", " (
+ builtins.map (pkg: pkg.packageName)
+ packages);
+
+ textPerMaintainer = pkgs.lib.attrsets.mapAttrs
+ (maintainer: packages: "- @${maintainer} for ${textForPackages packages}")
+ byMaintainer;
+
+ packagesPerMaintainer = pkgs.lib.attrsets.mapAttrs
+ (maintainer: packages:
+ builtins.map (pkg: pkg.packageName)
+ packages)
+ byMaintainer;
+in packagesPerMaintainer
diff --git a/ofborg/tickborg/src/maintainers.rs b/ofborg/tickborg/src/maintainers.rs
new file mode 100644
index 0000000000..ff1bec0cee
--- /dev/null
+++ b/ofborg/tickborg/src/maintainers.rs
@@ -0,0 +1,211 @@
+use crate::nix::Nix;
+
+use tempfile::NamedTempFile;
+
+use std::collections::{HashMap, HashSet};
+use std::io::Write;
+use std::path::Path;
+
+#[derive(serde::Deserialize, Debug, Eq, PartialEq)]
+pub struct ImpactedMaintainers(HashMap<Maintainer, Vec<Package>>);
+pub struct MaintainersByPackage(pub HashMap<Package, HashSet<Maintainer>>);
+
+#[derive(serde::Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
+pub struct Maintainer(String);
+impl<'a> From<&'a str> for Maintainer {
+ fn from(name: &'a str) -> Maintainer {
+ Maintainer(name.to_ascii_lowercase())
+ }
+}
+#[derive(serde::Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
+pub struct Package(String);
+impl<'a> From<&'a str> for Package {
+ fn from(name: &'a str) -> Package {
+ Package(name.to_owned())
+ }
+}
+
+#[derive(Debug)]
+pub enum CalculationError {
+ DeserializeError(serde_json::Error),
+ Io(std::io::Error),
+ Utf8(std::string::FromUtf8Error),
+}
+impl From<serde_json::Error> for CalculationError {
+ fn from(e: serde_json::Error) -> CalculationError {
+ CalculationError::DeserializeError(e)
+ }
+}
+impl From<std::io::Error> for CalculationError {
+ fn from(e: std::io::Error) -> CalculationError {
+ CalculationError::Io(e)
+ }
+}
+impl From<std::string::FromUtf8Error> for CalculationError {
+ fn from(e: std::string::FromUtf8Error) -> CalculationError {
+ CalculationError::Utf8(e)
+ }
+}
+
+impl ImpactedMaintainers {
+ pub fn calculate(
+ nix: &Nix,
+ checkout: &Path,
+ paths: &[String],
+ attributes: &[Vec<&str>],
+ ) -> Result<ImpactedMaintainers, CalculationError> {
+ let mut path_file = NamedTempFile::new()?;
+ let pathstr = serde_json::to_string(&paths)?;
+ write!(path_file, "{pathstr}")?;
+
+ let mut attr_file = NamedTempFile::new()?;
+ let attrstr = serde_json::to_string(&attributes)?;
+ write!(attr_file, "{attrstr}")?;
+
+ let mut argstrs: HashMap<&str, &str> = HashMap::new();
+ argstrs.insert("changedattrsjson", attr_file.path().to_str().unwrap());
+ argstrs.insert("changedpathsjson", path_file.path().to_str().unwrap());
+
+ let mut cmd = nix.safely_evaluate_expr_cmd(
+ checkout,
+ include_str!("./maintainers.nix"),
+ argstrs,
+ &[path_file.path(), attr_file.path()],
+ );
+
+ let ret = cmd.output()?;
+
+ Ok(serde_json::from_str(&String::from_utf8(ret.stdout)?)?)
+ }
+
+ pub fn maintainers(&self) -> Vec<&str> {
+ self.0
+ .keys()
+ .map(|Maintainer(name)| name.as_str())
+ .collect()
+ }
+
+ pub fn maintainers_by_package(&self) -> MaintainersByPackage {
+ let mut bypkg = MaintainersByPackage(HashMap::new());
+
+ for (maintainer, packages) in self.0.iter() {
+ for package in packages.iter() {
+ bypkg
+ .0
+ .entry(package.clone())
+ .or_default()
+ .insert(maintainer.clone());
+ }
+ }
+
+ bypkg
+ }
+}
+
+impl std::fmt::Display for ImpactedMaintainers {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ let mut is_first = true;
+ for (Maintainer(maintainer), packages) in &self.0 {
+ if is_first {
+ is_first = false;
+ } else {
+ f.write_str("\n")?;
+ }
+
+ f.write_fmt(format_args!("{maintainer}"))?;
+
+ let (first, rest) = {
+ let mut packages = packages.iter();
+ (packages.next(), packages)
+ };
+ if let Some(Package(package)) = first {
+ f.write_fmt(format_args!(": {package}"))?;
+
+ for Package(package) in rest {
+ f.write_fmt(format_args!(", {package}"))?;
+ }
+ }
+ }
+ Ok(())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::checkout::cached_cloner;
+ use crate::clone::GitClonable;
+ use crate::test_scratch::TestScratch;
+ use std::env;
+ use std::ffi::OsStr;
+ use std::path::{Path, PathBuf};
+ use std::process::Command;
+ use std::process::Stdio;
+
+ #[cfg(target_os = "linux")]
+ const SYSTEM: &str = "x86_64-linux";
+ #[cfg(target_os = "macos")]
+ const SYSTEM: &str = "x86_64-darwin";
+
+ fn tpath(component: &str) -> PathBuf {
+ Path::new(env!("CARGO_MANIFEST_DIR")).join(component)
+ }
+
+ fn make_pr_repo(bare: &Path, co: &Path) -> String {
+ let output = Command::new("bash")
+ .current_dir(tpath("./test-srcs"))
+ .arg("./make-maintainer-pr.sh")
+ .arg(bare)
+ .arg(co)
+ .stdout(Stdio::piped())
+ .output()
+ .expect("building the test PR failed");
+
+ let stderr =
+ String::from_utf8(output.stderr).unwrap_or_else(|err| format!("warning: {err}"));
+ println!("{stderr}");
+
+ let hash = String::from_utf8(output.stdout).expect("Should just be a hash");
+ hash.trim().to_owned()
+ }
+
+ #[test]
+ fn example() {
+ let workingdir = TestScratch::new_dir("test-maintainers-example");
+
+ let bare = TestScratch::new_dir("test-maintainers-example-bare");
+ let mk_co = TestScratch::new_dir("test-maintainers-example-co");
+ let hash = make_pr_repo(&bare.path(), &mk_co.path());
+
+ let attributes = vec![vec!["foo", "bar", "packageA"]];
+
+ let cloner = cached_cloner(&workingdir.path());
+ let project = cloner.project("maintainer-test", bare.string());
+
+ let working_co = project
+ .clone_for("testing-maintainer-list".to_owned(), "123".to_owned())
+ .expect("clone should work");
+
+ working_co
+ .checkout_origin_ref(OsStr::new("master"))
+ .unwrap();
+
+ let paths = working_co.files_changed_from_head(&hash).unwrap();
+
+ working_co.checkout_ref(OsStr::new(&hash)).unwrap();
+
+ let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned());
+ let nix = Nix::new(SYSTEM.to_owned(), remote, 1800, None);
+
+ let parsed =
+ ImpactedMaintainers::calculate(&nix, &working_co.clone_to(), &paths, &attributes);
+
+ let mut expect = ImpactedMaintainers(HashMap::new());
+ expect.0.insert(
+ Maintainer::from("test"),
+ vec![Package::from("foo.bar.packageA")],
+ );
+
+ assert_eq!(parsed.unwrap(), expect);
+ }
+}
diff --git a/ofborg/tickborg/src/message/buildjob.rs b/ofborg/tickborg/src/message/buildjob.rs
new file mode 100644
index 0000000000..b09eae58bf
--- /dev/null
+++ b/ofborg/tickborg/src/message/buildjob.rs
@@ -0,0 +1,55 @@
+use crate::commentparser::Subset;
+use crate::message::{Pr, Repo};
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct BuildJob {
+ pub repo: Repo,
+ pub pr: Pr,
+ pub subset: Option<Subset>,
+ pub attrs: Vec<String>,
+ pub request_id: String,
+ pub logs: Option<ExchangeQueue>, // (Exchange, Routing Key)
+ pub statusreport: Option<ExchangeQueue>, // (Exchange, Routing Key)
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct QueuedBuildJobs {
+ pub job: BuildJob,
+ pub architectures: Vec<String>,
+}
+
+pub type ExchangeQueue = (Option<Exchange>, Option<RoutingKey>);
+type Exchange = String;
+type RoutingKey = String;
+
+impl BuildJob {
+ pub fn new(
+ repo: Repo,
+ pr: Pr,
+ subset: Subset,
+ attrs: Vec<String>,
+ logs: Option<ExchangeQueue>,
+ statusreport: Option<ExchangeQueue>,
+ request_id: String,
+ ) -> BuildJob {
+ let logbackrk = format!("{}.{}", repo.full_name.to_lowercase(), pr.number);
+
+ BuildJob {
+ repo,
+ pr,
+ subset: Some(subset),
+ attrs,
+ logs: Some(logs.unwrap_or((Some("logs".to_owned()), Some(logbackrk)))),
+ statusreport: Some(statusreport.unwrap_or((Some("build-results".to_owned()), None))),
+ request_id,
+ }
+ }
+}
+
+pub fn from(data: &[u8]) -> Result<BuildJob, serde_json::error::Error> {
+ serde_json::from_slice(data)
+}
+
+pub struct Actions {
+ pub system: String,
+}
diff --git a/ofborg/tickborg/src/message/buildlogmsg.rs b/ofborg/tickborg/src/message/buildlogmsg.rs
new file mode 100644
index 0000000000..1aed51829e
--- /dev/null
+++ b/ofborg/tickborg/src/message/buildlogmsg.rs
@@ -0,0 +1,17 @@
+#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
+pub struct BuildLogMsg {
+ pub system: String,
+ pub identity: String,
+ pub attempt_id: String,
+ pub line_number: u64,
+ pub output: String,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
+pub struct BuildLogStart {
+ pub system: String,
+ pub identity: String,
+ pub attempt_id: String,
+ pub attempted_attrs: Option<Vec<String>>,
+ pub skipped_attrs: Option<Vec<String>>,
+}
diff --git a/ofborg/tickborg/src/message/buildresult.rs b/ofborg/tickborg/src/message/buildresult.rs
new file mode 100644
index 0000000000..122edacae3
--- /dev/null
+++ b/ofborg/tickborg/src/message/buildresult.rs
@@ -0,0 +1,225 @@
+use crate::message::{Pr, Repo};
+
+use hubcaps::checks::Conclusion;
+
+#[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)]
+pub enum BuildStatus {
+ Skipped,
+ Success,
+ Failure,
+ TimedOut,
+ HashMismatch,
+ UnexpectedError { err: String },
+}
+
+impl From<BuildStatus> for String {
+ fn from(status: BuildStatus) -> String {
+ match status {
+ BuildStatus::Skipped => "No attempt".into(),
+ BuildStatus::Success => "Success".into(),
+ BuildStatus::Failure => "Failure".into(),
+ BuildStatus::HashMismatch => "A fixed output derivation's hash was incorrect".into(),
+ BuildStatus::TimedOut => "Timed out, unknown build status".into(),
+ BuildStatus::UnexpectedError { ref err } => format!("Unexpected error: {err}"),
+ }
+ }
+}
+
+impl From<BuildStatus> for Conclusion {
+ fn from(status: BuildStatus) -> Conclusion {
+ match status {
+ BuildStatus::Skipped => Conclusion::Skipped,
+ BuildStatus::Success => Conclusion::Success,
+ BuildStatus::Failure => Conclusion::Neutral,
+ BuildStatus::HashMismatch => Conclusion::Failure,
+ BuildStatus::TimedOut => Conclusion::Neutral,
+ BuildStatus::UnexpectedError { .. } => Conclusion::Neutral,
+ }
+ }
+}
+
+pub struct LegacyBuildResult {
+ pub repo: Repo,
+ pub pr: Pr,
+ pub system: String,
+ pub output: Vec<String>,
+ pub attempt_id: String,
+ pub request_id: String,
+ pub status: BuildStatus,
+ pub skipped_attrs: Option<Vec<String>>,
+ pub attempted_attrs: Option<Vec<String>>,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub enum V1Tag {
+ V1,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+#[serde(untagged)]
+pub enum BuildResult {
+ V1 {
+ tag: V1Tag, // use serde once all enum variants have a tag
+ repo: Repo,
+ pr: Pr,
+ system: String,
+ output: Vec<String>,
+ attempt_id: String,
+ request_id: String,
+ // removed success
+ status: BuildStatus,
+ skipped_attrs: Option<Vec<String>>,
+ attempted_attrs: Option<Vec<String>>,
+ },
+ Legacy {
+ repo: Repo,
+ pr: Pr,
+ system: String,
+ output: Vec<String>,
+ attempt_id: String,
+ request_id: String,
+ success: Option<bool>, // replaced by status
+ status: Option<BuildStatus>,
+ skipped_attrs: Option<Vec<String>>,
+ attempted_attrs: Option<Vec<String>>,
+ },
+}
+
+impl BuildResult {
+ pub fn legacy(&self) -> LegacyBuildResult {
+ // TODO: replace this with simpler structs for specific use cases, since
+ // it decouples the structs from serialization. These can be changed
+ // as long as we can translate all enum variants.
+ match *self {
+ BuildResult::Legacy {
+ ref repo,
+ ref pr,
+ ref system,
+ ref output,
+ ref attempt_id,
+ ref request_id,
+ ref attempted_attrs,
+ ref skipped_attrs,
+ ..
+ } => LegacyBuildResult {
+ repo: repo.to_owned(),
+ pr: pr.to_owned(),
+ system: system.to_owned(),
+ output: output.to_owned(),
+ attempt_id: attempt_id.to_owned(),
+ request_id: request_id.to_owned(),
+ status: self.status(),
+ attempted_attrs: attempted_attrs.to_owned(),
+ skipped_attrs: skipped_attrs.to_owned(),
+ },
+ BuildResult::V1 {
+ ref repo,
+ ref pr,
+ ref system,
+ ref output,
+ ref attempt_id,
+ ref request_id,
+ ref attempted_attrs,
+ ref skipped_attrs,
+ ..
+ } => LegacyBuildResult {
+ repo: repo.to_owned(),
+ pr: pr.to_owned(),
+ system: system.to_owned(),
+ output: output.to_owned(),
+ attempt_id: attempt_id.to_owned(),
+ request_id: request_id.to_owned(),
+ status: self.status(),
+ attempted_attrs: attempted_attrs.to_owned(),
+ skipped_attrs: skipped_attrs.to_owned(),
+ },
+ }
+ }
+
+ pub fn pr(&self) -> Pr {
+ match self {
+ BuildResult::Legacy { pr, .. } => pr.to_owned(),
+ BuildResult::V1 { pr, .. } => pr.to_owned(),
+ }
+ }
+
+ pub fn status(&self) -> BuildStatus {
+ match *self {
+ BuildResult::Legacy {
+ ref status,
+ ref success,
+ ..
+ } => status.to_owned().unwrap_or({
+ // Fallback for old format.
+ match *success {
+ None => BuildStatus::Skipped,
+ Some(true) => BuildStatus::Success,
+ Some(false) => BuildStatus::Failure,
+ }
+ }),
+ BuildResult::V1 { ref status, .. } => status.to_owned(),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use serde_json;
+
+ #[test]
+ fn v1_serialization() {
+ let input = r#"{"tag":"V1","repo":{"owner":"project-tick","name":"Project-Tick","full_name":"project-tick/Project-Tick","clone_url":"https://github.com/project-tick/Project-Tick.git"},"pr":{"target_branch":"master","number":42,"head_sha":"0000000000000000000000000000000000000000"},"system":"x86_64-linux","output":["unpacking sources"],"attempt_id":"attempt-id-foo","request_id":"bogus-request-id","status":"Success","skipped_attrs":["AAAAAASomeThingsFailToEvaluate"],"attempted_attrs":["hello"]}"#;
+ let result: BuildResult = serde_json::from_str(input).expect("result required");
+ assert_eq!(result.status(), BuildStatus::Success);
+ let output = serde_json::to_string(&result).expect("json required");
+ assert_eq!(
+ output,
+ r#"{"tag":"V1","repo":{"owner":"project-tick","name":"Project-Tick","full_name":"project-tick/Project-Tick","clone_url":"https://github.com/project-tick/Project-Tick.git"},"pr":{"target_branch":"master","number":42,"head_sha":"0000000000000000000000000000000000000000"},"system":"x86_64-linux","output":["unpacking sources"],"attempt_id":"attempt-id-foo","request_id":"bogus-request-id","status":"Success","skipped_attrs":["AAAAAASomeThingsFailToEvaluate"],"attempted_attrs":["hello"]}"#,
+ "json of: {:?}",
+ result
+ );
+ }
+
+ #[test]
+ fn legacy_serialization() {
+ let input = r#"{"repo":{"owner":"project-tick","name":"Project-Tick","full_name":"project-tick/Project-Tick","clone_url":"https://github.com/project-tick/Project-Tick.git"},"pr":{"target_branch":"master","number":42,"head_sha":"0000000000000000000000000000000000000000"},"system":"x86_64-linux","output":["unpacking sources"],"attempt_id":"attempt-id-foo","request_id":"bogus-request-id","success":true,"status":"Success","skipped_attrs":["AAAAAASomeThingsFailToEvaluate"],"attempted_attrs":["hello"]}"#;
+ let result: BuildResult = serde_json::from_str(input).expect("result required");
+ assert_eq!(result.status(), BuildStatus::Success);
+ let output = serde_json::to_string(&result).expect("json required");
+ assert_eq!(
+ output,
+ r#"{"repo":{"owner":"project-tick","name":"Project-Tick","full_name":"project-tick/Project-Tick","clone_url":"https://github.com/project-tick/Project-Tick.git"},"pr":{"target_branch":"master","number":42,"head_sha":"0000000000000000000000000000000000000000"},"system":"x86_64-linux","output":["unpacking sources"],"attempt_id":"attempt-id-foo","request_id":"bogus-request-id","success":true,"status":"Success","skipped_attrs":["AAAAAASomeThingsFailToEvaluate"],"attempted_attrs":["hello"]}"#,
+ "json of: {:?}",
+ result
+ );
+ }
+
+ #[test]
+ fn legacy_none_serialization() {
+ let input = r#"{"repo":{"owner":"project-tick","name":"Project-Tick","full_name":"project-tick/Project-Tick","clone_url":"https://github.com/project-tick/Project-Tick.git"},"pr":{"target_branch":"master","number":42,"head_sha":"0000000000000000000000000000000000000000"},"system":"x86_64-linux","output":[],"attempt_id":"attempt-id-foo","request_id":"bogus-request-id"}"#;
+ let result: BuildResult = serde_json::from_str(input).expect("result required");
+ assert_eq!(result.status(), BuildStatus::Skipped);
+ let output = serde_json::to_string(&result).expect("json required");
+ assert_eq!(
+ output,
+ r#"{"repo":{"owner":"project-tick","name":"Project-Tick","full_name":"project-tick/Project-Tick","clone_url":"https://github.com/project-tick/Project-Tick.git"},"pr":{"target_branch":"master","number":42,"head_sha":"0000000000000000000000000000000000000000"},"system":"x86_64-linux","output":[],"attempt_id":"attempt-id-foo","request_id":"bogus-request-id","success":null,"status":null,"skipped_attrs":null,"attempted_attrs":null}"#,
+ "json of: {:?}",
+ result
+ );
+ }
+
+ #[test]
+ fn legacy_no_status_serialization() {
+ let input = r#"{"repo":{"owner":"project-tick","name":"Project-Tick","full_name":"project-tick/Project-Tick","clone_url":"https://github.com/project-tick/Project-Tick.git"},"pr":{"target_branch":"master","number":42,"head_sha":"0000000000000000000000000000000000000000"},"system":"x86_64-linux","output":["unpacking sources"],"attempt_id":"attempt-id-foo","request_id":"bogus-request-id","success":true,"status":null,"skipped_attrs":["AAAAAASomeThingsFailToEvaluate"],"attempted_attrs":["hello"]}"#;
+ let result: BuildResult = serde_json::from_str(input).expect("result required");
+ assert_eq!(result.status(), BuildStatus::Success);
+ let output = serde_json::to_string(&result).expect("json required");
+ assert_eq!(
+ output,
+ r#"{"repo":{"owner":"project-tick","name":"Project-Tick","full_name":"project-tick/Project-Tick","clone_url":"https://github.com/project-tick/Project-Tick.git"},"pr":{"target_branch":"master","number":42,"head_sha":"0000000000000000000000000000000000000000"},"system":"x86_64-linux","output":["unpacking sources"],"attempt_id":"attempt-id-foo","request_id":"bogus-request-id","success":true,"status":null,"skipped_attrs":["AAAAAASomeThingsFailToEvaluate"],"attempted_attrs":["hello"]}"#,
+ "json of: {:?}",
+ result
+ );
+ }
+}
diff --git a/ofborg/tickborg/src/message/common.rs b/ofborg/tickborg/src/message/common.rs
new file mode 100644
index 0000000000..c8fcd16ea2
--- /dev/null
+++ b/ofborg/tickborg/src/message/common.rs
@@ -0,0 +1,14 @@
+#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
+pub struct Repo {
+ pub owner: String,
+ pub name: String,
+ pub full_name: String,
+ pub clone_url: String,
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
+pub struct Pr {
+ pub target_branch: Option<String>,
+ pub number: u64,
+ pub head_sha: String,
+}
diff --git a/ofborg/tickborg/src/message/evaluationjob.rs b/ofborg/tickborg/src/message/evaluationjob.rs
new file mode 100644
index 0000000000..bd51546e4a
--- /dev/null
+++ b/ofborg/tickborg/src/message/evaluationjob.rs
@@ -0,0 +1,29 @@
+use crate::message::{Pr, Repo};
+use crate::worker;
+
+pub fn from(data: &[u8]) -> Result<EvaluationJob, serde_json::error::Error> {
+ serde_json::from_slice(data)
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct EvaluationJob {
+ pub repo: Repo,
+ pub pr: Pr,
+}
+
+pub struct Actions {}
+
+impl Actions {
+ pub fn retry_later(&mut self, _job: &EvaluationJob) -> worker::Actions {
+ vec![worker::Action::NackRequeue]
+ }
+
+ pub fn skip(&mut self, _job: &EvaluationJob) -> worker::Actions {
+ vec![worker::Action::Ack]
+ }
+
+ pub fn done(&mut self, _job: &EvaluationJob, mut response: worker::Actions) -> worker::Actions {
+ response.push(worker::Action::Ack);
+ response
+ }
+}
diff --git a/ofborg/tickborg/src/message/mod.rs b/ofborg/tickborg/src/message/mod.rs
new file mode 100644
index 0000000000..03551cd1ce
--- /dev/null
+++ b/ofborg/tickborg/src/message/mod.rs
@@ -0,0 +1,7 @@
+pub mod buildjob;
+pub mod buildlogmsg;
+pub mod buildresult;
+mod common;
+pub mod evaluationjob;
+
+pub use self::common::{Pr, Repo};
diff --git a/ofborg/tickborg/src/nix.rs b/ofborg/tickborg/src/nix.rs
new file mode 100644
index 0000000000..77aece6d6f
--- /dev/null
+++ b/ofborg/tickborg/src/nix.rs
@@ -0,0 +1,893 @@
+use crate::asynccmd::{AsyncCmd, SpawnedAsyncCmd};
+use crate::message::buildresult::BuildStatus;
+use crate::ofborg::partition_result;
+
+use std::collections::HashMap;
+use std::env;
+use std::ffi::OsStr;
+use std::fmt;
+use std::fs;
+use std::io::{BufRead, BufReader, Seek, SeekFrom};
+use std::path::Path;
+use std::process::{Command, Stdio};
+
+use tempfile::tempfile;
+
+#[allow(clippy::upper_case_acronyms)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum File { // Which nixpkgs entry-point file an operation targets.
+ DefaultNixpkgs, // ./default.nix
+ ReleaseNixOS, // ./nixos/release.nix
+}
+
+impl fmt::Display for File { // Renders the relative path handed to the nix tools.
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match *self {
+ File::DefaultNixpkgs => write!(f, "./default.nix"),
+ File::ReleaseNixOS => write!(f, "./nixos/release.nix"),
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+pub enum Operation { // A nix CLI invocation this module knows how to run.
+ Evaluate, // nix-instantiate --eval --strict --json
+ Instantiate, // nix-instantiate
+ Build, // nix-build
+ QueryPackagesJson, // nix-env --query --available --json
+ QueryPackagesOutputs, // nix-env --query --available --no-name --attr-path --out-path
+ NoOp { operation: Box<Operation> }, // echo with the wrapped operation's args; used by tests
+ Unknown { program: String }, // run an arbitrary program with no default args
+}
+
+impl Operation { // Translates each variant into a Command plus its default arguments.
+ fn command(&self) -> Command { // Pick the executable; arguments are added separately by args().
+ match *self {
+ Operation::Evaluate => Command::new("nix-instantiate"),
+ Operation::Instantiate => Command::new("nix-instantiate"),
+ Operation::Build => Command::new("nix-build"),
+ Operation::QueryPackagesJson => Command::new("nix-env"),
+ Operation::QueryPackagesOutputs => Command::new("nix-env"),
+ Operation::NoOp { .. } => Command::new("echo"), // echo the wrapped op's args instead of running them
+ Operation::Unknown { ref program } => Command::new(program),
+ }
+ }
+
+ fn args(&self, command: &mut Command) { // Append each variant's fixed argument set to `command`.
+ match *self {
+ Operation::Build => {
+ command.args([
+ "--no-out-link",
+ "--keep-going",
+ "--option",
+ "extra-experimental-features",
+ "no-url-literals",
+ ]);
+ }
+ Operation::QueryPackagesJson => {
+ command.args([
+ "--query",
+ "--available",
+ "--json",
+ "--option",
+ "extra-experimental-features",
+ "no-url-literals",
+ ]);
+ }
+ Operation::QueryPackagesOutputs => {
+ command.args([
+ "--query",
+ "--available",
+ "--no-name",
+ "--attr-path",
+ "--out-path",
+ "--option",
+ "extra-experimental-features",
+ "no-url-literals",
+ ]);
+ }
+ Operation::NoOp { ref operation } => { // forward the wrapped operation's args to echo
+ operation.args(command);
+ }
+ Operation::Evaluate => {
+ command.args([
+ "--eval",
+ "--strict",
+ "--json",
+ "--option",
+ "extra-experimental-features",
+ "no-url-literals",
+ ]);
+ }
+ Operation::Instantiate => {
+ command.args(["--option", "extra-experimental-features", "no-url-literals"]);
+ }
+ _ => (), // Unknown: caller-provided program gets no default args
+ };
+ }
+}
+
+impl fmt::Display for Operation { // Human-readable command line summary, used in logs and tests.
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match *self {
+ Operation::Build => write!(f, "nix-build"),
+ Operation::Instantiate => write!(f, "nix-instantiate"),
+ Operation::QueryPackagesJson => write!(f, "nix-env -qa --json"),
+ Operation::QueryPackagesOutputs => write!(f, "nix-env -qaP --no-name --out-path"),
+ Operation::NoOp { ref operation } => operation.fmt(f), // displays as the wrapped op
+ Operation::Unknown { ref program } => write!(f, "{}", program),
+ Operation::Evaluate => write!(f, "nix-instantiate --strict --json ..."),
+ }
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct Nix { // Configured launcher for sandboxed nix CLI invocations.
+ pub system: String, // passed as --argstr system
+ remote: String, // exported as NIX_REMOTE
+ build_timeout: u16, // passed as --option build-timeout
+ limit_supported_systems: bool, // when true, adds --arg supportedSystems ["<system>"]
+ initial_heap_size: Option<String>, // exported as GC_INITIAL_HEAP_SIZE when set
+}
+
+impl Nix {
+ pub fn new( // Build a Nix launcher; supported-system limiting defaults to on.
+ system: String,
+ remote: String,
+ build_timeout: u16,
+ initial_heap_size: Option<String>,
+ ) -> Nix {
+ Nix {
+ system,
+ remote,
+ build_timeout,
+ initial_heap_size,
+ limit_supported_systems: true, // restrictive default; opt out via without_limited_supported_systems()
+ }
+ }
+
+ pub fn with_system(&self, system: String) -> Nix { // Clone with a different target system.
+ let mut n = self.clone();
+ n.system = system;
+ n
+ }
+
+ pub fn with_limited_supported_systems(&self) -> Nix { // Clone with supportedSystems limiting enabled.
+ let mut n = self.clone();
+ n.limit_supported_systems = true;
+ n
+ }
+
+ pub fn without_limited_supported_systems(&self) -> Nix { // Clone with supportedSystems limiting disabled.
+ let mut n = self.clone();
+ n.limit_supported_systems = false;
+ n
+ }
+
+ pub fn safely_partition_instantiable_attrs( // Split attrs into (instantiable, failed-with-log-lines) by trying each one individually.
+ &self,
+ nixpkgs: &Path,
+ file: File,
+ attrs: Vec<String>,
+ ) -> (Vec<String>, Vec<(String, Vec<String>)>) {
+ let attr_instantiations: Vec<Result<String, (String, Vec<String>)>> = attrs
+ .into_iter()
+ .map(
+ |attr| match self.safely_instantiate_attrs(nixpkgs, file, vec![attr.clone()]) {
+ Ok(_) => Ok(attr),
+ Err(f) => Err((attr, lines_from_file(f))), // keep the failure output alongside the attr
+ },
+ )
+ .collect();
+
+ partition_result(attr_instantiations)
+ }
+
+ pub fn safely_instantiate_attrs( // nix-instantiate the given attrs; Ok/Err carries the combined output file.
+ &self,
+ nixpkgs: &Path,
+ file: File,
+ attrs: Vec<String>,
+ ) -> Result<fs::File, fs::File> {
+ let mut command = self.safe_command::<&OsStr>(&Operation::Instantiate, nixpkgs, &[], &[]);
+ self.set_attrs_command(&mut command, file, attrs);
+ self.run(command, true)
+ }
+
+ pub fn safely_evaluate_expr_cmd( // Build (but don't run) an Evaluate command for an inline --expr with --argstr pairs.
+ &self,
+ nixpkgs: &Path,
+ expr: &str,
+ argstrs: HashMap<&str, &str>,
+ extra_paths: &[&Path],
+ ) -> Command {
+ let mut attrargs: Vec<String> = Vec::with_capacity(2 + (argstrs.len() * 3)); // --expr <expr> plus 3 tokens per argstr
+ attrargs.push("--expr".to_owned());
+ attrargs.push(expr.to_owned());
+ for (argname, argstr) in argstrs {
+ attrargs.push(String::from("--argstr"));
+ attrargs.push(argname.to_owned());
+ attrargs.push(argstr.to_owned());
+ }
+
+ self.safe_command(&Operation::Evaluate, nixpkgs, &attrargs, extra_paths)
+ }
+
+ pub fn safely_build_attrs( // nix-build the given attrs synchronously; Ok/Err carries the output file.
+ &self,
+ nixpkgs: &Path,
+ file: File,
+ attrs: Vec<String>,
+ ) -> Result<fs::File, fs::File> {
+ let mut command = self.safe_command::<&OsStr>(&Operation::Build, nixpkgs, &[], &[]);
+ self.set_attrs_command(&mut command, file, attrs);
+ self.run(command, true)
+ }
+
+ pub fn safely_build_attrs_async( // Same build, but spawned for streaming via AsyncCmd.
+ &self,
+ nixpkgs: &Path,
+ file: File,
+ attrs: Vec<String>,
+ ) -> SpawnedAsyncCmd {
+ let mut command = self.safe_command::<&OsStr>(&Operation::Build, nixpkgs, &[], &[]);
+ self.set_attrs_command(&mut command, file, attrs);
+ AsyncCmd::new(command).spawn()
+ }
+
+ fn set_attrs_command(&self, command: &mut Command, file: File, attrs: Vec<String>) { // Append the entry file, -A flags, and the nixos/release.nix nixpkgs stub.
+ let mut args: Vec<String> = Vec::with_capacity(3 + (attrs.len() * 2)); // file + 2 per attr (+ possible --arg triple)
+ args.push(format!("{file}"));
+ for attr in attrs {
+ args.push(String::from("-A"));
+ args.push(attr);
+ }
+ if let File::ReleaseNixOS = file { // release.nix expects a nixpkgs argument; fake the revision metadata
+ args.push(String::from("--arg"));
+ args.push(String::from("nixpkgs"));
+ args.push(String::from(
+ "{ outPath=./.; revCount=999999; shortRev=\"ofborg\"; rev=\"0000000000000000000000000000000000000000\"; }",
+ ));
+ }
+ command.args(args);
+ }
+
+ pub fn safely( // Convenience: build a safe_command for `op` and run it.
+ &self,
+ op: &Operation,
+ nixpkgs: &Path,
+ args: Vec<String>,
+ keep_stdout: bool,
+ ) -> Result<fs::File, fs::File> {
+ self.run(self.safe_command(op, nixpkgs, &args, &[]), keep_stdout)
+ }
+
+ pub fn run(&self, mut cmd: Command, keep_stdout: bool) -> Result<fs::File, fs::File> { // Run to completion; Ok/Err (by exit status) carries a tempfile of the output, rewound to the start.
+ let stderr = tempfile().expect("Fetching a stderr tempfile");
+ let mut reader = stderr.try_clone().expect("Cloning stderr to the reader");
+
+ let stdout: Stdio = if keep_stdout { // interleave stdout into the same tempfile, or discard it
+ Stdio::from(stderr.try_clone().expect("Cloning stderr for stdout"))
+ } else {
+ Stdio::null()
+ };
+
+ let status = cmd
+ .stdout(stdout)
+ .stderr(Stdio::from(stderr))
+ .status()
+ .expect("Running a program ...");
+
+ reader
+ .seek(SeekFrom::Start(0)) // rewind so callers read from the beginning
+ .expect("Seeking to Start(0)");
+
+ if status.success() {
+ Ok(reader)
+ } else {
+ Err(reader)
+ }
+ }
+
+ pub fn run_stderr_stdout(&self, mut cmd: Command) -> (bool, fs::File, fs::File) { // Run to completion with stdout and stderr captured separately; returns (success, stdout, stderr), both rewound.
+ let stdout_file = tempfile().expect("Fetching a stdout tempfile");
+ let mut stdout_reader = stdout_file
+ .try_clone()
+ .expect("Cloning stdout to the reader");
+
+ let stderr_file = tempfile().expect("Fetching a stderr tempfile");
+ let mut stderr_reader = stderr_file
+ .try_clone()
+ .expect("Cloning stderr to the reader");
+
+ let status = cmd
+ .stdout(Stdio::from(stdout_file))
+ .stderr(Stdio::from(stderr_file))
+ .status()
+ .expect("Running a program ...");
+
+ stdout_reader
+ .seek(SeekFrom::Start(0)) // rewind so callers read from the beginning
+ .expect("Seeking stdout to Start(0)");
+ stderr_reader
+ .seek(SeekFrom::Start(0))
+ .expect("Seeking stderr to Start(0)");
+
+ (status.success(), stdout_reader, stderr_reader)
+ }
+
+ pub fn safe_command<S>( // Assemble a sandboxed nix command: scrubbed env, restricted eval, pinned NIX_PATH.
+ &self,
+ op: &Operation,
+ nixpkgs: &Path,
+ args: &[S],
+ safe_paths: &[&Path],
+ ) -> Command
+ where
+ S: AsRef<OsStr>,
+ {
+ let nixpkgspath = format!("ofborg-nixpkgs-pr={}", nixpkgs.display()); // expose the PR checkout on NIX_PATH
+ let mut nixpath: Vec<String> = safe_paths
+ .iter()
+ .map(|path| format!("{}", path.display()))
+ .collect();
+ nixpath.push(nixpkgspath);
+
+ let mut command = op.command();
+ op.args(&mut command);
+
+ command.env_clear(); // start from an empty environment; only whitelisted vars below
+ command.current_dir(nixpkgs);
+ command.env("HOME", "/homeless-shelter"); // deliberately nonexistent HOME
+ command.env("NIX_PATH", nixpath.join(":"));
+ command.env("NIX_REMOTE", &self.remote);
+
+ if let Some(ref initial_heap_size) = self.initial_heap_size {
+ command.env("GC_INITIAL_HEAP_SIZE", initial_heap_size);
+ }
+
+ let path = env::var("PATH").unwrap(); // PATH is the only inherited variable
+ command.env("PATH", path);
+
+ command.args(["--show-trace"]);
+ command.args(["--option", "restrict-eval", "true"]); // forbid access outside NIX_PATH
+ command.args([
+ "--option",
+ "build-timeout",
+ &format!("{}", self.build_timeout),
+ ]);
+ command.args(["--argstr", "system", &self.system]);
+
+ if self.limit_supported_systems { // optionally constrain supportedSystems to just our system
+ command.args([
+ "--arg",
+ "supportedSystems",
+ &format!("[\"{}\"]", &self.system),
+ ]);
+ }
+
+ command.args(args); // caller-provided args go last
+ command
+ }
+}
+
+fn lines_from_file(file: fs::File) -> Vec<String> { // Read all lines, dropping the restricted-setting warning noise.
+ BufReader::new(file)
+ .lines()
+ .map_while(Result::ok) // stop at the first unreadable line
+ .filter(|msg| !is_user_setting_warning(msg))
+ .collect()
+}
+
+pub fn is_user_setting_warning(line: &str) -> bool { // True for nix's "ignoring the user-specified setting ..." warning lines.
+ let line = line.trim();
+ line.starts_with("warning: ignoring the user-specified setting '")
+ && line.ends_with("because it is a restricted setting and you are not a trusted user")
+}
+
+pub fn wait_for_build_status(spawned: SpawnedAsyncCmd) -> BuildStatus { // Map a finished build's exit code onto a BuildStatus.
+ match spawned.wait() {
+ Ok(s) => match s.code() {
+ Some(0) => BuildStatus::Success,
+ Some(100) => BuildStatus::Failure, // nix permanent failure
+ Some(101) => BuildStatus::TimedOut, // nix build timedout
+ Some(102) => BuildStatus::HashMismatch, // Fixed Output Derivation's hash was wrong
+ Some(code) => BuildStatus::UnexpectedError {
+ err: format!("command failed with exit code {code}"),
+ },
+ None => BuildStatus::UnexpectedError { // killed by a signal: no exit code available
+ err: "unexpected build failure".into(),
+ },
+ },
+ Err(err) => BuildStatus::UnexpectedError {
+ err: format!("failed on interior command {err}"),
+ },
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use std::env;
+ use std::path::PathBuf;
+
+ #[cfg(target_os = "linux")]
+ const SYSTEM: &str = "x86_64-linux"; // test system string per host OS
+ #[cfg(target_os = "macos")]
+ const SYSTEM: &str = "x86_64-darwin";
+
+ fn nix() -> Nix { // Test fixture: Nix with the fake test-nix/bin tools prepended to PATH.
+ let path = env::var("PATH").unwrap();
+ let test_path = format!("{}/test-nix/bin:{path}", env!("CARGO_MANIFEST_DIR"));
+ unsafe { env::set_var("PATH", test_path) }; // mutates process env; tests relying on PATH run after this
+ let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned());
+ Nix::new(SYSTEM.to_owned(), remote, 1800, None)
+ }
+
+ fn noop(operation: Operation) -> Operation { // Wrap an operation so it echoes its args instead of running.
+ Operation::NoOp {
+ operation: Box::new(operation),
+ }
+ }
+
+ fn env_noop() -> Operation { // Operation that dumps the environment via the test script.
+ Operation::Unknown {
+ program: "./environment.sh".to_owned(),
+ }
+ }
+
+ fn build_path() -> PathBuf { // test fixture dir: buildable expressions
+ let mut cwd = env::current_dir().unwrap();
+ cwd.push(Path::new("./test-srcs/build"));
+ cwd
+ }
+
+ fn passing_eval_path() -> PathBuf { // test fixture dir: cleanly evaluating expressions
+ let mut cwd = env::current_dir().unwrap();
+ cwd.push(Path::new("./test-srcs/eval"));
+ cwd
+ }
+
+ fn individual_eval_path() -> PathBuf { // test fixture dir: mix of passing and failing attrs
+ let mut cwd = env::current_dir().unwrap();
+ cwd.push(Path::new("./test-srcs/eval-mixed-failure"));
+ cwd
+ }
+
+ fn strip_ansi(string: &str) -> String { // Normalize fancy quotes and strip the color codes nix emits.
+ string
+ .replace(['‘', '’'], "'")
+ .replace("\u{1b}[31;1m", "") // red
+ .replace("\u{1b}[0m", "") // reset
+ }
+
+ #[derive(Debug)]
+ enum Expect { // Expected outcome of a run under assert_run.
+ Pass,
+ Fail,
+ }
+
+ fn assert_run(res: Result<fs::File, fs::File>, expected: Expect, require: Vec<&str>) { // Assert pass/fail outcome and that every `require` substring appears in the output; panics with a readable report otherwise.
+ let expectation_held: bool = match expected {
+ Expect::Pass => res.is_ok(),
+ Expect::Fail => res.is_err(),
+ };
+
+ let file: fs::File = match res { // either way, the file holds the program output
+ Ok(file) => file,
+ Err(file) => file,
+ };
+
+ let lines = lines_from_file(file);
+
+ let buildlog = lines
+ .into_iter()
+ .map(|line| strip_ansi(&line))
+ .map(|line| format!(" | {line}")) // indent for the failure report
+ .collect::<Vec<String>>()
+ .join("\n");
+
+ let total_requirements = require.len();
+ let mut missed_requirements: usize = 0;
+ let requirements_held: Vec<Result<String, String>> = require
+ .into_iter()
+ .map(|line| line.to_owned())
+ .map(|line| {
+ if buildlog.contains(&line) {
+ Ok(line)
+ } else {
+ missed_requirements += 1;
+ Err(line)
+ }
+ })
+ .collect();
+
+ let mut prefixes: Vec<String> = vec!["".to_owned(), "".to_owned()];
+
+ if !expectation_held {
+ prefixes.push(format!(
+ "The run was expected to {:?}, but did not.",
+ expected
+ ));
+ prefixes.push("".to_owned());
+ } else {
+ prefixes.push(format!("The run was expected to {expected:?}, and did."));
+ prefixes.push("".to_owned());
+ }
+
+ let mut suffixes = vec![
+ "".to_owned(),
+ format!(
+ "{} out of {} required lines matched.",
+ (total_requirements - missed_requirements),
+ total_requirements
+ ),
+ "".to_owned(),
+ ];
+
+ for expected_line in requirements_held {
+ suffixes.push(format!(" - {expected_line:?}")); // Ok(...) matched, Err(...) missing
+ }
+ suffixes.push("".to_owned());
+
+ let output_blocks: Vec<Vec<String>> =
+ vec![prefixes, vec![buildlog, "".to_owned()], suffixes];
+
+ let output_blocks_strings: Vec<String> = output_blocks
+ .into_iter()
+ .map(|lines| lines.join("\n"))
+ .collect();
+
+ let output: String = output_blocks_strings.join("\n");
+
+ if expectation_held && missed_requirements == 0 { // all good: no report
+ } else {
+ panic!("{output}");
+ }
+ }
+
+ #[test]
+ fn test_build_operations() {
+ let nix = nix();
+ let op = noop(Operation::Build);
+ assert_eq!(op.to_string(), "nix-build");
+
+ let ret: Result<fs::File, fs::File> = nix.run(
+ nix.safe_command(&op, build_path().as_path(), &["--version"], &[]),
+ true,
+ );
+
+ assert_run(
+ ret,
+ Expect::Pass,
+ vec!["--no-out-link --keep-going", "--version"],
+ );
+ }
+
+ #[test]
+ fn test_instantiate_operation() {
+ let nix = nix();
+ let op = noop(Operation::Instantiate);
+ assert_eq!(op.to_string(), "nix-instantiate");
+
+ let ret: Result<fs::File, fs::File> = nix.run(
+ nix.safe_command(&op, build_path().as_path(), &["--version"], &[]),
+ true,
+ );
+
+ assert_run(ret, Expect::Pass, vec!["--version"]);
+ }
+
+ #[test]
+ fn test_query_packages_json() {
+ let nix = nix();
+ let op = noop(Operation::QueryPackagesJson);
+ assert_eq!(op.to_string(), "nix-env -qa --json");
+
+ let ret: Result<fs::File, fs::File> = nix.run(
+ nix.safe_command(&op, build_path().as_path(), &["--version"], &[]),
+ true,
+ );
+
+ assert_run(
+ ret,
+ Expect::Pass,
+ vec!["--query --available --json", "--version"],
+ );
+ }
+
+ #[test]
+ fn test_query_packages_outputs() {
+ let nix = nix();
+ let op = noop(Operation::QueryPackagesOutputs);
+ assert_eq!(op.to_string(), "nix-env -qaP --no-name --out-path");
+
+ let ret: Result<fs::File, fs::File> = nix.run(
+ nix.safe_command(&op, build_path().as_path(), &["--version"], &[]),
+ true,
+ );
+
+ assert_run(
+ ret,
+ Expect::Pass,
+ vec![
+ "--query --available --no-name --attr-path --out-path",
+ "--version",
+ ],
+ );
+ }
+
+ #[test]
+ fn safe_command_environment() {
+ let nix = nix();
+
+ let ret: Result<fs::File, fs::File> = nix.run(
+ nix.safe_command::<&OsStr>(&env_noop(), build_path().as_path(), &[], &[]),
+ true,
+ );
+
+ assert_run(
+ ret,
+ Expect::Pass,
+ vec![
+ "HOME=/homeless-shelter",
+ "NIX_PATH=ofborg-nixpkgs-pr=",
+ "NIX_REMOTE=",
+ "PATH=",
+ ],
+ );
+ }
+
+ #[test]
+ fn safe_command_custom_gc() {
+ let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned());
+ let nix = Nix::new(SYSTEM.to_owned(), remote, 1800, Some("4g".to_owned()));
+
+ let ret: Result<fs::File, fs::File> = nix.run(
+ nix.safe_command::<&OsStr>(&env_noop(), build_path().as_path(), &[], &[]),
+ true,
+ );
+
+ assert_run(
+ ret,
+ Expect::Pass,
+ vec![
+ "HOME=/homeless-shelter",
+ "NIX_PATH=ofborg-nixpkgs-pr=",
+ "NIX_REMOTE=",
+ "PATH=",
+ "GC_INITIAL_HEAP_SIZE=4g",
+ ],
+ );
+ }
+
+ #[test]
+ fn safe_command_options() {
+ let nix = nix();
+ let op = noop(Operation::Build);
+
+ let ret: Result<fs::File, fs::File> = nix.run(
+ nix.safe_command::<&OsStr>(&op, build_path().as_path(), &[], &[]),
+ true,
+ );
+
+ assert_run(
+ ret,
+ Expect::Pass,
+ vec!["--option restrict-eval true", "--option build-timeout 1800"],
+ );
+ }
+
+ #[test]
+ fn set_attrs_nixpkgs() {
+ let nix = nix();
+ let op = noop(Operation::Build);
+
+ let mut command = nix.safe_command::<&OsStr>(&op, build_path().as_path(), &[], &[]);
+ nix.set_attrs_command(
+ &mut command,
+ File::DefaultNixpkgs,
+ vec!["foo".into(), "bar".into()],
+ );
+
+ let ret: Result<fs::File, fs::File> = nix.run(command, true);
+
+ assert_run(ret, Expect::Pass, vec!["./default.nix", "-A foo -A bar"]);
+ }
+
+ #[test]
+ fn set_attrs_nixos() {
+ let nix = nix();
+ let op = noop(Operation::Instantiate);
+
+ let mut command = nix.safe_command::<&OsStr>(&op, build_path().as_path(), &[], &[]);
+ nix.set_attrs_command(
+ &mut command,
+ File::ReleaseNixOS,
+ vec!["foo".into(), "bar".into()],
+ );
+
+ let ret: Result<fs::File, fs::File> = nix.run(command, true);
+
+ assert_run(
+ ret,
+ Expect::Pass,
+ vec![
+ "./nixos/release.nix",
+ "--arg nixpkgs { outPath=./.; revCount=999999; shortRev=\"ofborg\"; rev=\"0000000000000000000000000000000000000000\"; }",
+ ],
+ );
+ }
+
+ #[test]
+ fn safely_build_attrs_success() {
+ let nix = nix();
+
+ let ret: Result<fs::File, fs::File> = nix.safely_build_attrs(
+ build_path().as_path(),
+ File::DefaultNixpkgs,
+ vec![String::from("success")],
+ );
+
+ assert_run(
+ ret,
+ Expect::Pass,
+ vec!["-success.drv", "building ", "hi", "-success"],
+ );
+ }
+
+ #[test]
+ fn safely_build_attrs_failure() {
+ let nix = nix();
+
+ let ret: Result<fs::File, fs::File> = nix.safely_build_attrs(
+ build_path().as_path(),
+ File::DefaultNixpkgs,
+ vec![String::from("failed")],
+ );
+
+ assert_run(
+ ret,
+ Expect::Fail,
+ vec![
+ "-failed.drv",
+ "building ",
+ "hi",
+ "failed to produce output path",
+ ],
+ );
+ }
+
+ #[test]
+ fn partition_instantiable_attributes() {
+ let nix = nix();
+
+ let ret: (Vec<String>, Vec<(String, Vec<String>)>) = nix
+ .safely_partition_instantiable_attrs(
+ individual_eval_path().as_path(),
+ File::DefaultNixpkgs,
+ vec![
+ String::from("fails-instantiation"),
+ String::from("passes-instantiation"),
+ String::from("missing-attr"),
+ ],
+ );
+
+ assert_eq!(ret.0, vec!["passes-instantiation"]);
+
+ assert_eq!(ret.1[0].0, "fails-instantiation");
+ assert_eq!(
+ ret.1[0].1[0],
+ "trace: You just can't frooble the frozz on this particular system."
+ );
+
+ eprintln!("{:?}", ret.1[1].1);
+ assert_eq!(ret.1[1].0, "missing-attr");
+ let s = strip_ansi(ret.1[1].1.last().unwrap());
+ assert_eq!(
+ s.trim_start_matches("error: "),
+ "attribute 'missing-attr' in selection path 'missing-attr' not found"
+ );
+ }
+
+ #[test]
+ fn safely_instantiate_attrs_failure() {
+ let nix = nix();
+
+ let ret: Result<fs::File, fs::File> = nix.safely_instantiate_attrs(
+ individual_eval_path().as_path(),
+ File::DefaultNixpkgs,
+ vec![String::from("fails-instantiation")],
+ );
+
+ assert_run(
+ ret,
+ Expect::Fail,
+ vec!["You just can't", "assertion", "failed"],
+ );
+ }
+
+ #[test]
+ fn safely_instantiate_attrs_success() {
+ let nix = nix();
+
+ let ret: Result<fs::File, fs::File> = nix.safely_instantiate_attrs(
+ individual_eval_path().as_path(),
+ File::DefaultNixpkgs,
+ vec![String::from("passes-instantiation")],
+ );
+
+ assert_run(ret, Expect::Pass, vec!["-passes-instantiation.drv"]);
+ }
+
+ #[test]
+ fn safely_evaluate_expr_success() {
+ let nix = nix();
+
+ let ret: Result<fs::File, fs::File> = nix.run(
+ nix.safely_evaluate_expr_cmd(
+ individual_eval_path().as_path(),
+ r#"{ foo ? "bar" }: "The magic value is ${foo}""#,
+ [("foo", "tux")].iter().cloned().collect(),
+ &[],
+ ),
+ true,
+ );
+
+ assert_run(ret, Expect::Pass, vec!["The magic value is tux"]);
+ }
+
+ #[test]
+ fn strict_sandboxing() {
+ let ret: Result<fs::File, fs::File> = nix().safely_build_attrs(
+ build_path().as_path(),
+ File::DefaultNixpkgs,
+ vec![String::from("sandbox-violation")],
+ );
+
+ assert_run(
+ ret,
+ Expect::Fail,
+ vec!["access to absolute path", "is forbidden in restricted mode"],
+ );
+ }
+
+ #[test]
+ fn instantiation_success() {
+ let ret: Result<fs::File, fs::File> = nix().safely(
+ &Operation::Instantiate,
+ passing_eval_path().as_path(),
+ vec![],
+ true,
+ );
+
+ assert_run(
+ ret,
+ Expect::Pass,
+ vec![
+ "the result might be removed by the garbage collector",
+ "-failed.drv",
+ "-success.drv",
+ ],
+ );
+ }
+
+ #[test]
+ fn instantiation_nixpkgs_restricted_mode() {
+ let ret: Result<fs::File, fs::File> = nix().safely(
+ &Operation::Instantiate,
+ individual_eval_path().as_path(),
+ vec![String::from("-A"), String::from("nixpkgs-restricted-mode")],
+ true,
+ );
+
+ assert_run(
+ ret,
+ Expect::Fail,
+ vec![
+ "access to URI 'git+file:///fake",
+ "is forbidden in restricted mode",
+ ],
+ );
+ }
+}
diff --git a/ofborg/tickborg/src/nixenv.rs b/ofborg/tickborg/src/nixenv.rs
new file mode 100644
index 0000000000..675eecfed9
--- /dev/null
+++ b/ofborg/tickborg/src/nixenv.rs
@@ -0,0 +1,70 @@
+//! Evaluates the expression like Hydra would, with regards to
+//! architecture support and recursed packages.
+
+use std::fs::File;
+use std::io::{self, Read};
+
+#[derive(Debug)]
+pub enum Error { // Failure modes of the Hydra-style nix-env evaluation.
+ Io(io::Error), // setup/spawn failure
+ Internal(Box<dyn std::error::Error>), // any other internal error
+ CommandFailed(File), // nix-env exited nonzero; File holds its output
+ StatsParse(File, Result<u64, io::Error>, serde_json::Error), // stats JSON didn't parse; carries output, the seek result, and the parse error
+ UncleanEvaluation(Vec<String>), // evaluation succeeded but produced warnings
+}
+
+impl From<io::Error> for Error { // Allow `?` on io operations.
+ fn from(e: io::Error) -> Error {
+ Error::Io(e)
+ }
+}
+
+impl Error {
+ pub fn display(self) -> String { // Render a human-readable report, consuming self (reads the captured output files).
+ match self {
+ Error::Io(err) => format!("Failed during the setup of executing nix-env: {err:?}"),
+ Error::Internal(err) => format!("Internal error: {err:?}"),
+ Error::CommandFailed(mut fd) => {
+ let mut buffer = Vec::new();
+ let read_result = fd.read_to_end(&mut buffer);
+ let bufstr = String::from_utf8_lossy(&buffer); // tolerate non-UTF-8 output
+
+ match read_result {
+ Ok(_) => format!("nix-env failed:\n{bufstr}"),
+ Err(err) => format!(
+ "nix-env failed and loading the error result caused a new error {err:?}\n\n{bufstr}"
+ ),
+ }
+ }
+ Error::UncleanEvaluation(warnings) => {
+ format!("nix-env did not evaluate cleanly:\n {warnings:?}")
+ }
+ Error::StatsParse(mut fd, seek, parse_err) => {
+ let mut buffer = Vec::new();
+ let read_result = fd.read_to_end(&mut buffer);
+ let bufstr = String::from_utf8_lossy(&buffer);
+
+ let mut lines =
+ String::from("Parsing nix-env's performance statistics failed.\n\n");
+
+ if let Err(seek_err) = seek { // report the earlier rewind failure, if any
+ lines.push_str(&format!(
+ "Additionally, resetting to the beginning of the output failed with:\n{seek_err:?}\n\n"
+ ));
+ }
+
+ if let Err(read_err) = read_result {
+ lines.push_str(&format!(
+ "Additionally, loading the output failed with:\n{read_err:?}\n\n"
+ ));
+ }
+
+ lines.push_str(&format!("Parse error:\n{parse_err:?}\n\n"));
+
+ lines.push_str(&format!("Evaluation output:\n{bufstr}"));
+
+ lines
+ }
+ }
+ }
+}
diff --git a/ofborg/tickborg/src/notifyworker.rs b/ofborg/tickborg/src/notifyworker.rs
new file mode 100644
index 0000000000..f83ab04a1a
--- /dev/null
+++ b/ofborg/tickborg/src/notifyworker.rs
@@ -0,0 +1,45 @@
+use std::sync::Arc;
+
+use crate::worker::Action;
+
+#[async_trait::async_trait]
+pub trait SimpleNotifyWorker { // A consumer that processes jobs and reports actions via a receiver.
+ type J; // job type produced by msg_to_job
+
+ async fn consumer( // Process one job, telling `notifier` about each resulting Action.
+ &self,
+ job: Self::J,
+ notifier: Arc<dyn NotificationReceiver + std::marker::Send + std::marker::Sync>,
+ );
+
+ fn msg_to_job( // Decode a raw AMQP message into a job, or a String error.
+ &self,
+ routing_key: &str,
+ content_type: &Option<String>,
+ body: &[u8],
+ ) -> Result<Self::J, String>;
+}
+
+#[async_trait::async_trait]
+pub trait NotificationReceiver { // Sink for worker Actions (ack/nack/publish...).
+ async fn tell(&self, action: Action);
+}
+
+#[derive(Default)]
+pub struct DummyNotificationReceiver { // Test receiver that records every Action it is told.
+ pub actions: parking_lot::Mutex<Vec<Action>>, // recorded actions, in order received
+}
+
+impl DummyNotificationReceiver {
+ pub fn new() -> DummyNotificationReceiver { // Empty recorder; same as Default.
+ Default::default()
+ }
+}
+
+#[async_trait::async_trait]
+impl NotificationReceiver for DummyNotificationReceiver {
+ async fn tell(&self, action: Action) { // Append the action under the lock.
+ let mut actions = self.actions.lock();
+ actions.push(action);
+ }
+}
diff --git a/ofborg/tickborg/src/outpathdiff.rs b/ofborg/tickborg/src/outpathdiff.rs
new file mode 100644
index 0000000000..511890d20d
--- /dev/null
+++ b/ofborg/tickborg/src/outpathdiff.rs
@@ -0,0 +1,7 @@
+#[derive(Debug, PartialEq, Hash, Eq, Clone)]
+pub struct PackageArch { // A (package, architecture) pair used as a map/set key.
+ pub package: Package, // attribute/package name
+ pub architecture: Architecture, // system string, e.g. from Nix
+}
+type Package = String; // alias for readability
+type Architecture = String; // alias for readability
diff --git a/ofborg/tickborg/src/stats.rs b/ofborg/tickborg/src/stats.rs
new file mode 100644
index 0000000000..16705e6a68
--- /dev/null
+++ b/ofborg/tickborg/src/stats.rs
@@ -0,0 +1,57 @@
+use lapin::options::BasicPublishOptions;
+
+include!(concat!(env!("OUT_DIR"), "/events.rs"));
+
+#[macro_use]
+mod macros { // NOTE(review): my_macro appears unused here — possibly leftover scaffolding; confirm before removing
+ #[macro_export]
+ macro_rules! my_macro(() => (FooBar));
+}
+
+pub trait SysEvents: Send { // Emitter for generated stats Events (see events.rs include above).
+ fn notify(&mut self, event: Event) -> impl std::future::Future<Output = ()>;
+}
+
+#[derive(serde::Serialize, serde::Deserialize, Debug)]
+pub struct EventMessage { // Envelope published to the stats exchange.
+ pub sender: String, // identity of the emitting process
+ pub events: Vec<Event>, // batch of events (currently always one)
+}
+
+pub struct RabbitMq<C> { // SysEvents backend publishing over an AMQP channel.
+ identity: String, // copied into EventMessage.sender
+ channel: C,
+}
+
+impl RabbitMq<lapin::Channel> {
+ pub fn from_lapin(identity: &str, channel: lapin::Channel) -> Self { // Wrap an open lapin channel.
+ RabbitMq {
+ identity: identity.to_owned(),
+ channel,
+ }
+ }
+}
+
+impl SysEvents for RabbitMq<lapin::Channel> {
+ async fn notify(&mut self, event: Event) { // Publish a single-event EventMessage as JSON to the "stats" exchange.
+ let props = lapin::BasicProperties::default().with_content_type("application/json".into());
+ let _confirmaton = self
+ .channel
+ .basic_publish(
+ "stats".into(), // exchange
+ "".into(), // routing key: exchange decides delivery
+ BasicPublishOptions::default(),
+ &serde_json::to_string(&EventMessage {
+ sender: self.identity.clone(),
+ events: vec![event],
+ })
+ .unwrap() // serialization of our own types is infallible in practice
+ .into_bytes(),
+ props,
+ )
+ .await
+ .unwrap() // panics on publish failure; second await waits for broker confirmation
+ .await
+ .unwrap();
+ }
+}
diff --git a/ofborg/tickborg/src/systems.rs b/ofborg/tickborg/src/systems.rs
new file mode 100644
index 0000000000..36f5f32fff
--- /dev/null
+++ b/ofborg/tickborg/src/systems.rs
@@ -0,0 +1,74 @@
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum System { // Target platforms known to the CI tooling.
+ X8664Linux, // x86_64-linux
+ Aarch64Linux, // aarch64-linux
+ X8664Darwin, // x86_64-darwin
+ Aarch64Darwin, // aarch64-darwin
+ X8664Windows, // x86_64-windows
+ Aarch64Windows, // aarch64-windows
+ X8664FreeBSD, // x86_64-freebsd
+}
+
+impl System {
+ pub fn all_known_systems() -> Vec<Self> { // Every variant, in declaration order.
+ vec![
+ Self::X8664Linux,
+ Self::Aarch64Linux,
+ Self::X8664Darwin,
+ Self::Aarch64Darwin,
+ Self::X8664Windows,
+ Self::Aarch64Windows,
+ Self::X8664FreeBSD,
+ ]
+ }
+
+ /// The primary CI platforms (Linux + macOS + Windows x86_64)
+ pub fn primary_systems() -> Vec<Self> {
+ vec![
+ Self::X8664Linux,
+ Self::X8664Darwin,
+ Self::X8664Windows,
+ ]
+ }
+
+ /// Systems that can run full test suites
+ pub fn can_run_tests(&self) -> bool { // everything except FreeBSD
+ matches!(
+ self,
+ System::X8664Linux | System::Aarch64Linux | System::X8664Darwin | System::Aarch64Darwin | System::X8664Windows
+ )
+ }
+
+ /// GitHub Actions runner label for this system
+ pub fn runner_label(&self) -> &'static str {
+ match self {
+ System::X8664Linux => "ubuntu-latest",
+ System::Aarch64Linux => "ubuntu-24.04-arm",
+ System::X8664Darwin => "macos-15", // same runner as aarch64; Rosetta/arch selection happens elsewhere — TODO confirm
+ System::Aarch64Darwin => "macos-15",
+ System::X8664Windows => "windows-2025",
+ System::Aarch64Windows => "windows-2025-arm",
+ System::X8664FreeBSD => "ubuntu-latest", // cross-compile or VM
+ }
+ }
+}
+
+impl std::fmt::Display for System { // Renders the Nix-style system double (arch-os).
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ match self {
+ System::X8664Linux => write!(f, "x86_64-linux"),
+ System::Aarch64Linux => write!(f, "aarch64-linux"),
+ System::X8664Darwin => write!(f, "x86_64-darwin"),
+ System::Aarch64Darwin => write!(f, "aarch64-darwin"),
+ System::X8664Windows => write!(f, "x86_64-windows"),
+ System::Aarch64Windows => write!(f, "aarch64-windows"),
+ System::X8664FreeBSD => write!(f, "x86_64-freebsd"),
+ }
+ }
+}
+
+impl System { // NOTE(review): second inherent impl — could be merged into the impl System block above
+ pub fn as_build_destination(&self) -> (Option<String>, Option<String>) { // (exchange, routing key) pair; routes to the per-system build-inputs queue
+ (None, Some(format!("build-inputs-{self}")))
+ }
+}
diff --git a/ofborg/tickborg/src/tagger.rs b/ofborg/tickborg/src/tagger.rs
new file mode 100644
index 0000000000..e718901ded
--- /dev/null
+++ b/ofborg/tickborg/src/tagger.rs
@@ -0,0 +1,87 @@
+use crate::buildtool::detect_changed_projects;
+
+/// Tags PRs based on which projects were modified.
+pub struct ProjectTagger {
+ selected: Vec<String>, // labels accumulated by analyze_changes
+}
+
+impl Default for ProjectTagger { // NOTE(review): equivalent to #[derive(Default)]
+ fn default() -> Self {
+ Self {
+ selected: vec![],
+ }
+ }
+}
+
+impl ProjectTagger {
+ pub fn new() -> Self { // Fresh tagger with no labels selected.
+ Default::default()
+ }
+
+ /// Analyze changed files and generate project labels.
+ pub fn analyze_changes(&mut self, changed_files: &[String]) { // Appends labels; repeated calls accumulate.
+ let projects = detect_changed_projects(changed_files);
+ for project in projects {
+ self.selected.push(format!("project: {project}"));
+ }
+
+ // Check for cross-cutting changes
+ let has_ci = changed_files.iter().any(|f| {
+ f.starts_with(".github/") || f.starts_with("ci/")
+ });
+ let has_docs = changed_files.iter().any(|f| {
+ f.starts_with("docs/") || f.ends_with(".md")
+ });
+ let has_root = changed_files.iter().any(|f| {
+ !f.contains('/') && !f.ends_with(".md") // top-level non-markdown file
+ });
+
+ if has_ci {
+ self.selected.push("scope: ci".into());
+ }
+ if has_docs {
+ self.selected.push("scope: docs".into());
+ }
+ if has_root {
+ self.selected.push("scope: root".into());
+ }
+ }
+
+ pub fn tags_to_add(&self) -> Vec<String> { // Snapshot of the selected labels.
+ self.selected.clone()
+ }
+
+ pub fn tags_to_remove(&self) -> Vec<String> { // Always empty: this tagger never removes labels.
+ vec![]
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_project_tagger() {
+ let mut tagger = ProjectTagger::new();
+ tagger.analyze_changes(&[
+ "meshmc/CMakeLists.txt".into(),
+ "mnv/src/main.c".into(),
+ ".github/workflows/ci.yml".into(),
+ "README.md".into(),
+ ]);
+ let tags = tagger.tags_to_add();
+ assert!(tags.contains(&"project: meshmc".into()));
+ assert!(tags.contains(&"project: mnv".into()));
+ assert!(tags.contains(&"scope: ci".into()));
+ assert!(tags.contains(&"scope: docs".into()));
+ }
+
+ #[test]
+ fn test_project_tagger_no_projects() {
+ let mut tagger = ProjectTagger::new();
+ tagger.analyze_changes(&["README.md".into()]);
+ let tags = tagger.tags_to_add();
+ assert!(!tags.iter().any(|t| t.starts_with("project:")));
+ assert!(tags.contains(&"scope: docs".into()));
+ }
+}
diff --git a/ofborg/tickborg/src/tasks/build.rs b/ofborg/tickborg/src/tasks/build.rs
new file mode 100644
index 0000000000..56583b28b4
--- /dev/null
+++ b/ofborg/tickborg/src/tasks/build.rs
@@ -0,0 +1,599 @@
+use crate::checkout;
+use crate::message::buildresult::{BuildResult, BuildStatus, V1Tag};
+use crate::message::{buildjob, buildlogmsg};
+use crate::buildtool;
+use crate::notifyworker;
+use crate::worker;
+
+use std::collections::VecDeque;
+use std::sync::Arc;
+use std::sync::atomic::{AtomicU64, Ordering};
+
+use tracing::{debug, debug_span, error, info};
+use uuid::Uuid;
+
+/// AMQP worker that checks out a PR and runs the requested project builds.
+pub struct BuildWorker {
+    cloner: checkout::CachedCloner,          // clones/caches the repository
+    build_executor: buildtool::BuildExecutor, // runs the actual builds
+    system: String,                          // build platform identifier
+    identity: String,                        // this worker instance's name
+}
+
+impl BuildWorker {
+    /// Construct a build worker for the given system/identity pair.
+    pub fn new(
+        cloner: checkout::CachedCloner,
+        build_executor: buildtool::BuildExecutor,
+        system: String,
+        identity: String,
+    ) -> BuildWorker {
+        Self {
+            cloner,
+            build_executor,
+            system,
+            identity,
+        }
+    }
+
+    /// Bundle a job and its notification channel into a `JobActions` helper.
+    fn actions(
+        &self,
+        job: buildjob::BuildJob,
+        receiver: Arc<dyn notifyworker::NotificationReceiver + Send + Sync>,
+    ) -> JobActions {
+        JobActions::new(&self.system, &self.identity, job, receiver)
+    }
+}
+
+/// Per-attempt messaging state: where to publish logs and results for
+/// one BuildJob, plus a rolling snippet of the most recent output lines.
+pub struct JobActions {
+    system: String,
+    identity: String,
+    receiver: Arc<dyn notifyworker::NotificationReceiver + std::marker::Send + std::marker::Sync>,
+    job: buildjob::BuildJob,
+    line_counter: AtomicU64,                        // 1-based log line numbering
+    snippet_log: parking_lot::RwLock<VecDeque<String>>, // last ~10 output lines
+    attempt_id: String,                             // random UUID per attempt
+    log_exchange: Option<String>,
+    log_routing_key: Option<String>,
+    result_exchange: Option<String>,
+    result_routing_key: Option<String>,
+}
+
+impl JobActions {
+    /// Build the messaging helper for one job. Exchange/routing-key pairs
+    /// come from the job itself, falling back to ("logs", "build.log") for
+    /// logs and ("build-results", no key) for status reports.
+    pub fn new(
+        system: &str,
+        identity: &str,
+        job: buildjob::BuildJob,
+        receiver: Arc<
+            dyn notifyworker::NotificationReceiver + std::marker::Send + std::marker::Sync,
+        >,
+    ) -> JobActions {
+        let (log_exchange, log_routing_key) = job
+            .logs
+            .clone()
+            .unwrap_or((Some(String::from("logs")), Some(String::from("build.log"))));
+
+        let (result_exchange, result_routing_key) = job
+            .statusreport
+            .clone()
+            .unwrap_or((Some(String::from("build-results")), None));
+
+        JobActions {
+            system: system.to_owned(),
+            identity: identity.to_owned(),
+            receiver,
+            job,
+            line_counter: 0.into(),
+            snippet_log: parking_lot::RwLock::new(VecDeque::with_capacity(10)),
+            attempt_id: Uuid::new_v4().to_string(),
+            log_exchange,
+            log_routing_key,
+            result_exchange,
+            result_routing_key,
+        }
+    }
+
+    /// The most recent output lines (at most 10), oldest first.
+    pub fn log_snippet(&self) -> Vec<String> {
+        self.snippet_log.read().clone().into()
+    }
+
+    /// The PR head could not be fetched; nothing to build, just ack.
+    pub async fn pr_head_missing(&self) {
+        self.tell(worker::Action::Ack).await;
+    }
+
+    /// The expected commit does not exist in the clone; just ack.
+    pub async fn commit_missing(&self) {
+        self.tell(worker::Action::Ack).await;
+    }
+
+    /// The job listed no attributes to build; just ack.
+    pub async fn nothing_to_do(&self) {
+        self.tell(worker::Action::Ack).await;
+    }
+
+    /// Merging the PR into the target branch failed: publish a Failure
+    /// result and ack the message.
+    pub async fn merge_failed(&self) {
+        let msg = BuildResult::V1 {
+            tag: V1Tag::V1,
+            repo: self.job.repo.clone(),
+            pr: self.job.pr.clone(),
+            system: self.system.clone(),
+            output: vec![String::from("Merge failed")],
+            attempt_id: self.attempt_id.clone(),
+            request_id: self.job.request_id.clone(),
+            attempted_attrs: None,
+            skipped_attrs: None,
+            status: BuildStatus::Failure,
+        };
+
+        let result_exchange = self.result_exchange.clone();
+        let result_routing_key = self.result_routing_key.clone();
+
+        self.tell(worker::publish_serde_action(
+            result_exchange,
+            result_routing_key,
+            &msg,
+        ))
+        .await;
+        self.tell(worker::Action::Ack).await;
+    }
+
+    /// Announce the start of this attempt on the log exchange, listing the
+    /// attrs that will be attempted and those that will be skipped.
+    pub async fn log_started(&self, can_build: Vec<String>, cannot_build: Vec<String>) {
+        let msg = buildlogmsg::BuildLogStart {
+            identity: self.identity.clone(),
+            system: self.system.clone(),
+            attempt_id: self.attempt_id.clone(),
+            attempted_attrs: Some(can_build),
+            skipped_attrs: Some(cannot_build),
+        };
+
+        let log_exchange = self.log_exchange.clone();
+        let log_routing_key = self.log_routing_key.clone();
+
+        self.tell(worker::publish_serde_action(
+            log_exchange,
+            log_routing_key,
+            &msg,
+        ))
+        .await;
+    }
+
+    /// Emit one log block per unbuildable attr: a header line, the reason
+    /// lines, and a blank separator line.
+    pub async fn log_instantiation_errors(&self, cannot_build: Vec<(String, Vec<String>)>) {
+        for (attr, log) in cannot_build {
+            self.log_line(format!("Cannot build `{attr}` because:"))
+                .await;
+
+            for line in log {
+                self.log_line(line).await;
+            }
+            self.log_line("".into()).await;
+        }
+    }
+
+    /// Publish one line of build output, numbering it and keeping it in
+    /// the rolling 10-line snippet buffer.
+    pub async fn log_line(&self, line: String) {
+        self.line_counter.fetch_add(1, Ordering::SeqCst);
+
+        {
+            // Evict the oldest line once the snippet holds 10 entries.
+            let mut snippet_log = self.snippet_log.write();
+            if snippet_log.len() >= 10 {
+                snippet_log.pop_front();
+            }
+            snippet_log.push_back(line.clone());
+        }
+
+        let msg = buildlogmsg::BuildLogMsg {
+            identity: self.identity.clone(),
+            system: self.system.clone(),
+            attempt_id: self.attempt_id.clone(),
+            line_number: self.line_counter.load(Ordering::SeqCst),
+            output: line,
+        };
+
+        let log_exchange = self.log_exchange.clone();
+        let log_routing_key = self.log_routing_key.clone();
+
+        self.tell(worker::publish_serde_action(
+            log_exchange,
+            log_routing_key,
+            &msg,
+        ))
+        .await;
+    }
+
+    /// Nothing was buildable: publish a Skipped result to both the result
+    /// and log exchanges, then ack.
+    pub async fn build_not_attempted(&self, not_attempted_attrs: Vec<String>) {
+        let msg = BuildResult::V1 {
+            tag: V1Tag::V1,
+            repo: self.job.repo.clone(),
+            pr: self.job.pr.clone(),
+            system: self.system.clone(),
+            output: self.log_snippet(),
+            attempt_id: self.attempt_id.clone(),
+            request_id: self.job.request_id.clone(),
+            skipped_attrs: Some(not_attempted_attrs),
+            attempted_attrs: None,
+            status: BuildStatus::Skipped,
+        };
+
+        let result_exchange = self.result_exchange.clone();
+        let result_routing_key = self.result_routing_key.clone();
+        self.tell(worker::publish_serde_action(
+            result_exchange,
+            result_routing_key,
+            &msg,
+        ))
+        .await;
+
+        // The same payload also goes to the log exchange so log consumers
+        // see the final status.
+        let log_exchange = self.log_exchange.clone();
+        let log_routing_key = self.log_routing_key.clone();
+        self.tell(worker::publish_serde_action(
+            log_exchange,
+            log_routing_key,
+            &msg,
+        ))
+        .await;
+
+        self.tell(worker::Action::Ack).await;
+    }
+
+    /// Publish the final build result (to both the result and log
+    /// exchanges) and ack the job.
+    pub async fn build_finished(
+        &self,
+        status: BuildStatus,
+        attempted_attrs: Vec<String>,
+        not_attempted_attrs: Vec<String>,
+    ) {
+        let msg = BuildResult::V1 {
+            tag: V1Tag::V1,
+            repo: self.job.repo.clone(),
+            pr: self.job.pr.clone(),
+            system: self.system.clone(),
+            output: self.log_snippet(),
+            attempt_id: self.attempt_id.clone(),
+            request_id: self.job.request_id.clone(),
+            status,
+            attempted_attrs: Some(attempted_attrs),
+            skipped_attrs: Some(not_attempted_attrs),
+        };
+
+        let result_exchange = self.result_exchange.clone();
+        let result_routing_key = self.result_routing_key.clone();
+        self.tell(worker::publish_serde_action(
+            result_exchange,
+            result_routing_key,
+            &msg,
+        ))
+        .await;
+
+        let log_exchange = self.log_exchange.clone();
+        let log_routing_key = self.log_routing_key.clone();
+        self.tell(worker::publish_serde_action(
+            log_exchange,
+            log_routing_key,
+            &msg,
+        ))
+        .await;
+
+        self.tell(worker::Action::Ack).await;
+    }
+
+    /// Forward an action to the notification receiver.
+    async fn tell(&self, action: worker::Action) {
+        self.receiver.tell(action).await;
+    }
+}
+
+#[async_trait::async_trait]
+impl notifyworker::SimpleNotifyWorker for BuildWorker {
+    type J = buildjob::BuildJob;
+
+    /// Decode an incoming message body into a BuildJob.
+    ///
+    /// On decode failure the raw body is logged and the worker panics —
+    /// an undecodable message indicates a protocol mismatch.
+    fn msg_to_job(&self, _: &str, _: &Option<String>, body: &[u8]) -> Result<Self::J, String> {
+        info!("lmao I got a job?");
+        match buildjob::from(body) {
+            Ok(job) => Ok(job),
+            Err(err) => {
+                error!("{:?}", std::str::from_utf8(body).unwrap_or("<not utf8>"));
+                panic!("{err:?}");
+            }
+        }
+    }
+
+    /// Check out the PR, merge it against the target branch, run the
+    /// requested project builds, and publish logs/results along the way.
+    // FIXME: remove with rust/cargo update
+    #[allow(clippy::cognitive_complexity)]
+    async fn consumer(
+        &self,
+        job: buildjob::BuildJob,
+        notifier: Arc<
+            dyn notifyworker::NotificationReceiver + std::marker::Send + std::marker::Sync,
+        >,
+    ) {
+        let span = debug_span!("job", pr = ?job.pr.number);
+        let _enter = span.enter();
+
+        let actions = self.actions(job, notifier);
+
+        if actions.job.attrs.is_empty() {
+            debug!("No attrs to build");
+            actions.nothing_to_do().await;
+            return;
+        }
+
+        info!(
+            "Working on https://github.com/{}/pull/{}",
+            actions.job.repo.full_name, actions.job.pr.number
+        );
+        let project = self.cloner.project(
+            &actions.job.repo.full_name,
+            actions.job.repo.clone_url.clone(),
+        );
+        let co = project
+            .clone_for("builder".to_string(), self.identity.clone())
+            .unwrap();
+
+        // Default to origin/main when the job does not name a target branch.
+        let target_branch = match actions.job.pr.target_branch.clone() {
+            Some(x) => x,
+            None => String::from("origin/main"),
+        };
+
+        let refpath = co.checkout_origin_ref(target_branch.as_ref()).unwrap();
+
+        if co.fetch_pr(actions.job.pr.number).is_err() {
+            info!("Failed to fetch {}", actions.job.pr.number);
+            actions.pr_head_missing().await;
+            return;
+        }
+
+        if !co.commit_exists(actions.job.pr.head_sha.as_ref()) {
+            info!("Commit {} doesn't exist", actions.job.pr.head_sha);
+            actions.commit_missing().await;
+            return;
+        }
+
+        if co.merge_commit(actions.job.pr.head_sha.as_ref()).is_err() {
+            info!("Failed to merge {}", actions.job.pr.head_sha);
+            actions.merge_failed().await;
+            return;
+        }
+
+        // Determine which projects to build from the requested attrs
+        let can_build: Vec<String> = actions.job.attrs.clone();
+        let cannot_build: Vec<(String, Vec<String>)> = Vec::new();
+        let cannot_build_attrs: Vec<String> = Vec::new();
+
+        info!(
+            "Can build: '{}', Cannot build: '{}'",
+            can_build.join(", "),
+            cannot_build_attrs.join(", ")
+        );
+
+        actions
+            .log_started(can_build.clone(), cannot_build_attrs.clone())
+            .await;
+        actions.log_instantiation_errors(cannot_build).await;
+
+        if can_build.is_empty() {
+            actions.build_not_attempted(cannot_build_attrs).await;
+            return;
+        }
+
+        // Build each requested project using the build executor
+        let mut overall_status = BuildStatus::Success;
+        for project_name in &can_build {
+            if let Some(config) = buildtool::find_project(project_name) {
+                actions.log_line(format!("Building project: {}", project_name)).await;
+                let result = self.build_executor.build_project(refpath.as_ref(), &config);
+
+                match result {
+                    Ok(mut output) => {
+                        use std::io::Read;
+                        let mut buf = String::new();
+                        output.read_to_string(&mut buf).ok();
+                        for line in buf.lines() {
+                            actions.log_line(line.to_string()).await;
+                        }
+                    }
+                    Err(mut output) => {
+                        // Any single project failure fails the whole attempt,
+                        // but we still stream its output and keep building
+                        // the remaining projects.
+                        overall_status = BuildStatus::Failure;
+                        use std::io::Read;
+                        let mut buf = String::new();
+                        output.read_to_string(&mut buf).ok();
+                        for line in buf.lines() {
+                            actions.log_line(line.to_string()).await;
+                        }
+                    }
+                }
+            } else {
+                actions.log_line(format!("Unknown project: {}", project_name)).await;
+            }
+        }
+
+        info!("Build finished ({:?})", overall_status);
+        info!("Lines:");
+        info!("-----8<-----");
+        // Log every snippet line. The previous `.inspect(..).next_back()`
+        // form only pulled the *last* element from the iterator, so the
+        // inspect closure ran once and only one line was printed.
+        for line in actions.log_snippet() {
+            info!("{}", line);
+        }
+        info!("----->8-----");
+
+        actions
+            .build_finished(overall_status, can_build, cannot_build_attrs)
+            .await;
+        info!("Build done!");
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::message::{Pr, Repo};
+    use crate::notifyworker::SimpleNotifyWorker;
+    use crate::test_scratch::TestScratch;
+    use std::env;
+    use std::path::{Path, PathBuf};
+    use std::process::{Command, Stdio};
+    use std::vec::IntoIter;
+
+    // Host build platform; these tests only compile on Linux and macOS
+    // (SYSTEM is undefined on other targets).
+    #[cfg(target_os = "linux")]
+    const SYSTEM: &str = "x86_64-linux";
+    #[cfg(target_os = "macos")]
+    const SYSTEM: &str = "x86_64-darwin";
+
+    /// Executor with a 1800-second (30 minute) timeout.
+    fn build_executor() -> buildtool::BuildExecutor {
+        buildtool::BuildExecutor::new(1800)
+    }
+
+    /// Resolve a path relative to the crate root.
+    fn tpath(component: &str) -> PathBuf {
+        Path::new(env!("CARGO_MANIFEST_DIR")).join(component)
+    }
+
+    /// A BuildWorker whose clone cache lives under `path`.
+    fn make_worker(path: &Path) -> BuildWorker {
+        let cloner = checkout::cached_cloner(path);
+        let executor = build_executor();
+
+        BuildWorker::new(
+            cloner,
+            executor,
+            SYSTEM.to_owned(),
+            "cargo-test-build".to_owned(),
+        )
+    }
+
+    /// Run test-srcs/make-pr.sh to create a fixture repo with a PR and
+    /// return the PR head commit hash (the script's trimmed stdout).
+    fn make_pr_repo(bare: &Path, co: &Path) -> String {
+        let output = Command::new("bash")
+            .current_dir(tpath("./test-srcs"))
+            .arg("make-pr.sh")
+            .arg(bare)
+            .arg(co)
+            .stderr(Stdio::null())
+            .stdout(Stdio::piped())
+            .output()
+            .expect("building the test PR failed");
+        let hash = String::from_utf8(output.stdout).expect("Should just be a hash");
+
+        hash.trim().to_owned()
+    }
+
+    /// Normalize fancy quotes and strip escaped ANSI color codes so
+    /// assertions can match plain text.
+    fn strip_escaped_ansi(string: &str) -> String {
+        string
+            .replace(['‘', '’'], "'")
+            .replace("\\u001b[31;1m", "") // red
+            .replace("\\u001b[0m", "") // reset
+    }
+
+    /// Advance the action iterator until a Publish whose payload contains
+    /// `text_to_match`; panics when none remains. Order-sensitive: each
+    /// call consumes up to and including the matching action.
+    fn assert_contains_job(actions: &mut IntoIter<worker::Action>, text_to_match: &str) {
+        println!("\n\n Searching: {text_to_match:?}");
+        actions
+            .position(|job| match job {
+                worker::Action::Publish(ref body) => {
+                    let content = std::str::from_utf8(&body.content).unwrap();
+                    let text = strip_escaped_ansi(content);
+                    eprintln!("{text}");
+                    if text.contains(text_to_match) {
+                        println!("    ok");
+                        true
+                    } else {
+                        println!("    notContains: {text}");
+                        false
+                    }
+                }
+                other => {
+                    println!("    notPublish: {other:?}");
+                    false
+                }
+            })
+            .unwrap_or_else(|| {
+                panic!("Actions should contain a job matching {text_to_match}, after the previous check")
+            });
+    }
+
+    // Happy path: the fixture PR's "success" attr builds, its output lines
+    // are streamed, two Success results are published, then an Ack.
+    #[tokio::test]
+    pub async fn test_simple_build() {
+        let p = TestScratch::new_dir("build-simple-build-working");
+        let bare_repo = TestScratch::new_dir("build-simple-build-bare");
+        let co_repo = TestScratch::new_dir("build-simple-build-co");
+
+        let head_sha = make_pr_repo(&bare_repo.path(), &co_repo.path());
+        let worker = make_worker(&p.path());
+
+        let job = buildjob::BuildJob {
+            attrs: vec!["success".to_owned()],
+            pr: Pr {
+                head_sha,
+                number: 1,
+                target_branch: Some("main".to_owned()),
+            },
+            repo: Repo {
+                clone_url: bare_repo.path().to_str().unwrap().to_owned(),
+                full_name: "test-git".to_owned(),
+                name: "project-tick".to_owned(),
+                owner: "tickborg-test".to_owned(),
+            },
+            subset: None,
+            logs: Some((Some(String::from("logs")), Some(String::from("build.log")))),
+            statusreport: Some((Some(String::from("build-results")), None)),
+            request_id: "bogus-request-id".to_owned(),
+        };
+
+        let dummyreceiver = Arc::new(notifyworker::DummyNotificationReceiver::new());
+
+        worker.consumer(job, dummyreceiver.clone()).await;
+
+        println!("Total actions: {:?}", dummyreceiver.actions.lock().len());
+        let actions_vec = dummyreceiver.actions.lock().clone();
+        let mut actions = actions_vec.into_iter();
+
+        assert_contains_job(&mut actions, "output\":\"hi");
+        assert_contains_job(&mut actions, "output\":\"1");
+        assert_contains_job(&mut actions, "output\":\"2");
+        assert_contains_job(&mut actions, "output\":\"3");
+        assert_contains_job(&mut actions, "output\":\"4");
+        assert_contains_job(&mut actions, "status\":\"Success\""); // First one to the github poster
+        assert_contains_job(&mut actions, "status\":\"Success\""); // This one to the logs
+        assert_eq!(actions.next(), Some(worker::Action::Ack));
+    }
+
+    // Requesting a nonexistent attr: the failure reason is logged and the
+    // attr shows up in skipped_attrs on both published results.
+    #[tokio::test]
+    pub async fn test_all_jobs_skipped() {
+        let p = TestScratch::new_dir("no-attempt");
+        let bare_repo = TestScratch::new_dir("no-attempt-bare");
+        let co_repo = TestScratch::new_dir("no-attempt-co");
+
+        let head_sha = make_pr_repo(&bare_repo.path(), &co_repo.path());
+        let worker = make_worker(&p.path());
+
+        let job = buildjob::BuildJob {
+            attrs: vec!["not-real".to_owned()],
+            pr: Pr {
+                head_sha,
+                number: 1,
+                target_branch: Some("main".to_owned()),
+            },
+            repo: Repo {
+                clone_url: bare_repo.path().to_str().unwrap().to_owned(),
+                full_name: "test-git".to_owned(),
+                name: "project-tick".to_owned(),
+                owner: "tickborg-test".to_owned(),
+            },
+            subset: None,
+            logs: Some((Some(String::from("logs")), Some(String::from("build.log")))),
+            statusreport: Some((Some(String::from("build-results")), None)),
+            request_id: "bogus-request-id".to_owned(),
+        };
+
+        let dummyreceiver = Arc::new(notifyworker::DummyNotificationReceiver::new());
+
+        worker.consumer(job, dummyreceiver.clone()).await;
+
+        println!("Total actions: {:?}", dummyreceiver.actions.lock().len());
+        let actions_vec = dummyreceiver.actions.lock().clone();
+        let mut actions = actions_vec.into_iter();
+
+        assert_contains_job(
+            &mut actions,
+            r#""line_number":1,"output":"Cannot build `not-real` because:""#,
+        );
+        assert_contains_job(
+            &mut actions,
+            "attribute 'not-real' in selection path 'not-real' not found\"}",
+        );
+        assert_contains_job(&mut actions, "skipped_attrs\":[\"not-real"); // First one to the github poster
+        assert_contains_job(&mut actions, "skipped_attrs\":[\"not-real"); // This one to the logs
+        assert_eq!(actions.next(), Some(worker::Action::Ack));
+    }
+}
diff --git a/ofborg/tickborg/src/tasks/eval/mod.rs b/ofborg/tickborg/src/tasks/eval/mod.rs
new file mode 100644
index 0000000000..6f8a2d1955
--- /dev/null
+++ b/ofborg/tickborg/src/tasks/eval/mod.rs
@@ -0,0 +1,48 @@
+mod monorepo;
+
+pub use self::monorepo::MonorepoStrategy;
+use crate::checkout::CachedProjectCo;
+use crate::commitstatus::{CommitStatus, CommitStatusError};
+use crate::evalchecker::EvalChecker;
+use crate::message::buildjob::BuildJob;
+
+use std::path::Path;
+
+/// Lifecycle hooks an evaluation strategy implements; called in order by
+/// the evaluation worker as a PR moves through clone → fetch → merge →
+/// final scheduling.
+pub trait EvaluationStrategy {
+    /// Runs before the repository is cloned.
+    fn pre_clone(&mut self) -> impl std::future::Future<Output = StepResult<()>>;
+
+    /// Runs after the target branch has been checked out at `co`.
+    fn on_target_branch(
+        &mut self,
+        co: &Path,
+        status: &mut CommitStatus,
+    ) -> impl std::future::Future<Output = StepResult<()>>;
+    /// Runs after the PR has been fetched into the checkout (sync).
+    fn after_fetch(&mut self, co: &CachedProjectCo) -> StepResult<()>;
+    /// Runs after the PR was merged into the target branch.
+    fn after_merge(
+        &mut self,
+        status: &mut CommitStatus,
+    ) -> impl std::future::Future<Output = StepResult<()>>;
+    /// Extra eval checks this strategy wants the worker to run.
+    fn evaluation_checks(&self) -> Vec<EvalChecker>;
+    /// Runs once every check passed; returns the builds to schedule.
+    fn all_evaluations_passed(
+        &mut self,
+        status: &mut CommitStatus,
+    ) -> impl std::future::Future<Output = StepResult<EvaluationComplete>>;
+}
+
+/// Result type for each evaluation step.
+pub type StepResult<T> = Result<T, Error>;
+
+/// Outcome of a successful evaluation: the build jobs to enqueue.
+#[derive(Default)]
+pub struct EvaluationComplete {
+    pub builds: Vec<BuildJob>,
+}
+
+/// Ways an evaluation step can fail.
+#[derive(Debug)]
+pub enum Error {
+    /// Writing a commit status to GitHub failed.
+    CommitStatusWrite(CommitStatusError),
+    /// The evaluation itself failed, with a human-readable reason.
+    Fail(String),
+}
+
+impl From<CommitStatusError> for Error {
+    fn from(e: CommitStatusError) -> Error {
+        Error::CommitStatusWrite(e)
+    }
+}
diff --git a/ofborg/tickborg/src/tasks/eval/monorepo.rs b/ofborg/tickborg/src/tasks/eval/monorepo.rs
new file mode 100644
index 0000000000..cc86653f0c
--- /dev/null
+++ b/ofborg/tickborg/src/tasks/eval/monorepo.rs
@@ -0,0 +1,254 @@
+use crate::buildtool::detect_changed_projects;
+use crate::checkout::CachedProjectCo;
+use crate::commentparser::Subset;
+use crate::commitstatus::CommitStatus;
+use crate::evalchecker::EvalChecker;
+use crate::message::buildjob::BuildJob;
+use crate::message::evaluationjob::EvaluationJob;
+use crate::tasks::eval::{EvaluationComplete, EvaluationStrategy, StepResult};
+use crate::tasks::evaluate::update_labels;
+
+use std::path::Path;
+
+use hubcaps::issues::IssueRef;
+use regex::Regex;
+use uuid::Uuid;
+
+/// Project Tick specific labels from PR title keywords.
+/// Each keyword is matched as a whole word (case-insensitively) against
+/// the PR title; the paired label is applied when it matches. Mostly
+/// project labels, plus two platform labels at the end.
+const TITLE_LABELS: [(&str, &str); 12] = [
+    ("meshmc", "project: meshmc"),
+    ("mnv", "project: mnv"),
+    ("neozip", "project: neozip"),
+    ("cmark", "project: cmark"),
+    ("cgit", "project: cgit"),
+    ("json4cpp", "project: json4cpp"),
+    ("tomlplusplus", "project: tomlplusplus"),
+    ("corebinutils", "project: corebinutils"),
+    ("forgewrapper", "project: forgewrapper"),
+    ("genqrcode", "project: genqrcode"),
+    ("darwin", "platform: macos"),
+    ("windows", "platform: windows"),
+];
+
+/// Derive labels from a PR title by whole-word keyword matching against
+/// TITLE_LABELS. Matching is case-insensitive; labels come back in
+/// TITLE_LABELS order.
+fn label_from_title(title: &str) -> Vec<String> {
+    let title_lower = title.to_lowercase();
+    let mut labels = Vec::new();
+    for (word, label) in TITLE_LABELS.iter() {
+        // \b anchors ensure "meshmc" does not match inside "meshmcx".
+        let re = Regex::new(&format!("\\b{word}\\b")).unwrap();
+        if re.is_match(&title_lower) {
+            labels.push((*label).into());
+        }
+    }
+    labels
+}
+
+/// Parses Conventional Commit messages to extract affected project scopes.
+///
+/// Two forms are recognized per line: "type(scope): ..." (any scope is
+/// accepted verbatim) and "name: ..." (accepted only when `name` is a
+/// known project). The result is sorted and de-duplicated.
+fn parse_commit_scopes(messages: &[String]) -> Vec<String> {
+    let scope_re = Regex::new(r"^[a-z]+\(([^)]+)\)").unwrap();
+    let colon_re = Regex::new(r"^([a-z0-9_-]+):").unwrap();
+
+    let mut projects: Vec<String> = Vec::new();
+    for line in messages {
+        let trimmed = line.trim();
+        // Conventional Commits: "feat(meshmc): ..."
+        if let Some(caps) = scope_re.captures(trimmed) {
+            projects.push(caps[1].to_string());
+        // Simple "project: description" — must name a known project.
+        } else if let Some(caps) = colon_re.captures(trimmed) {
+            let candidate = caps[1].to_string();
+            if crate::buildtool::find_project(&candidate).is_some() {
+                projects.push(candidate);
+            }
+        }
+    }
+
+    projects.sort();
+    projects.dedup();
+    projects
+}
+
+/// Evaluation strategy for the Project Tick monorepo: detects which
+/// projects a PR touches and schedules builds for exactly those.
+pub struct MonorepoStrategy<'a> {
+    job: &'a EvaluationJob,
+    issue_ref: &'a IssueRef,
+    // Filled in by after_fetch; None until the change set is analyzed.
+    changed_projects: Option<Vec<String>>,
+}
+
+impl<'a> MonorepoStrategy<'a> {
+    /// Strategy for one evaluation job; project detection happens later.
+    pub fn new(job: &'a EvaluationJob, issue_ref: &'a IssueRef) -> MonorepoStrategy<'a> {
+        Self {
+            job,
+            issue_ref,
+            changed_projects: None,
+        }
+    }
+
+    /// Apply "project:"/"platform:" labels derived from the PR title.
+    /// Silently does nothing when the issue cannot be fetched or the
+    /// title yields no labels.
+    async fn tag_from_title(&self) {
+        let title = match self.issue_ref.get().await {
+            Ok(issue) => issue.title.to_lowercase(),
+            Err(_) => return,
+        };
+
+        let labels = label_from_title(&title);
+        if !labels.is_empty() {
+            update_labels(self.issue_ref, &labels, &[]).await;
+        }
+    }
+
+    /// Queue a single build job covering every changed project, unless
+    /// the change set is empty or too large (more than 15 projects).
+    fn queue_builds(&self) -> StepResult<Vec<BuildJob>> {
+        let projects = match self.changed_projects {
+            Some(ref p) if !p.is_empty() && p.len() <= 15 => p.clone(),
+            _ => return Ok(vec![]),
+        };
+
+        Ok(vec![BuildJob::new(
+            self.job.repo.clone(),
+            self.job.pr.clone(),
+            Subset::Project,
+            projects,
+            None,
+            None,
+            Uuid::new_v4().to_string(),
+        )])
+    }
+}
+
+impl EvaluationStrategy for MonorepoStrategy<'_> {
+    /// Before cloning: label the PR from its title keywords.
+    async fn pre_clone(&mut self) -> StepResult<()> {
+        self.tag_from_title().await;
+        Ok(())
+    }
+
+    /// On the target branch: report that change analysis is starting.
+    async fn on_target_branch(&mut self, _dir: &Path, status: &mut CommitStatus) -> StepResult<()> {
+        status
+            .set_with_description(
+                "Analyzing changed projects",
+                hubcaps::statuses::State::Pending,
+            )
+            .await?;
+        Ok(())
+    }
+
+    /// After fetching the PR: determine the changed projects from both
+    /// the changed file paths and the commit-message scopes.
+    fn after_fetch(&mut self, co: &CachedProjectCo) -> StepResult<()> {
+        // Strategy 1: detect from changed files
+        let changed_files = co
+            .files_changed_from_head(&self.job.pr.head_sha)
+            .unwrap_or_default();
+        let mut projects = detect_changed_projects(&changed_files);
+
+        // Strategy 2: also parse commit messages for scopes
+        let commit_scopes = parse_commit_scopes(
+            &co.commit_messages_from_head(&self.job.pr.head_sha)
+                .unwrap_or_else(|_| vec!["".to_owned()]),
+        );
+
+        for scope in commit_scopes {
+            if !projects.contains(&scope) {
+                projects.push(scope);
+            }
+        }
+
+        projects.sort();
+        projects.dedup();
+        self.changed_projects = Some(projects);
+
+        Ok(())
+    }
+
+    /// After the merge: surface the detected project list on the commit
+    /// status so reviewers can see what will be built.
+    async fn after_merge(&mut self, status: &mut CommitStatus) -> StepResult<()> {
+        let project_list = self
+            .changed_projects
+            .as_ref()
+            .map(|p| p.join(", "))
+            .unwrap_or_else(|| "none".into());
+        status
+            .set_with_description(
+                &format!("Changed: {project_list}"),
+                hubcaps::statuses::State::Pending,
+            )
+            .await?;
+        Ok(())
+    }
+
+    /// This strategy adds no extra eval checks.
+    fn evaluation_checks(&self) -> Vec<EvalChecker> {
+        vec![]
+    }
+
+    /// All checks passed: queue builds for the changed projects.
+    async fn all_evaluations_passed(
+        &mut self,
+        status: &mut CommitStatus,
+    ) -> StepResult<EvaluationComplete> {
+        status
+            .set_with_description(
+                "Scheduling project builds",
+                hubcaps::statuses::State::Pending,
+            )
+            .await?;
+
+        let builds = self.queue_builds()?;
+        Ok(EvaluationComplete { builds })
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    // Title labeling: whole-word, case-insensitive, ordered by
+    // TITLE_LABELS; unmatched titles yield no labels.
+    #[test]
+    fn test_label_from_title() {
+        assert_eq!(
+            label_from_title("feat(meshmc): add new block type"),
+            vec![String::from("project: meshmc")]
+        );
+        assert_eq!(
+            label_from_title("fix windows build for meshmc"),
+            vec![String::from("project: meshmc"), String::from("platform: windows")]
+        );
+        assert_eq!(
+            label_from_title("fix darwin support"),
+            vec![String::from("platform: macos")]
+        );
+        assert_eq!(
+            label_from_title("docs: update README"),
+            Vec::<String>::new()
+        );
+    }
+
+    // Scope extraction: conventional-commit scopes and known-project
+    // "name:" prefixes are collected, sorted and de-duplicated; plain
+    // messages and merge commits contribute nothing.
+    #[test]
+    fn test_parse_commit_scopes() {
+        let messages = vec![
+            "feat(meshmc): add new feature".into(),
+            "fix(mnv): resolve segfault".into(),
+            "chore: update CI".into(),
+            "Merge pull request #123 from feature/xyz".into(),
+            "neozip: bump version".into(),
+        ];
+        let scopes = parse_commit_scopes(&messages);
+        assert_eq!(scopes, vec!["meshmc", "mnv", "neozip"]);
+    }
+
+    #[test]
+    fn test_parse_commit_scopes_unknown() {
+        let messages = vec![
+            "feat(unknownproject): something".into(),
+            "docs: update readme".into(),
+        ];
+        let scopes = parse_commit_scopes(&messages);
+        // "unknownproject" should be included (scope from conventional commit)
+        // "docs" should NOT be included (not a known project)
+        assert_eq!(scopes, vec!["unknownproject"]);
+    }
+}
diff --git a/ofborg/tickborg/src/tasks/evaluate.rs b/ofborg/tickborg/src/tasks/evaluate.rs
new file mode 100644
index 0000000000..8f277aa228
--- /dev/null
+++ b/ofborg/tickborg/src/tasks/evaluate.rs
@@ -0,0 +1,556 @@
+/// This is what evaluates every pull-request
+use crate::acl::Acl;
+use crate::checkout;
+use crate::commitstatus::{CommitStatus, CommitStatusError};
+use crate::config::GithubAppVendingMachine;
+use crate::message::{buildjob, evaluationjob};
+use crate::stats::{self, Event};
+use crate::systems;
+use crate::tasks::eval;
+use crate::tasks::eval::EvaluationStrategy;
+use crate::worker;
+use futures::stream::StreamExt;
+use futures_util::TryFutureExt;
+
+use std::path::Path;
+use std::time::Instant;
+
+use tracing::{debug_span, error, info, warn};
+
+/// Worker that evaluates every incoming pull-request job, parameterized
+/// over the stats sink `E`.
+pub struct EvaluationWorker<E> {
+    cloner: checkout::CachedCloner,
+    // RwLock so a &self consumer can refresh app tokens on demand.
+    github_vend: tokio::sync::RwLock<GithubAppVendingMachine>,
+    acl: Acl,
+    identity: String,
+    events: E,
+}
+
+impl<E: stats::SysEvents> EvaluationWorker<E> {
+    /// Build an evaluation worker; wraps the GitHub App token vendor in
+    /// an async RwLock for use from the consumer.
+    pub fn new(
+        cloner: checkout::CachedCloner,
+        github_vend: GithubAppVendingMachine,
+        acl: Acl,
+        identity: String,
+        events: E,
+    ) -> EvaluationWorker<E> {
+        Self {
+            cloner,
+            github_vend: tokio::sync::RwLock::new(github_vend),
+            acl,
+            identity,
+            events,
+        }
+    }
+}
+
+impl<E: stats::SysEvents + 'static> worker::SimpleWorker for EvaluationWorker<E> {
+    type J = evaluationjob::EvaluationJob;
+
+    /// Decode a message into an EvaluationJob, emitting receive/decode
+    /// stats events either way. Unlike the build worker, a bad message
+    /// returns Err instead of panicking.
+    async fn msg_to_job(
+        &mut self,
+        _: &str,
+        _: &Option<String>,
+        body: &[u8],
+    ) -> Result<Self::J, String> {
+        self.events.notify(Event::JobReceived).await;
+        match evaluationjob::from(body) {
+            Ok(job) => {
+                self.events.notify(Event::JobDecodeSuccess).await;
+                Ok(job)
+            }
+            Err(err) => {
+                self.events.notify(Event::JobDecodeFailure).await;
+                error!(
+                    "Failed to decode message: {}, Err: {err:?}",
+                    std::str::from_utf8(body).unwrap_or("<message not utf8>")
+                );
+                Err("Failed to decode message".to_owned())
+            }
+        }
+    }
+
+    /// Evaluate one job: acquire a repo-scoped GitHub client, then hand
+    /// the job to a fresh OneEval for the actual work.
+    async fn consumer(&mut self, job: &evaluationjob::EvaluationJob) -> worker::Actions {
+        let span = debug_span!("job", pr = ?job.pr.number);
+        let _enter = span.enter();
+
+        let mut vending_machine = self.github_vend.write().await;
+
+        let github_client = vending_machine
+            .for_repo(&job.repo.owner, &job.repo.name)
+            .await
+            .expect("Failed to get a github client token");
+
+        OneEval::new(
+            github_client,
+            &self.acl,
+            &mut self.events,
+            &self.identity,
+            &self.cloner,
+            job,
+        )
+        .worker_actions()
+        .await
+    }
+}
+
+/// State for evaluating a single job: borrowed worker resources plus a
+/// repository handle resolved once up front.
+struct OneEval<'a, E> {
+    client_app: &'a hubcaps::Github,
+    repo: hubcaps::repositories::Repository,
+    acl: &'a Acl,
+    events: &'a mut E,
+    identity: &'a str,
+    cloner: &'a checkout::CachedCloner,
+    job: &'a evaluationjob::EvaluationJob,
+}
+
+impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
+    /// Bundle per-evaluation state; resolves the repository handle once
+    /// from the app-authenticated client.
+    #[allow(clippy::too_many_arguments)]
+    fn new(
+        client_app: &'a hubcaps::Github,
+        acl: &'a Acl,
+        events: &'a mut E,
+        identity: &'a str,
+        cloner: &'a checkout::CachedCloner,
+        job: &'a evaluationjob::EvaluationJob,
+    ) -> OneEval<'a, E> {
+        let repo = client_app.repo(job.repo.owner.clone(), job.repo.name.clone());
+        OneEval {
+            client_app,
+            repo,
+            acl,
+            events,
+            identity,
+            cloner,
+            job,
+        }
+    }
+
+    /// Factory for the job's action helpers (skip / retry_later below).
+    fn actions(&self) -> evaluationjob::Actions {
+        evaluationjob::Actions {}
+    }
+
+    /// Post a commit status for the job's head SHA under the
+    /// "<prefix>-eval" context.
+    ///
+    /// GitHub caps status descriptions at 140 characters, so over-long
+    /// descriptions are truncated with a warning.
+    async fn update_status(
+        &self,
+        description: String,
+        url: Option<String>,
+        state: hubcaps::statuses::State,
+    ) -> Result<(), CommitStatusError> {
+        // Count characters, not bytes: the previous `len() >= 140` check
+        // measured UTF-8 bytes, producing spurious truncation warnings
+        // (and a no-op truncation) for multi-byte descriptions that were
+        // actually within the limit.
+        let description = if description.chars().count() > 140 {
+            warn!(
+                "description is over 140 char; truncating: {:?}",
+                &description
+            );
+            description.chars().take(140).collect()
+        } else {
+            description
+        };
+        let repo = self
+            .client_app
+            .repo(self.job.repo.owner.clone(), self.job.repo.name.clone());
+        let prefix = get_prefix(repo.statuses(), &self.job.pr.head_sha).await?;
+
+        let mut builder = hubcaps::statuses::StatusOptions::builder(state);
+        builder.context(format!("{prefix}-eval"));
+        builder.description(description.clone());
+
+        if let Some(url) = url {
+            builder.target_url(url);
+        }
+
+        info!(
+            "Updating status on {}:{} -> {}",
+            &self.job.pr.number, &self.job.pr.head_sha, &description
+        );
+
+        self.repo
+            .statuses()
+            .create(&self.job.pr.head_sha, &builder.build())
+            .map_ok(|_| ())
+            .map_err(CommitStatusError::from)
+            .await
+    }
+
+    /// Run the evaluation and translate its outcome into queue actions.
+    ///
+    /// The nested Result encodes three layers: Ok = eval succeeded;
+    /// Err(Ok(())) = eval failed but the failure status was posted to
+    /// GitHub; Err(Err(e)) = posting the status itself failed, and `e`
+    /// decides whether to retry or skip.
+    async fn worker_actions(&mut self) -> worker::Actions {
+        let eval_result = match self.evaluate_job().await {
+            Ok(v) => Ok(v),
+            Err(eval_error) => match eval_error {
+                // Handle error cases which expect us to post statuses
+                // to github. Convert Eval Errors in to Result<_, CommitStatusWrite>
+                EvalWorkerError::EvalError(eval::Error::Fail(msg)) => Err(self
+                    .update_status(msg, None, hubcaps::statuses::State::Failure)
+                    .await),
+                EvalWorkerError::EvalError(eval::Error::CommitStatusWrite(e)) => Err(Err(e)),
+                EvalWorkerError::CommitStatusWrite(e) => Err(Err(e)),
+            },
+        };
+
+        match eval_result {
+            Ok(eval_actions) => {
+                // Success: clear any stale internal-error label.
+                let issue_ref = self.repo.issue(self.job.pr.number);
+                update_labels(&issue_ref, &[], &[String::from("tickborg-internal-error")]).await;
+
+                eval_actions
+            }
+            Err(Ok(())) => {
+                // There was an error during eval, but we successfully
+                // updated the PR.
+
+                let issue_ref = self.repo.issue(self.job.pr.number);
+                update_labels(&issue_ref, &[], &[String::from("tickborg-internal-error")]).await;
+
+                self.actions().skip(self.job)
+            }
+            // Transient status-write failures: requeue for another try.
+            Err(Err(CommitStatusError::ExpiredCreds(e))) => {
+                error!("Failed writing commit status: creds expired: {:?}", e);
+                self.actions().retry_later(self.job)
+            }
+            Err(Err(CommitStatusError::InternalError(e))) => {
+                error!("Failed writing commit status: internal error: {:?}", e);
+                self.actions().retry_later(self.job)
+            }
+            // The SHA is gone (force-push); retrying would never succeed.
+            Err(Err(CommitStatusError::MissingSha(e))) => {
+                error!(
+                    "Failed writing commit status: commit sha was force-pushed away: {:?}",
+                    e
+                );
+                self.actions().skip(self.job)
+            }
+
+            Err(Err(CommitStatusError::Error(cswerr))) => {
+                error!(
+                    "Internal error writing commit status: {:?}, marking internal error",
+                    cswerr
+                );
+                let issue_ref = self.repo.issue(self.job.pr.number);
+                update_labels(&issue_ref, &[String::from("tickborg-internal-error")], &[]).await;
+
+                self.actions().skip(self.job)
+            }
+        }
+    }
+
+ /// Run a full evaluation for `self.job`'s PR.
+ ///
+ /// Pipeline: fetch the GitHub issue (skip if closed or fetch fails),
+ /// decide auto-build architectures (none if the PR is WIP), clone the
+ /// repo, check out the target branch, fetch and merge the PR head,
+ /// run every evaluation check from the strategy, and on success
+ /// schedule build jobs. Progress is mirrored to a `{prefix}-eval`
+ /// commit status throughout.
+ ///
+ /// Returns the queue `Actions` to perform (`skip` or `done` plus any
+ /// publish actions). Errors are `EvalWorkerError`: commit-status write
+ /// failures and eval-strategy failures, both propagated with `?`.
+ async fn evaluate_job(&mut self) -> Result<worker::Actions, EvalWorkerError> {
+ let job = self.job;
+ let repo = self
+ .client_app
+ .repo(self.job.repo.owner.clone(), self.job.repo.name.clone());
+ let issue_ref = repo.issue(job.pr.number);
+ // Architectures to auto-schedule builds on; assigned in the match below.
+ let auto_schedule_build_archs: Vec<systems::System>;
+
+ match issue_ref.get().await {
+ Ok(iss) => {
+ // Closed PRs are acked without evaluation.
+ if iss.state == "closed" {
+ self.events.notify(Event::IssueAlreadyClosed).await;
+ info!("Skipping {} because it is closed", job.pr.number);
+ return Ok(self.actions().skip(job));
+ }
+
+ // WIP PRs are still evaluated, but no builds are auto-scheduled.
+ if issue_is_wip(&iss) {
+ auto_schedule_build_archs = vec![];
+ } else {
+ auto_schedule_build_archs = self.acl.build_job_architectures_for_user_repo(
+ &iss.user.login,
+ &job.repo.full_name,
+ );
+ }
+ }
+
+ Err(e) => {
+ // Fetch failure is non-fatal for the queue: record it and skip.
+ self.events.notify(Event::IssueFetchFailed).await;
+ error!("Error fetching {}!", job.pr.number);
+ error!("E: {:?}", e);
+ return Ok(self.actions().skip(job));
+ }
+ };
+
+ let mut evaluation_strategy = eval::MonorepoStrategy::new(job, &issue_ref);
+
+ // "grahamcofborg" (legacy) or "tickborg" — see get_prefix().
+ let prefix = get_prefix(repo.statuses(), &job.pr.head_sha).await?;
+
+ // Aggregate status for the whole evaluation; per-check statuses are
+ // created separately in the stream below.
+ let mut overall_status = CommitStatus::new(
+ repo.statuses(),
+ job.pr.head_sha.clone(),
+ format!("{prefix}-eval"),
+ "Starting".to_owned(),
+ None,
+ );
+
+ overall_status
+ .set_with_description("Starting", hubcaps::statuses::State::Pending)
+ .await?;
+
+ evaluation_strategy.pre_clone().await?;
+
+ let project = self
+ .cloner
+ .project(&job.repo.full_name, job.repo.clone_url.clone());
+
+ overall_status
+ .set_with_description("Cloning project", hubcaps::statuses::State::Pending)
+ .await?;
+
+ info!("Working on {}", job.pr.number);
+ // Clone failures are surfaced as commit-status internal errors so the
+ // error handler can report them on the PR.
+ let co = project
+ .clone_for("mr-est".to_string(), self.identity.to_string())
+ .map_err(|e| {
+ EvalWorkerError::CommitStatusWrite(CommitStatusError::InternalError(format!(
+ "Cloning failed: {e}"
+ )))
+ })?;
+
+ // Default to "main" when the event carried no target branch.
+ let target_branch = match job.pr.target_branch.clone() {
+ Some(x) => x,
+ None => String::from("main"),
+ };
+
+ overall_status
+ .set_with_description(
+ format!("Checking out {}", &target_branch).as_ref(),
+ hubcaps::statuses::State::Pending,
+ )
+ .await?;
+ info!("Checking out target branch {}", &target_branch);
+ let refpath = co
+ .checkout_origin_ref(target_branch.as_ref())
+ .map_err(|e| {
+ EvalWorkerError::CommitStatusWrite(CommitStatusError::InternalError(format!(
+ "Checking out target branch failed: {e}"
+ )))
+ })?;
+
+ evaluation_strategy
+ .on_target_branch(Path::new(&refpath), &mut overall_status)
+ .await?;
+
+ let target_branch_rebuild_sniff_start = Instant::now();
+
+ // NOTE(review): elapsed() is taken immediately after Instant::now(),
+ // so this duration is always ~0. If the intent was to time the
+ // on_target_branch step, the Instant should be created before it —
+ // confirm against the upstream history.
+ self.events
+ .notify(Event::EvaluationDuration(
+ target_branch.clone(),
+ target_branch_rebuild_sniff_start.elapsed().as_secs(),
+ ))
+ .await;
+ self.events
+ .notify(Event::EvaluationDurationCount(target_branch))
+ .await;
+
+ overall_status
+ .set_with_description("Fetching PR", hubcaps::statuses::State::Pending)
+ .await?;
+
+ co.fetch_pr(job.pr.number).map_err(|e| {
+ EvalWorkerError::CommitStatusWrite(CommitStatusError::InternalError(format!(
+ "Fetching PR failed: {e}"
+ )))
+ })?;
+
+ // Head SHA can vanish if the PR was force-pushed since the event.
+ if !co.commit_exists(job.pr.head_sha.as_ref()) {
+ overall_status
+ .set_with_description("Commit not found", hubcaps::statuses::State::Error)
+ .await?;
+
+ info!("Commit {} doesn't exist", job.pr.head_sha);
+ return Ok(self.actions().skip(job));
+ }
+
+ evaluation_strategy.after_fetch(&co)?;
+
+ overall_status
+ .set_with_description("Merging PR", hubcaps::statuses::State::Pending)
+ .await?;
+
+ // Merge conflicts fail the status but are not an internal error.
+ if co.merge_commit(job.pr.head_sha.as_ref()).is_err() {
+ overall_status
+ .set_with_description("Failed to merge", hubcaps::statuses::State::Failure)
+ .await?;
+
+ info!("Failed to merge {}", job.pr.head_sha);
+
+ return Ok(self.actions().skip(job));
+ }
+
+ evaluation_strategy.after_merge(&mut overall_status).await?;
+
+ info!("Got path: {:?}, building", refpath);
+ overall_status
+ .set_with_description("Beginning Evaluations", hubcaps::statuses::State::Pending)
+ .await?;
+
+ // Run every check; each gets its own "{prefix}-eval-{name}" status.
+ // buffered(1) executes the futures one at a time (sequentially), and
+ // .all() yields true only if every check succeeded.
+ let eval_results: bool = futures::stream::iter(evaluation_strategy.evaluation_checks())
+ .map(|check| {
+ // We need to clone or move variables into the async block
+ let repo_statuses = repo.statuses();
+ let head_sha = job.pr.head_sha.clone();
+ let refpath = refpath.clone();
+
+ async move {
+ let status = CommitStatus::new(
+ repo_statuses,
+ head_sha,
+ format!("{prefix}-eval-{}", check.name()),
+ check.cli_cmd(),
+ None,
+ );
+
+ status
+ .set(hubcaps::statuses::State::Pending)
+ .await
+ .expect("Failed to set status on eval strategy");
+
+ // check.execute() is synchronous; only its Ok/Err matters here.
+ let state = match check.execute(Path::new(&refpath)) {
+ Ok(_) => hubcaps::statuses::State::Success,
+ Err(_) => hubcaps::statuses::State::Failure,
+ };
+
+ status
+ .set(state.clone())
+ .await
+ .expect("Failed to set status on eval strategy");
+
+ if state == hubcaps::statuses::State::Success {
+ Ok(())
+ } else {
+ Err(())
+ }
+ }
+ })
+ .buffered(1)
+ .all(|res| async move { res.is_ok() })
+ .await;
+
+ info!("Finished evaluations");
+ let mut response: worker::Actions = vec![];
+
+ if eval_results {
+ let complete = evaluation_strategy
+ .all_evaluations_passed(&mut overall_status)
+ .await?;
+
+ // Publish build jobs for the architectures chosen earlier.
+ response.extend(schedule_builds(complete.builds, auto_schedule_build_archs));
+
+ overall_status
+ .set_with_description("^.^!", hubcaps::statuses::State::Success)
+ .await?;
+ } else {
+ overall_status
+ .set_with_description("Complete, with errors", hubcaps::statuses::State::Failure)
+ .await?;
+ }
+
+ self.events.notify(Event::TaskEvaluationCheckComplete).await;
+
+ info!("Evaluations done!");
+ Ok(self.actions().done(job, response))
+ }
+}
+
+/// Turn the evaluated build jobs into publish actions.
+///
+/// For each `BuildJob`, one message is published per architecture (to
+/// that arch's exchange/routing key from `as_build_destination()`), plus
+/// a single `QueuedBuildJobs` record to the "build-results" exchange so
+/// the comment poster can announce what was queued.
+///
+/// If `auto_schedule_build_archs` is empty (e.g. a WIP PR), only the
+/// "build-results" messages are produced, each with an empty
+/// architecture list.
+fn schedule_builds(
+ builds: Vec<buildjob::BuildJob>,
+ auto_schedule_build_archs: Vec<systems::System>,
+) -> Vec<worker::Action> {
+ let mut response = vec![];
+ info!(
+ "Scheduling build jobs {:?} on arches {:?}",
+ builds, auto_schedule_build_archs
+ );
+ for buildjob in builds {
+ for arch in auto_schedule_build_archs.iter() {
+ let (exchange, routingkey) = arch.as_build_destination();
+ response.push(worker::publish_serde_action(
+ exchange, routingkey, &buildjob,
+ ));
+ }
+ // Announce the queued job (consumes `buildjob` — must come after
+ // the per-arch loop above, which only borrows it).
+ response.push(worker::publish_serde_action(
+ Some("build-results".to_string()),
+ None,
+ &buildjob::QueuedBuildJobs {
+ job: buildjob,
+ architectures: auto_schedule_build_archs
+ .iter()
+ .map(|arch| arch.to_string())
+ .collect(),
+ },
+ ));
+ }
+
+ response
+}
+
+/// Add and remove labels on an issue, skipping no-op changes.
+///
+/// Labels in `add` that are already present and labels in `remove` that
+/// are absent are filtered out before any API call is made.
+///
+/// # Panics
+/// Panics (via `expect`/`unwrap_or_else`) if fetching the issue or any
+/// label add/remove call fails — callers must tolerate a worker panic
+/// on GitHub API errors here.
+pub async fn update_labels(
+ issueref: &hubcaps::issues::IssueRef,
+ add: &[String],
+ remove: &[String],
+) {
+ let l = issueref.labels();
+ let issue = issueref.get().await.expect("Failed to get issue");
+
+ let existing: Vec<String> = issue.labels.iter().map(|l| l.name.clone()).collect();
+
+ let to_add: Vec<&str> = add
+ .iter()
+ .filter(|l| !existing.contains(l)) // Skip labels already on the issue
+ .map(|l| l.as_ref())
+ .collect();
+
+ let to_remove: Vec<String> = remove
+ .iter()
+ .filter(|l| existing.contains(l)) // Only remove labels actually on the issue
+ .cloned()
+ .collect();
+
+ let issue = issue.number;
+
+ info!("Labeling issue #{issue}: + {to_add:?} , - {to_remove:?}, = {existing:?}");
+
+ l.add(to_add.clone())
+ .await
+ .unwrap_or_else(|err| panic!("Failed to add labels {to_add:?} to issue #{issue}: {err:?}"));
+
+ // The labels API removes one label per call.
+ for label in to_remove {
+ l.remove(&label).await.unwrap_or_else(|err| {
+ panic!("Failed to remove label {label:?} from issue #{issue}: {err:?}")
+ });
+ }
+}
+
+/// Whether the PR title marks it as work-in-progress: a leading "WIP:"
+/// or a "[WIP]" tag anywhere in the title. Matching is case-sensitive.
+fn issue_is_wip(issue: &hubcaps::issues::Issue) -> bool {
+ issue.title.starts_with("WIP:") || issue.title.contains("[WIP]")
+}
+
+/// Determine whether or not to use the "old" status prefix, `grahamcofborg`, or
+/// the new one, `tickborg`.
+///
+/// If the PR already has any `grahamcofborg`-prefixed statuses, continue to use
+/// that for backwards compatibility. Otherwise use the new prefix.
+///
+/// Errors from listing the statuses for `sha` are propagated as
+/// `CommitStatusError` (via the `From` impl behind `?`).
+pub async fn get_prefix(
+ statuses: hubcaps::statuses::Statuses,
+ sha: &str,
+) -> Result<&str, CommitStatusError> {
+ if statuses
+ .list(sha)
+ .await?
+ .iter()
+ .any(|s| s.context.starts_with("grahamcofborg-"))
+ {
+ Ok("grahamcofborg")
+ } else {
+ Ok("tickborg")
+ }
+}
+
+/// Failure modes of the evaluation worker: an evaluation-strategy error
+/// or a failed commit-status write. Both are constructed via the `From`
+/// impls below so `?` works throughout `evaluate_job`.
+enum EvalWorkerError {
+ EvalError(eval::Error),
+ CommitStatusWrite(CommitStatusError),
+}
+
+/// Allow `?` on eval-strategy results inside `evaluate_job`.
+impl From<eval::Error> for EvalWorkerError {
+ fn from(e: eval::Error) -> EvalWorkerError {
+ EvalWorkerError::EvalError(e)
+ }
+}
+
+/// Allow `?` on commit-status writes inside `evaluate_job`.
+impl From<CommitStatusError> for EvalWorkerError {
+ fn from(e: CommitStatusError) -> EvalWorkerError {
+ EvalWorkerError::CommitStatusWrite(e)
+ }
+}
diff --git a/ofborg/tickborg/src/tasks/evaluationfilter.rs b/ofborg/tickborg/src/tasks/evaluationfilter.rs
new file mode 100644
index 0000000000..85d61b6f3a
--- /dev/null
+++ b/ofborg/tickborg/src/tasks/evaluationfilter.rs
@@ -0,0 +1,146 @@
+use crate::acl;
+use crate::ghevent;
+use crate::message::{Pr, Repo, evaluationjob};
+use crate::worker;
+
+use tracing::{debug_span, info};
+
+/// Filters incoming pull-request events down to the ones worth
+/// evaluating, gated by the repository ACL.
+pub struct EvaluationFilterWorker {
+ acl: acl::Acl,
+}
+
+impl EvaluationFilterWorker {
+ /// Build a filter worker over the given repository ACL.
+ pub fn new(acl: acl::Acl) -> EvaluationFilterWorker {
+ EvaluationFilterWorker { acl }
+ }
+}
+
+impl worker::SimpleWorker for EvaluationFilterWorker {
+ type J = ghevent::PullRequestEvent;
+
+ /// Deserialize the raw message body as a `PullRequestEvent`.
+ /// Returns a descriptive `Err` string (including the body when it is
+ /// valid UTF-8) on malformed input instead of panicking.
+ async fn msg_to_job(
+ &mut self,
+ _: &str,
+ _: &Option<String>,
+ body: &[u8],
+ ) -> Result<Self::J, String> {
+ match serde_json::from_slice(body) {
+ Ok(event) => Ok(event),
+ Err(err) => Err(format!(
+ "Failed to deserialize job {err:?}: {:?}",
+ std::str::from_utf8(body).unwrap_or("<job not utf8>")
+ )),
+ }
+ }
+
+ /// Decide whether this PR event should trigger an evaluation.
+ ///
+ /// Events are acked without further action when the repo is not in the
+ /// ACL, the PR is not open, or the action is uninteresting. Otherwise
+ /// an `EvaluationJob` is published to "mass-rebuild-check-jobs".
+ async fn consumer(&mut self, job: &ghevent::PullRequestEvent) -> worker::Actions {
+ let span = debug_span!("job", pr = ?job.number);
+ let _enter = span.enter();
+
+ if !self.acl.is_repo_eligible(&job.repository.full_name) {
+ info!("Repo not authorized ({})", job.repository.full_name);
+ return vec![worker::Action::Ack];
+ }
+
+ if job.pull_request.state != ghevent::PullRequestState::Open {
+ info!(
+ "PR is not open ({}#{})",
+ job.repository.full_name, job.number
+ );
+ return vec![worker::Action::Ack];
+ }
+
+ // Interesting actions: opened / synchronize / reopened, and edits
+ // only when the base branch changed (changes.base present).
+ let interesting: bool = match job.action {
+ ghevent::PullRequestAction::Opened => true,
+ ghevent::PullRequestAction::Synchronize => true,
+ ghevent::PullRequestAction::Reopened => true,
+ ghevent::PullRequestAction::Edited => {
+ if let Some(ref changes) = job.changes {
+ changes.base.is_some()
+ } else {
+ false
+ }
+ }
+ _ => false,
+ };
+
+ if !interesting {
+ info!(
+ "Not interesting: {}#{} because of {:?}",
+ job.repository.full_name, job.number, job.action
+ );
+
+ return vec![worker::Action::Ack];
+ }
+
+ info!(
+ "Found {}#{} to be interesting because of {:?}",
+ job.repository.full_name, job.number, job.action
+ );
+ let repo_msg = Repo {
+ clone_url: job.repository.clone_url.clone(),
+ full_name: job.repository.full_name.clone(),
+ owner: job.repository.owner.login.clone(),
+ name: job.repository.name.clone(),
+ };
+
+ let pr_msg = Pr {
+ number: job.number,
+ head_sha: job.pull_request.head.sha.clone(),
+ target_branch: Some(job.pull_request.base.git_ref.clone()),
+ };
+
+ let msg = evaluationjob::EvaluationJob {
+ repo: repo_msg,
+ pr: pr_msg,
+ };
+
+ // Publish the job, then ack the original message.
+ vec![
+ worker::publish_serde_action(None, Some("mass-rebuild-check-jobs".to_owned()), &msg),
+ worker::Action::Ack,
+ ]
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::worker::SimpleWorker;
+
+ /// An "edited" event whose base branch changed must produce exactly one
+ /// EvaluationJob publish (to "mass-rebuild-check-jobs") followed by an Ack.
+ /// Fixture: test-srcs/events/pr-changed-base.json.
+ #[tokio::test]
+ async fn changed_base() {
+ let data = include_str!("../../test-srcs/events/pr-changed-base.json");
+
+ let job: ghevent::PullRequestEvent =
+ serde_json::from_str(data).expect("Should properly deserialize");
+
+ // ACL allows the fixture's repo so the event is not filtered out.
+ let mut worker = EvaluationFilterWorker::new(acl::Acl::new(
+ vec!["project-tick/Project-Tick".to_owned()],
+ Some(vec![]),
+ ));
+
+ assert_eq!(
+ worker.consumer(&job).await,
+ vec![
+ worker::publish_serde_action(
+ None,
+ Some("mass-rebuild-check-jobs".to_owned()),
+ &evaluationjob::EvaluationJob {
+ repo: Repo {
+ clone_url: String::from("https://github.com/project-tick/Project-Tick.git"),
+ full_name: String::from("project-tick/Project-Tick"),
+ owner: String::from("project-tick"),
+ name: String::from("Project-Tick"),
+ },
+ pr: Pr {
+ number: 33299,
+ head_sha: String::from("887e8b460a7d45ddb3bbdebe01447b251b3229e8"),
+ target_branch: Some(String::from("staging")),
+ },
+ }
+ ),
+ worker::Action::Ack,
+ ]
+ );
+ }
+}
diff --git a/ofborg/tickborg/src/tasks/githubcommentfilter.rs b/ofborg/tickborg/src/tasks/githubcommentfilter.rs
new file mode 100644
index 0000000000..2a27061577
--- /dev/null
+++ b/ofborg/tickborg/src/tasks/githubcommentfilter.rs
@@ -0,0 +1,182 @@
+use crate::acl;
+use crate::commentparser;
+use crate::ghevent;
+use crate::message::{Pr, Repo, buildjob, evaluationjob};
+use crate::worker;
+
+use tracing::{debug_span, error, info};
+use uuid::Uuid;
+
+/// Parses bot commands out of issue comments and turns them into build,
+/// test, and eval jobs, gated by the per-user/per-repo ACL.
+pub struct GitHubCommentWorker {
+ acl: acl::Acl,
+ github: hubcaps::Github,
+}
+
+impl GitHubCommentWorker {
+ /// Build a comment worker from the ACL and an authenticated GitHub client.
+ pub fn new(acl: acl::Acl, github: hubcaps::Github) -> GitHubCommentWorker {
+ GitHubCommentWorker { acl, github }
+ }
+}
+
+impl worker::SimpleWorker for GitHubCommentWorker {
+ type J = ghevent::IssueComment;
+
+ /// Deserialize the raw message body as an `IssueComment`.
+ ///
+ /// # Panics
+ /// Unlike the other workers, malformed input panics here after
+ /// logging the body.
+ /// NOTE(review): the log message spells "IsssueComment" with three
+ /// 's' — a typo in a runtime string; fix in a follow-up code change.
+ async fn msg_to_job(
+ &mut self,
+ _: &str,
+ _: &Option<String>,
+ body: &[u8],
+ ) -> Result<Self::J, String> {
+ match serde_json::from_slice(body) {
+ Ok(comment) => Ok(comment),
+ Err(err) => {
+ error!(
+ "Failed to deserialize IsssueComment: {:?}",
+ std::str::from_utf8(body).unwrap_or("<not utf8>")
+ );
+ panic!("{err:?}");
+ }
+ }
+ }
+
+ // FIXME: remove with rust/cargo update
+ #[allow(clippy::cognitive_complexity)]
+ /// Handle an issue-comment event: ignore deletes/pins, parse the
+ /// comment for instructions, check the commenter's build rights, and
+ /// publish the corresponding Build/Test/Eval jobs. Always ends with
+ /// an Ack.
+ async fn consumer(&mut self, job: &ghevent::IssueComment) -> worker::Actions {
+ let span = debug_span!("job", pr = ?job.issue.number);
+ let _enter = span.enter();
+
+ // Comment deletions and pin/unpin events carry no new instructions.
+ if job.action == ghevent::IssueCommentAction::Deleted
+ || job.action == ghevent::IssueCommentAction::Pinned
+ || job.action == ghevent::IssueCommentAction::Unpinned
+ {
+ return vec![worker::Action::Ack];
+ }
+
+ // Cheap early-out: no parseable instructions at all.
+ let instructions = commentparser::parse(&job.comment.body);
+ if instructions.is_none() {
+ return vec![worker::Action::Ack];
+ }
+
+ let build_destinations = self.acl.build_job_architectures_for_user_repo(
+ &job.comment.user.login,
+ &job.repository.full_name,
+ );
+
+ if build_destinations.is_empty() {
+ info!("No build destinations for: {:?}", job);
+ // Don't process comments if they can't build anything
+ return vec![worker::Action::Ack];
+ }
+
+ info!("Got job: {:?}", job);
+
+ // NOTE(review): this re-parses the same comment body; the result of
+ // the parse above could be reused.
+ let instructions = commentparser::parse(&job.comment.body);
+ info!("Instructions: {:?}", instructions);
+
+ // Look the PR up to learn its head SHA and base branch.
+ let pr = self
+ .github
+ .repo(
+ job.repository.owner.login.clone(),
+ job.repository.name.clone(),
+ )
+ .pulls()
+ .get(job.issue.number)
+ .get()
+ .await;
+
+ // A fetch failure (e.g. the comment was on a plain issue, not a PR)
+ // is logged and acked rather than retried.
+ if let Err(x) = pr {
+ info!(
+ "fetching PR {}#{} from GitHub yielded error {}",
+ job.repository.full_name, job.issue.number, x
+ );
+ return vec![worker::Action::Ack];
+ }
+
+ let pr = pr.unwrap();
+
+ let repo_msg = Repo {
+ clone_url: job.repository.clone_url.clone(),
+ full_name: job.repository.full_name.clone(),
+ owner: job.repository.owner.login.clone(),
+ name: job.repository.name.clone(),
+ };
+
+ let pr_msg = Pr {
+ number: job.issue.number,
+ head_sha: pr.head.sha.clone(),
+ target_branch: Some(pr.base.commit_ref),
+ };
+
+ let mut response: Vec<worker::Action> = vec![];
+ if let Some(instructions) = instructions {
+ for instruction in instructions {
+ match instruction {
+ // Build: fan out to every architecture the user may build
+ // on, then announce the queued job on "build-results".
+ commentparser::Instruction::Build(subset, attrs) => {
+ let build_destinations = build_destinations.clone();
+
+ let msg = buildjob::BuildJob::new(
+ repo_msg.clone(),
+ pr_msg.clone(),
+ subset,
+ attrs,
+ None,
+ None,
+ Uuid::new_v4().to_string(),
+ );
+
+ for arch in build_destinations.iter() {
+ let (exchange, routingkey) = arch.as_build_destination();
+ response.push(worker::publish_serde_action(exchange, routingkey, &msg));
+ }
+
+ response.push(worker::publish_serde_action(
+ Some("build-results".to_string()),
+ None,
+ &buildjob::QueuedBuildJobs {
+ job: msg,
+ architectures: build_destinations
+ .iter()
+ .cloned()
+ .map(|arch| arch.to_string())
+ .collect(),
+ },
+ ));
+ }
+ // Test: like Build but only on architectures that can run
+ // tests, and no "build-results" announcement.
+ commentparser::Instruction::Test(attrs) => {
+ let msg = buildjob::BuildJob::new(
+ repo_msg.clone(),
+ pr_msg.clone(),
+ commentparser::Subset::Project,
+ attrs,
+ None,
+ None,
+ Uuid::new_v4().to_string(),
+ );
+
+ for arch in build_destinations.iter() {
+ if arch.can_run_tests() {
+ let (exchange, routingkey) = arch.as_build_destination();
+ response.push(worker::publish_serde_action(exchange, routingkey, &msg));
+ }
+ }
+ }
+ // Eval: request a fresh evaluation of the PR.
+ commentparser::Instruction::Eval => {
+ let msg = evaluationjob::EvaluationJob {
+ repo: repo_msg.clone(),
+ pr: pr_msg.clone(),
+ };
+
+ response.push(worker::publish_serde_action(
+ None,
+ Some("mass-rebuild-check-jobs".to_owned()),
+ &msg,
+ ));
+ }
+ }
+ }
+ }
+
+ response.push(worker::Action::Ack);
+ response
+ }
+}
diff --git a/ofborg/tickborg/src/tasks/githubcommentposter.rs b/ofborg/tickborg/src/tasks/githubcommentposter.rs
new file mode 100644
index 0000000000..70c4a118e4
--- /dev/null
+++ b/ofborg/tickborg/src/tasks/githubcommentposter.rs
@@ -0,0 +1,765 @@
+use crate::config::GithubAppVendingMachine;
+use crate::message::Repo;
+use crate::message::buildjob::{BuildJob, QueuedBuildJobs};
+use crate::message::buildresult::{BuildResult, BuildStatus, LegacyBuildResult};
+use crate::worker;
+
+use chrono::{DateTime, Utc};
+use hubcaps::checks::{CheckRunOptions, CheckRunState, Conclusion, Output};
+use tracing::{debug, debug_span, info, warn};
+
+/// Posts GitHub check-runs for queued and finished builds, using a
+/// per-repo GitHub App client from the vending machine.
+pub struct GitHubCommentPoster {
+ github_vend: GithubAppVendingMachine,
+}
+
+impl GitHubCommentPoster {
+ /// Build a poster around the GitHub App client vending machine.
+ pub fn new(github_vend: GithubAppVendingMachine) -> GitHubCommentPoster {
+ GitHubCommentPoster { github_vend }
+ }
+}
+
+/// The two message kinds this worker can report on: a job that was just
+/// queued, or a build that finished.
+pub enum PostableEvent {
+ BuildQueued(QueuedBuildJobs),
+ BuildFinished(BuildResult),
+}
+
+impl PostableEvent {
+ /// Decode a message body, trying `QueuedBuildJobs` first and falling
+ /// back to `BuildResult`. If neither parses, returns an error string
+ /// containing both the (lossily decoded) body and the second parse
+ /// error.
+ fn from(bytes: &[u8]) -> Result<PostableEvent, String> {
+ match serde_json::from_slice::<QueuedBuildJobs>(bytes) {
+ Ok(e) => Ok(PostableEvent::BuildQueued(e)),
+ Err(_) => match serde_json::from_slice::<BuildResult>(bytes) {
+ Ok(e) => Ok(PostableEvent::BuildFinished(e)),
+ Err(e) => Err(format!(
+ "Failed to deserialize PostableEvent: {:?}, err: {:}",
+ String::from_utf8_lossy(bytes),
+ e
+ )),
+ },
+ }
+ }
+}
+
+impl worker::SimpleWorker for GitHubCommentPoster {
+ type J = PostableEvent;
+
+ /// Decode the message body via `PostableEvent::from` (queued job or
+ /// finished build).
+ async fn msg_to_job(
+ &mut self,
+ _: &str,
+ _: &Option<String>,
+ body: &[u8],
+ ) -> Result<Self::J, String> {
+ PostableEvent::from(body)
+ }
+
+ /// Translate the event into GitHub check-runs and create them.
+ ///
+ /// A queued job yields one "queued" check per architecture; a finished
+ /// build yields a single "completed" check. Failures to create a check
+ /// are logged as warnings and do not prevent the Ack.
+ async fn consumer(&mut self, job: &PostableEvent) -> worker::Actions {
+ let mut checks: Vec<CheckRunOptions> = vec![];
+ let repo: Repo;
+
+ let pr = match job {
+ PostableEvent::BuildQueued(queued_job) => {
+ repo = queued_job.job.repo.clone();
+ for architecture in queued_job.architectures.iter() {
+ checks.push(job_to_check(&queued_job.job, architecture, Utc::now()));
+ }
+ queued_job.job.pr.to_owned()
+ }
+ PostableEvent::BuildFinished(finished_job) => {
+ // Convert to the legacy shape used by result_to_check().
+ let result = finished_job.legacy();
+ repo = result.repo.clone();
+ checks.push(result_to_check(&result, Utc::now()));
+ finished_job.pr()
+ }
+ };
+
+ let span = debug_span!("job", pr = ?pr.number);
+ let _enter = span.enter();
+
+ for check in checks {
+ info!(
+ "check {:?} {} {}",
+ check.status,
+ check.name,
+ check.details_url.as_ref().unwrap_or(&String::from("-"))
+ );
+ debug!("{:?}", check);
+
+ // NOTE(review): .unwrap() panics if no App client is available
+ // for this repo — confirm that is the intended failure mode.
+ let check_create_attempt = self
+ .github_vend
+ .for_repo(&repo.owner, &repo.name)
+ .await
+ .unwrap()
+ .repo(repo.owner.clone(), repo.name.clone())
+ .checkruns()
+ .create(&check)
+ .await;
+
+ match check_create_attempt {
+ Ok(_) => info!("Successfully sent."),
+ Err(err) => warn!("Failed to send check {:?}", err),
+ }
+ }
+
+ vec![worker::Action::Ack]
+ }
+}
+
+/// Build the "queued" check-run for a build job on one architecture.
+///
+/// The check name is the sorted attribute list plus the architecture
+/// ("(unknown attributes)" when the job has none); `started_at` is the
+/// supplied timestamp (RFC 3339, seconds precision, UTC) and the details
+/// URL points at the log viewer keyed by lowercased owner/name and PR
+/// number.
+fn job_to_check(job: &BuildJob, architecture: &str, timestamp: DateTime<Utc>) -> CheckRunOptions {
+ let mut all_attrs: Vec<String> = job.attrs.clone();
+ all_attrs.sort();
+
+ if all_attrs.is_empty() {
+ all_attrs = vec![String::from("(unknown attributes)")];
+ }
+
+ CheckRunOptions {
+ name: format!("{} on {architecture}", all_attrs.join(", ")),
+ actions: None,
+ completed_at: None,
+ started_at: Some(timestamp.to_rfc3339_opts(chrono::SecondsFormat::Secs, true)),
+ conclusion: None,
+ details_url: Some(format!(
+ "https://logs.tickborg.project-tick.net/?key={}/{}.{}",
+ &job.repo.owner.to_lowercase(),
+ &job.repo.name.to_lowercase(),
+ job.pr.number,
+ )),
+ external_id: None,
+ head_sha: job.pr.head_sha.clone(),
+ output: None,
+ status: Some(CheckRunState::Queued),
+ }
+}
+
+/// Build the "completed" check-run for a finished build.
+///
+/// The name combines attempted and skipped attrs (sorted; "(unknown
+/// attributes)" when both are absent) with the system. The conclusion
+/// comes from the `BuildStatus -> Conclusion` conversion, the summary
+/// lists attempted/skipped attrs (plus a timeout note), and the text is
+/// the partial build log fenced in markdown — or a placeholder when the
+/// output is empty. `external_id` carries the attempt id so the details
+/// URL and the check can be correlated.
+fn result_to_check(result: &LegacyBuildResult, timestamp: DateTime<Utc>) -> CheckRunOptions {
+ let mut all_attrs: Vec<String> =
+ vec![result.attempted_attrs.clone(), result.skipped_attrs.clone()]
+ .into_iter()
+ .map(|opt| opt.unwrap_or_else(|| vec![]))
+ .flat_map(|list| list.into_iter())
+ .collect();
+ all_attrs.sort();
+
+ if all_attrs.is_empty() {
+ all_attrs = vec![String::from("(unknown attributes)")];
+ }
+
+ let conclusion: Conclusion = result.status.clone().into();
+
+ let mut summary: Vec<String> = vec![];
+ if let Some(ref attempted) = result.attempted_attrs {
+ summary.extend(list_segment("Attempted", attempted));
+ }
+
+ if result.status == BuildStatus::TimedOut {
+ summary.push(String::from("Build timed out."));
+ }
+
+ if let Some(ref skipped) = result.skipped_attrs {
+ summary.extend(list_segment(
+ &format!(
+ "The following builds were skipped because they don't evaluate on {}",
+ result.system
+ ),
+ skipped,
+ ));
+ }
+
+ // Allow the clippy violation for improved readability
+ #[allow(clippy::vec_init_then_push)]
+ let text: String = if !result.output.is_empty() {
+ let mut reply: Vec<String> = vec![];
+
+ reply.push("## Partial log".to_owned());
+ reply.push("".to_owned());
+ reply.push("```".to_owned());
+ reply.extend(result.output.clone());
+ reply.push("```".to_owned());
+
+ reply.join("\n")
+ } else {
+ String::from("No partial log is available.")
+ };
+
+ CheckRunOptions {
+ name: format!("{} on {}", all_attrs.join(", "), result.system),
+ actions: None,
+ completed_at: Some(timestamp.to_rfc3339_opts(chrono::SecondsFormat::Secs, true)),
+ started_at: None,
+ conclusion: Some(conclusion),
+ details_url: Some(format!(
+ "https://logs.tickborg.project-tick.net/?key={}/{}.{}&attempt_id={}",
+ &result.repo.owner.to_lowercase(),
+ &result.repo.name.to_lowercase(),
+ result.pr.number,
+ result.attempt_id,
+ )),
+ external_id: Some(result.attempt_id.clone()),
+ head_sha: result.pr.head_sha.clone(),
+
+ output: Some(Output {
+ annotations: None,
+ images: None,
+ summary: summary.join("\n"),
+ text: Some(text),
+ // Title text also comes from the BuildStatus conversion.
+ title: result.status.clone().into(),
+ }),
+ status: Some(CheckRunState::Completed),
+ }
+}
+
+/// Format "name: a, b, c" followed by a blank line, or nothing at all
+/// when `things` is empty — used to assemble the check-run summary.
+fn list_segment(name: &str, things: &[String]) -> Vec<String> {
+ let mut reply: Vec<String> = vec![];
+
+ if !things.is_empty() {
+ reply.push(format!("{name}: {}", things.join(", ")));
+ reply.push("".to_owned());
+ }
+
+ reply
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::message::{Pr, Repo};
+ use chrono::TimeZone;
+
+ #[test]
+ pub fn test_queued_build() {
+ let job = BuildJob {
+ repo: Repo {
+ clone_url: "https://github.com/project-tick/Project-Tick.git".to_owned(),
+ full_name: "project-tick/Project-Tick".to_owned(),
+ owner: "project-tick".to_owned(),
+ name: "Project-Tick".to_owned(),
+ },
+ pr: Pr {
+ head_sha: "abc123".to_owned(),
+ number: 2345,
+ target_branch: Some("master".to_owned()),
+ },
+ logs: None,
+ statusreport: None,
+ subset: None,
+
+ request_id: "bogus-request-id".to_owned(),
+ attrs: vec!["foo".to_owned(), "bar".to_owned()],
+ };
+
+ let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();
+ assert_eq!(
+ job_to_check(&job, "x86_64-linux", timestamp),
+ CheckRunOptions {
+ name: "bar, foo on x86_64-linux".to_string(),
+ actions: None,
+ started_at: Some("2023-04-20T13:37:42Z".to_string()),
+ completed_at: None,
+ status: Some(CheckRunState::Queued),
+ conclusion: None,
+ details_url: Some("https://logs.tickborg.project-tick.net/?key=project-tick/Project-Tick.2345".to_string()),
+ external_id: None,
+ head_sha: "abc123".to_string(),
+ output: None,
+ }
+ );
+ }
+
+ #[test]
+ pub fn test_check_passing_build() {
+ let result = LegacyBuildResult {
+ repo: Repo {
+ clone_url: "https://github.com/project-tick/Project-Tick.git".to_owned(),
+ full_name: "project-tick/Project-Tick".to_owned(),
+ owner: "project-tick".to_owned(),
+ name: "Project-Tick".to_owned(),
+ },
+ pr: Pr {
+ head_sha: "abc123".to_owned(),
+ number: 2345,
+ target_branch: Some("master".to_owned()),
+ },
+ output: vec![
+ "make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
+ "make[2]: Nothing to be done for 'install'.".to_owned(),
+ "make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
+ "make[1]: Nothing to be done for 'install-target'.".to_owned(),
+ "make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'".to_owned(),
+ "removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'".to_owned(),
+ "post-installation fixup".to_owned(),
+ "strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip".to_owned(),
+ "patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
+ "/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
+ ],
+ attempt_id: "neatattemptid".to_owned(),
+ request_id: "bogus-request-id".to_owned(),
+ system: "x86_64-linux".to_owned(),
+ attempted_attrs: Some(vec!["foo".to_owned()]),
+ skipped_attrs: Some(vec!["bar".to_owned()]),
+ status: BuildStatus::Success,
+ };
+
+ let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();
+
+ assert_eq!(
+ result_to_check(&result, timestamp),
+ CheckRunOptions {
+ name: "bar, foo on x86_64-linux".to_string(),
+ actions: None,
+ started_at: None,
+ completed_at: Some("2023-04-20T13:37:42Z".to_string()),
+ status: Some(CheckRunState::Completed),
+ conclusion: Some(Conclusion::Success),
+ details_url: Some(
+ "https://logs.tickborg.project-tick.net/?key=project-tick/Project-Tick.2345&attempt_id=neatattemptid"
+ .to_string()
+ ),
+ external_id: Some("neatattemptid".to_string()),
+ head_sha: "abc123".to_string(),
+ output: Some(Output {
+ title: "Success".to_string(),
+ summary: "Attempted: foo
+
+The following builds were skipped because they don't evaluate on x86_64-linux: bar
+"
+ .to_string(),
+ text: Some(
+ "## Partial log
+
+```
+make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
+make[2]: Nothing to be done for 'install'.
+make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
+make[1]: Nothing to be done for 'install-target'.
+make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'
+removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'
+post-installation fixup
+strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip
+patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
+/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
+```"
+ .to_string()
+ ),
+ annotations: None,
+ images: None,
+ })
+ }
+ );
+ }
+
+ #[test]
+ pub fn test_check_failing_build() {
+ let result = LegacyBuildResult {
+ repo: Repo {
+ clone_url: "https://github.com/project-tick/Project-Tick.git".to_owned(),
+ full_name: "project-tick/Project-Tick".to_owned(),
+ owner: "project-tick".to_owned(),
+ name: "Project-Tick".to_owned(),
+ },
+ pr: Pr {
+ head_sha: "abc123".to_owned(),
+ number: 2345,
+ target_branch: Some("master".to_owned()),
+ },
+ output: vec![
+ "make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
+ "make[2]: Nothing to be done for 'install'.".to_owned(),
+ "make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
+ "make[1]: Nothing to be done for 'install-target'.".to_owned(),
+ "make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'".to_owned(),
+ "removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'".to_owned(),
+ "post-installation fixup".to_owned(),
+ "strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip".to_owned(),
+ "patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
+ "/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
+ ],
+ attempt_id: "neatattemptid".to_owned(),
+ request_id: "bogus-request-id".to_owned(),
+ system: "x86_64-linux".to_owned(),
+ attempted_attrs: Some(vec!["foo".to_owned()]),
+ skipped_attrs: None,
+ status: BuildStatus::Failure,
+ };
+
+ let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();
+
+ assert_eq!(
+ result_to_check(&result, timestamp),
+ CheckRunOptions {
+ name: "foo on x86_64-linux".to_string(),
+ actions: None,
+ started_at: None,
+ completed_at: Some("2023-04-20T13:37:42Z".to_string()),
+ status: Some(CheckRunState::Completed),
+ conclusion: Some(Conclusion::Neutral),
+ details_url: Some(
+ "https://logs.tickborg.project-tick.net/?key=project-tick/Project-Tick.2345&attempt_id=neatattemptid"
+ .to_string()
+ ),
+ external_id: Some("neatattemptid".to_string()),
+ head_sha: "abc123".to_string(),
+ output: Some(Output {
+ title: "Failure".to_string(),
+ summary: "Attempted: foo
+"
+ .to_string(),
+ text: Some(
+ "## Partial log
+
+```
+make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
+make[2]: Nothing to be done for 'install'.
+make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
+make[1]: Nothing to be done for 'install-target'.
+make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'
+removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'
+post-installation fixup
+strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip
+patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
+/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
+```"
+ .to_string()
+ ),
+ annotations: None,
+ images: None,
+ })
+ }
+ );
+ }
+
+ #[test]
+ pub fn test_check_timedout_build() {
+ let result = LegacyBuildResult {
+ repo: Repo {
+ clone_url: "https://github.com/project-tick/Project-Tick.git".to_owned(),
+ full_name: "project-tick/Project-Tick".to_owned(),
+ owner: "project-tick".to_owned(),
+ name: "Project-Tick".to_owned(),
+ },
+ pr: Pr {
+ head_sha: "abc123".to_owned(),
+ number: 2345,
+ target_branch: Some("master".to_owned()),
+ },
+ output: vec![
+ "make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
+ "make[2]: Nothing to be done for 'install'.".to_owned(),
+ "make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
+ "make[1]: Nothing to be done for 'install-target'.".to_owned(),
+ "make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'".to_owned(),
+ "removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'".to_owned(),
+ "post-installation fixup".to_owned(),
+ "building of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' timed out after 1 seconds".to_owned(),
+ "error: build of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' failed".to_owned(),
+ ],
+ attempt_id: "neatattemptid".to_owned(),
+ request_id: "bogus-request-id".to_owned(),
+ system: "x86_64-linux".to_owned(),
+ attempted_attrs: Some(vec!["foo".to_owned()]),
+ skipped_attrs: None,
+ status: BuildStatus::TimedOut,
+ };
+
+ let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();
+
+ assert_eq!(
+ result_to_check(&result, timestamp),
+ CheckRunOptions {
+ name: "foo on x86_64-linux".to_string(),
+ actions: None,
+ started_at: None,
+ completed_at: Some("2023-04-20T13:37:42Z".to_string()),
+ status: Some(CheckRunState::Completed),
+ conclusion: Some(Conclusion::Neutral),
+ details_url: Some(
+ "https://logs.tickborg.project-tick.net/?key=project-tick/Project-Tick.2345&attempt_id=neatattemptid"
+ .to_string()
+ ),
+ external_id: Some("neatattemptid".to_string()),
+ head_sha: "abc123".to_string(),
+ output: Some(Output {
+ title: "Timed out, unknown build status".to_string(),
+ summary: "Attempted: foo
+
+Build timed out."
+ .to_string(),
+ text: Some(
+ "## Partial log
+
+```
+make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
+make[2]: Nothing to be done for 'install'.
+make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
+make[1]: Nothing to be done for 'install-target'.
+make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'
+removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'
+post-installation fixup
+building of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' timed out after 1 seconds
+error: build of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' failed
+```"
+ .to_string()
+ ),
+ annotations: None,
+ images: None,
+ })
+ }
+ );
+ }
+
+    // Success with attempted_attrs == None: the check name falls back to
+    // "(unknown attributes)" and the summary is empty (nothing was listed).
+    #[test]
+    pub fn test_check_passing_build_unspecified_attributes() {
+        let result = LegacyBuildResult {
+            repo: Repo {
+                clone_url: "https://github.com/project-tick/Project-Tick.git".to_owned(),
+                full_name: "project-tick/Project-Tick".to_owned(),
+                owner: "project-tick".to_owned(),
+                name: "Project-Tick".to_owned(),
+            },
+            pr: Pr {
+                head_sha: "abc123".to_owned(),
+                number: 2345,
+                target_branch: Some("master".to_owned()),
+            },
+            output: vec![
+                "make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
+                "make[2]: Nothing to be done for 'install'.".to_owned(),
+                "make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
+                "make[1]: Nothing to be done for 'install-target'.".to_owned(),
+                "make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'".to_owned(),
+                "removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'".to_owned(),
+                "post-installation fixup".to_owned(),
+                "strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip".to_owned(),
+                "patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
+                "/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
+            ],
+            attempt_id: "neatattemptid".to_owned(),
+            request_id: "bogus-request-id".to_owned(),
+            system: "x86_64-linux".to_owned(),
+            attempted_attrs: None,
+            skipped_attrs: None,
+            status: BuildStatus::Success,
+        };
+
+        let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();
+
+        assert_eq!(
+            result_to_check(&result, timestamp),
+            CheckRunOptions {
+                name: "(unknown attributes) on x86_64-linux".to_string(),
+                actions: None,
+                started_at: None,
+                completed_at: Some("2023-04-20T13:37:42Z".to_string()),
+                status: Some(CheckRunState::Completed),
+                conclusion: Some(Conclusion::Success),
+                details_url: Some(
+                    "https://logs.tickborg.project-tick.net/?key=project-tick/Project-Tick.2345&attempt_id=neatattemptid"
+                        .to_string()
+                ),
+                external_id: Some("neatattemptid".to_string()),
+                head_sha: "abc123".to_string(),
+                output: Some(Output {
+                    title: "Success".to_string(),
+                    summary: "".to_string(),
+                    text: Some(
+                        "## Partial log
+
+```
+make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
+make[2]: Nothing to be done for 'install'.
+make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
+make[1]: Nothing to be done for 'install-target'.
+make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'
+removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'
+post-installation fixup
+strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip
+patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
+/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
+```"
+                        .to_string()
+                    ),
+                    annotations: None,
+                    images: None,
+                })
+            }
+        );
+    }
+
+    // As asserted here, BuildStatus::Failure maps to Conclusion::Neutral
+    // (not Conclusion::Failure) while the Output title still says "Failure".
+    // NOTE(review): presumably deliberate so a failed build doesn't hard-block
+    // the PR's checks — confirm against result_to_check's intent.
+    #[test]
+    pub fn test_check_failing_build_unspecified_attributes() {
+        let result = LegacyBuildResult {
+            repo: Repo {
+                clone_url: "https://github.com/project-tick/Project-Tick.git".to_owned(),
+                full_name: "project-tick/Project-Tick".to_owned(),
+                owner: "project-tick".to_owned(),
+                name: "Project-Tick".to_owned(),
+            },
+            pr: Pr {
+                head_sha: "abc123".to_owned(),
+                number: 2345,
+                target_branch: Some("master".to_owned()),
+            },
+            output: vec![
+                "make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
+                "make[2]: Nothing to be done for 'install'.".to_owned(),
+                "make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
+                "make[1]: Nothing to be done for 'install-target'.".to_owned(),
+                "make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'".to_owned(),
+                "removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'".to_owned(),
+                "post-installation fixup".to_owned(),
+                "strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip".to_owned(),
+                "patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
+                "/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
+            ],
+            attempt_id: "neatattemptid".to_owned(),
+            request_id: "bogus-request-id".to_owned(),
+            system: "x86_64-linux".to_owned(),
+            attempted_attrs: None,
+            skipped_attrs: None,
+            status: BuildStatus::Failure,
+        };
+
+        let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();
+
+        assert_eq!(
+            result_to_check(&result, timestamp),
+            CheckRunOptions {
+                name: "(unknown attributes) on x86_64-linux".to_string(),
+                actions: None,
+                started_at: None,
+                completed_at: Some("2023-04-20T13:37:42Z".to_string()),
+                status: Some(CheckRunState::Completed),
+                conclusion: Some(Conclusion::Neutral),
+                details_url: Some(
+                    "https://logs.tickborg.project-tick.net/?key=project-tick/Project-Tick.2345&attempt_id=neatattemptid"
+                        .to_string()
+                ),
+                external_id: Some("neatattemptid".to_string()),
+                head_sha: "abc123".to_string(),
+                output: Some(Output {
+                    title: "Failure".to_string(),
+                    summary: "".to_string(),
+                    text: Some(
+                        "## Partial log
+
+```
+make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
+make[2]: Nothing to be done for 'install'.
+make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
+make[1]: Nothing to be done for 'install-target'.
+make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'
+removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'
+post-installation fixup
+strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip
+patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
+/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
+```"
+                        .to_string()
+                    ),
+                    annotations: None,
+                    images: None,
+                })
+            }
+        );
+    }
+
+    // Skipped build: the check is named after the skipped attribute, the
+    // conclusion is Skipped, and the summary explains why it wasn't attempted.
+    #[test]
+    pub fn test_check_no_attempt() {
+        let result = LegacyBuildResult {
+            repo: Repo {
+                clone_url: "https://github.com/project-tick/Project-Tick.git".to_owned(),
+                full_name: "project-tick/Project-Tick".to_owned(),
+                owner: "project-tick".to_owned(),
+                name: "Project-Tick".to_owned(),
+            },
+            pr: Pr {
+                head_sha: "abc123".to_owned(),
+                number: 2345,
+                target_branch: Some("master".to_owned()),
+            },
+            output: vec!["foo".to_owned()],
+            attempt_id: "neatattemptid".to_owned(),
+            request_id: "bogus-request-id".to_owned(),
+            system: "x86_64-linux".to_owned(),
+            attempted_attrs: None,
+            skipped_attrs: Some(vec!["not-attempted".to_owned()]),
+            status: BuildStatus::Skipped,
+        };
+
+        let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();
+
+        assert_eq!(
+            result_to_check(&result, timestamp),
+            CheckRunOptions {
+                name: "not-attempted on x86_64-linux".to_string(),
+                actions: None,
+                started_at: None,
+                completed_at: Some("2023-04-20T13:37:42Z".to_string()),
+                status: Some(CheckRunState::Completed),
+                conclusion: Some(Conclusion::Skipped),
+                details_url: Some("https://logs.tickborg.project-tick.net/?key=project-tick/Project-Tick.2345&attempt_id=neatattemptid".to_string()),
+                external_id: Some("neatattemptid".to_string()),
+                head_sha: "abc123".to_string(),
+                output: Some(Output {
+                    title: "No attempt".to_string(),
+                    summary: "The following builds were skipped because they don\'t evaluate on x86_64-linux: not-attempted
+".to_string(),
+                    text: Some("## Partial log
+
+```
+foo
+```".to_string()),
+                    annotations: None,
+                    images: None,
+                })
+            }
+        );
+    }
+
+    // Same skipped case as above, but with an empty output vector: the
+    // "Partial log" section is replaced by a "No partial log" notice.
+    #[test]
+    pub fn test_check_no_attempt_no_log() {
+        let result = LegacyBuildResult {
+            repo: Repo {
+                clone_url: "https://github.com/project-tick/Project-Tick.git".to_owned(),
+                full_name: "project-tick/Project-Tick".to_owned(),
+                owner: "project-tick".to_owned(),
+                name: "Project-Tick".to_owned(),
+            },
+            pr: Pr {
+                head_sha: "abc123".to_owned(),
+                number: 2345,
+                target_branch: Some("master".to_owned()),
+            },
+            output: vec![],
+            attempt_id: "neatattemptid".to_owned(),
+            request_id: "bogus-request-id".to_owned(),
+            system: "x86_64-linux".to_owned(),
+            attempted_attrs: None,
+            skipped_attrs: Some(vec!["not-attempted".to_owned()]),
+            status: BuildStatus::Skipped,
+        };
+
+        let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();
+
+        assert_eq!(
+            result_to_check(&result, timestamp),
+            CheckRunOptions {
+                name: "not-attempted on x86_64-linux".to_string(),
+                actions: None,
+                started_at: None,
+                completed_at: Some("2023-04-20T13:37:42Z".to_string()),
+                status: Some(CheckRunState::Completed),
+                conclusion: Some(Conclusion::Skipped),
+                details_url: Some("https://logs.tickborg.project-tick.net/?key=project-tick/Project-Tick.2345&attempt_id=neatattemptid".to_string()),
+                external_id: Some("neatattemptid".to_string()),
+                head_sha: "abc123".to_string(),
+                output: Some(Output {
+                    title: "No attempt".to_string(),
+                    summary: "The following builds were skipped because they don\'t evaluate on x86_64-linux: not-attempted
+".to_string(),
+                    text: Some("No partial log is available.".to_string()),
+                    annotations: None,
+                    images: None,
+                })
+            }
+        );
+    }
+}
diff --git a/ofborg/tickborg/src/tasks/log_message_collector.rs b/ofborg/tickborg/src/tasks/log_message_collector.rs
new file mode 100644
index 0000000000..2d80f72f03
--- /dev/null
+++ b/ofborg/tickborg/src/tasks/log_message_collector.rs
@@ -0,0 +1,487 @@
+use crate::message::buildlogmsg::{BuildLogMsg, BuildLogStart};
+use crate::message::buildresult::BuildResult;
+use crate::worker;
+use crate::writetoline::LineWriter;
+
+use std::fs::{self, File, OpenOptions};
+use std::io::Write;
+use std::path::{Component, Path, PathBuf};
+
+use lru_cache::LruCache;
+use tracing::warn;
+
+/// Identity of one log stream: the AMQP routing key the message arrived on
+/// plus the build attempt that produced it. Doubles as the cache key for
+/// open log-file handles and as the on-disk path components.
+#[derive(Eq, PartialEq, Hash, Debug, Clone)]
+pub struct LogFrom {
+    routing_key: String,
+    attempt_id: String,
+}
+
+/// Collects build-log messages into per-attempt files under `log_root`,
+/// keeping at most a fixed number of open `LineWriter` handles in an LRU.
+pub struct LogMessageCollector {
+    handles: LruCache<LogFrom, LineWriter>,
+    log_root: PathBuf,
+}
+
+/// The three payload shapes this worker accepts, listed in the order
+/// `msg_to_job` tries to decode them.
+#[derive(Debug)]
+enum MsgType {
+    Start(BuildLogStart),
+    Msg(BuildLogMsg),
+    // Boxed — presumably to keep the enum's overall size down; confirm.
+    Finish(Box<BuildResult>),
+}
+
+/// A decoded job: the stream it belongs to plus the decoded payload.
+#[derive(Debug)]
+pub struct LogMessage {
+    from: LogFrom,
+    message: MsgType,
+}
+
+/// Ensures `segment` is safe to join beneath the log root: it must be
+/// non-empty and consist solely of plain (`Component::Normal`) elements,
+/// which rules out `..`, `.`, and absolute-path prefixes.
+fn validate_path_segment(segment: &Path) -> Result<(), String> {
+    let mut saw_component = false;
+
+    for component in segment.components() {
+        saw_component = true;
+        if !matches!(component, Component::Normal(_)) {
+            // Same short-circuit as before: warn about the first offender
+            // and reject the whole segment.
+            warn!("Invalid path component: {:?}", component);
+            return Err(String::from("Path contained invalid components"));
+        }
+    }
+
+    if saw_component {
+        Ok(())
+    } else {
+        Err(String::from("Segment has no components"))
+    }
+}
+
+impl LogMessageCollector {
+    /// Creates a collector rooted at `log_root`, holding at most `max_open`
+    /// log-file handles alive in the LRU cache.
+    pub fn new(log_root: PathBuf, max_open: usize) -> LogMessageCollector {
+        LogMessageCollector {
+            handles: LruCache::new(max_open),
+            log_root,
+        }
+    }
+
+    /// Serializes `data` and writes it to the `.metadata.json` file for `from`.
+    pub fn write_metadata(&mut self, from: &LogFrom, data: &BuildLogStart) -> Result<(), String> {
+        let metapath = self.path_for_metadata(from)?;
+        let mut fp = self.open_file(&metapath)?;
+
+        match serde_json::to_string(data) {
+            Ok(data) => {
+                // write_all, not write: a bare write() is allowed to persist
+                // only part of the buffer, silently truncating the JSON.
+                if let Err(err) = fp.write_all(data.as_bytes()) {
+                    Err(format!("Failed to write metadata: {err:?}"))
+                } else {
+                    Ok(())
+                }
+            }
+            Err(err) => Err(format!("Failed to stringify metadata: {err:?}")),
+        }
+    }
+
+    /// Serializes `data` and writes it to the `.result.json` file for `from`.
+    pub fn write_result(&mut self, from: &LogFrom, data: &BuildResult) -> Result<(), String> {
+        let path = self.path_for_result(from)?;
+        let mut fp = self.open_file(&path)?;
+
+        match serde_json::to_string(data) {
+            Ok(data) => {
+                // write_all for the same partial-write reason as above.
+                if let Err(err) = fp.write_all(data.as_bytes()) {
+                    Err(format!("Failed to write result: {err:?}"))
+                } else {
+                    Ok(())
+                }
+            }
+            Err(err) => Err(format!("Failed to stringify result: {err:?}")),
+        }
+    }
+
+    /// Returns the cached LineWriter for `from`, opening (and caching) the
+    /// log file on first use. LRU insertion may evict — and thereby close —
+    /// the least-recently-used handle.
+    pub fn handle_for(&mut self, from: &LogFrom) -> Result<&mut LineWriter, String> {
+        if self.handles.contains_key(from) {
+            Ok(self
+                .handles
+                .get_mut(from)
+                .expect("handles just contained the key"))
+        } else {
+            let logpath = self.path_for_log(from)?;
+            let fp = self.open_file(&logpath)?;
+            let writer = LineWriter::new(fp);
+            self.handles.insert(from.clone(), writer);
+            if let Some(handle) = self.handles.get_mut(from) {
+                Ok(handle)
+            } else {
+                Err(String::from(
+                    "A just-inserted value should already be there",
+                ))
+            }
+        }
+    }
+
+    // NOTE(review): set_extension replaces anything after the last `.` in the
+    // final component, so an attempt_id containing a dot would be truncated
+    // here — confirm attempt IDs can never contain dots.
+    fn path_for_metadata(&self, from: &LogFrom) -> Result<PathBuf, String> {
+        let mut path = self.path_for_log(from)?;
+        path.set_extension("metadata.json");
+        Ok(path)
+    }
+
+    fn path_for_result(&self, from: &LogFrom) -> Result<PathBuf, String> {
+        let mut path = self.path_for_log(from)?;
+        path.set_extension("result.json");
+        Ok(path)
+    }
+
+    /// Computes `log_root/routing_key/attempt_id`, validating each segment
+    /// against path traversal; the final starts_with check is defense in depth.
+    fn path_for_log(&self, from: &LogFrom) -> Result<PathBuf, String> {
+        let mut location = self.log_root.clone();
+
+        let routing_key = PathBuf::from(from.routing_key.clone());
+        validate_path_segment(&routing_key)?;
+        location.push(routing_key);
+
+        let attempt_id = PathBuf::from(from.attempt_id.clone());
+        validate_path_segment(&attempt_id)?;
+        location.push(attempt_id);
+
+        if location.starts_with(&self.log_root) {
+            Ok(location)
+        } else {
+            Err(format!(
+                "Calculating the log location for {from:?} resulted in an invalid path {location:?}"
+            ))
+        }
+    }
+
+    /// Opens `path` for appending, creating parent directories as needed.
+    fn open_file(&self, path: &Path) -> Result<File, String> {
+        let dir = path.parent().unwrap();
+        // Propagate directory-creation failures as an Err instead of
+        // panicking the worker with unwrap().
+        fs::create_dir_all(dir)
+            .map_err(|err| format!("Failed to create directory {dir:?}, err: {err:?}"))?;
+
+        let attempt = OpenOptions::new()
+            .append(true)
+            .read(true)
+            .create(true)
+            .open(path);
+
+        match attempt {
+            Ok(handle) => Ok(handle),
+            Err(err) => Err(format!(
+                "Failed to open the file for {path:?}, err: {err:?}"
+            )),
+        }
+    }
+}
+
+impl worker::SimpleWorker for LogMessageCollector {
+    type J = LogMessage;
+
+    // Decodes the body by trying each schema in turn — BuildLogMsg, then
+    // BuildLogStart, then BuildResult — and takes the first successful parse.
+    // NOTE(review): a payload that happens to deserialize as more than one of
+    // these types resolves to the first match; presumably the schemas are
+    // disjoint — verify.
+    async fn msg_to_job(
+        &mut self,
+        routing_key: &str,
+        _: &Option<String>,
+        body: &[u8],
+    ) -> Result<Self::J, String> {
+        let message: MsgType;
+        let attempt_id: String;
+
+        let decode_msg: Result<BuildLogMsg, _> = serde_json::from_slice(body);
+        if let Ok(msg) = decode_msg {
+            attempt_id = msg.attempt_id.clone();
+            message = MsgType::Msg(msg);
+        } else {
+            let decode_msg: Result<BuildLogStart, _> = serde_json::from_slice(body);
+            if let Ok(msg) = decode_msg {
+                attempt_id = msg.attempt_id.clone();
+                message = MsgType::Start(msg);
+            } else {
+                let decode_msg: Result<BuildResult, _> = serde_json::from_slice(body);
+                if let Ok(msg) = decode_msg {
+                    attempt_id = msg.legacy().attempt_id;
+                    message = MsgType::Finish(Box::new(msg));
+                } else {
+                    return Err(format!("failed to decode job: {decode_msg:?}"));
+                }
+            }
+        }
+
+        Ok(LogMessage {
+            from: LogFrom {
+                routing_key: routing_key.to_string(),
+                attempt_id,
+            },
+            message,
+        })
+    }
+
+    // Dispatches on the message type: Start writes metadata and pre-creates
+    // the log file, Msg writes one log line, Finish writes the result JSON.
+    // NOTE(review): filesystem failures panic (expect/unwrap) and crash the
+    // worker rather than nacking — confirm the supervisor/queue redelivery
+    // makes that acceptable.
+    async fn consumer(&mut self, job: &LogMessage) -> worker::Actions {
+        match job.message {
+            MsgType::Start(ref start) => {
+                self.write_metadata(&job.from, start)
+                    .expect("failed to write metadata");
+
+                // Make sure the log content exists by opening its handle.
+                // This (hopefully) prevents builds that produce no output (for any reason) from
+                // having their logs.tickborg.project-tick.net link complaining about a 404.
+                let _ = self.handle_for(&job.from).unwrap();
+            }
+            MsgType::Msg(ref message) => {
+                let handle = self.handle_for(&job.from).unwrap();
+
+                // Incoming line numbers are 1-based; LineWriter is 0-based.
+                handle.write_to_line((message.line_number - 1) as usize, &message.output);
+            }
+            MsgType::Finish(ref finish) => {
+                self.write_result(&job.from, finish)
+                    .expect("failed to write result");
+            }
+        }
+
+        vec![worker::Action::Ack]
+    }
+}
+
+#[cfg(test)]
+// These tests exercise the collector against real files in a per-test
+// scratch directory (see TestScratch), including the exact serialized
+// metadata/result JSON and the on-disk line interleaving.
+mod tests {
+    use super::*;
+    use crate::message::buildresult::{BuildStatus, V1Tag};
+    use crate::message::{Pr, Repo};
+    use crate::test_scratch::TestScratch;
+    use crate::worker::SimpleWorker;
+    use std::io::Read;
+    use std::path::PathBuf;
+
+    // Cache size 3 so opening a fourth handle exercises LRU eviction.
+    fn make_worker(path: PathBuf) -> LogMessageCollector {
+        LogMessageCollector::new(path, 3)
+    }
+
+    fn make_from(id: &str) -> LogFrom {
+        LogFrom {
+            attempt_id: format!("attempt-id-{id}"),
+            routing_key: format!("routing-key-{id}"),
+        }
+    }
+
+    #[test]
+    fn test_handle_for() {
+        let p = TestScratch::new_dir("log-message-collector-handle_for");
+
+        let a = make_from("a.foo/123");
+        let b = make_from("b.foo/123");
+        let c = make_from("c.foo/123");
+        let d = make_from("d.foo/123");
+
+        let mut worker = make_worker(p.path());
+        assert!(worker.handle_for(&a).is_ok());
+        assert!(worker.handle_for(&b).is_ok());
+        assert!(worker.handle_for(&c).is_ok());
+        assert!(worker.handle_for(&d).is_ok());
+        // `a` was evicted by `d` (cache size 3); reopening must still work.
+        assert!(worker.handle_for(&a).is_ok());
+    }
+
+    #[test]
+    fn test_path_for_metadata() {
+        let p = TestScratch::new_dir("log-message-collector-path_for_metadata");
+        let worker = make_worker(p.path());
+
+        let path = worker
+            .path_for_metadata(&LogFrom {
+                attempt_id: String::from("my-attempt-id"),
+                routing_key: String::from("my-routing-key"),
+            })
+            .expect("the path should be valid");
+
+        assert!(path.starts_with(p.path()));
+        assert!(
+            path.as_os_str()
+                .to_string_lossy()
+                .ends_with("my-routing-key/my-attempt-id.metadata.json")
+        );
+    }
+
+    #[test]
+    fn test_path_for_result() {
+        let p = TestScratch::new_dir("log-message-collector-path_for_result");
+        let worker = make_worker(p.path());
+
+        let path = worker
+            .path_for_result(&LogFrom {
+                attempt_id: String::from("my-attempt-id"),
+                routing_key: String::from("my-routing-key"),
+            })
+            .expect("the path should be valid");
+
+        assert!(path.starts_with(p.path()));
+        assert!(
+            path.as_os_str()
+                .to_string_lossy()
+                .ends_with("my-routing-key/my-attempt-id.result.json")
+        );
+    }
+
+    #[test]
+    fn test_path_for_log() {
+        let p = TestScratch::new_dir("log-message-collector-path_for_log");
+        let worker = make_worker(p.path());
+
+        let path = worker
+            .path_for_log(&LogFrom {
+                attempt_id: String::from("my-attempt-id"),
+                routing_key: String::from("my-routing-key"),
+            })
+            .expect("the path should be valid");
+
+        assert!(path.starts_with(p.path()));
+        assert!(path.ends_with("my-routing-key/my-attempt-id"));
+    }
+
+    #[test]
+    fn test_path_for_log_malicious() {
+        let p = TestScratch::new_dir("log-message-collector-for_malicious");
+        let worker = make_worker(p.path());
+
+        // Traversal attempts must be rejected, not resolved.
+        let path = worker.path_for_log(&LogFrom {
+            attempt_id: String::from("./../../"),
+            routing_key: String::from("./../../foobar"),
+        });
+
+        println!("path: {path:?}");
+        assert!(path.is_err());
+    }
+
+    #[test]
+    fn test_validate_path_segment() {
+        assert!(validate_path_segment(&PathBuf::from("foo")).is_ok());
+        assert!(validate_path_segment(&PathBuf::from("foo/bar")).is_ok());
+        assert!(validate_path_segment(&PathBuf::from("foo.bar/123")).is_ok());
+        assert!(validate_path_segment(&PathBuf::from("..")).is_err());
+        assert!(validate_path_segment(&PathBuf::from(".")).is_err());
+        assert!(validate_path_segment(&PathBuf::from("./././")).is_err());
+        assert!(validate_path_segment(&PathBuf::from("")).is_err());
+        assert!(validate_path_segment(&PathBuf::from("foo/..")).is_err());
+        assert!(validate_path_segment(&PathBuf::from("foo/../bar")).is_err());
+        // A non-leading "./" is normalized away by Components, so this passes.
+        assert!(validate_path_segment(&PathBuf::from("foo/./bar")).is_ok());
+        assert!(validate_path_segment(&PathBuf::from("/foo/bar")).is_err());
+        assert!(validate_path_segment(&PathBuf::from("/foo")).is_err());
+    }
+
+    #[test]
+    fn test_open_file() {
+        let p = TestScratch::new_dir("log-message-collector-open_file");
+        let worker = make_worker(p.path());
+
+        assert!(
+            worker
+                .open_file(&worker.path_for_log(&make_from("a")).unwrap())
+                .is_ok()
+        );
+        assert!(
+            worker
+                .open_file(&worker.path_for_log(&make_from("b.foo/123")).unwrap())
+                .is_ok()
+        );
+    }
+
+    #[tokio::test]
+    pub async fn test_logs_collect() {
+        let mut logmsg = BuildLogMsg {
+            attempt_id: String::from("my-attempt-id"),
+            identity: String::from("my-identity"),
+            system: String::from("foobar-x8664"),
+            line_number: 1,
+            output: String::from("line-1"),
+        };
+        let mut job = LogMessage {
+            from: make_from("foo"),
+            message: MsgType::Msg(logmsg.clone()),
+        };
+
+        let p = TestScratch::new_dir("log-message-collector-logs_collector");
+
+        {
+            let mut worker = make_worker(p.path());
+            assert_eq!(
+                vec![worker::Action::Ack],
+                worker
+                    .consumer(&LogMessage {
+                        from: make_from("foo"),
+                        message: MsgType::Start(BuildLogStart {
+                            attempt_id: String::from("my-attempt-id"),
+                            identity: String::from("my-identity"),
+                            system: String::from("foobar-x8664"),
+                            attempted_attrs: Some(vec!["foo".to_owned()]),
+                            skipped_attrs: Some(vec!["bar".to_owned()]),
+                        })
+                    })
+                    .await
+            );
+
+            // Start alone must create the (empty) log file.
+            assert!(p.path().join("routing-key-foo/attempt-id-foo").exists());
+            assert_eq!(vec![worker::Action::Ack], worker.consumer(&job).await);
+
+            logmsg.line_number = 5;
+            logmsg.output = String::from("line-5");
+            job.message = MsgType::Msg(logmsg.clone());
+            assert_eq!(vec![worker::Action::Ack], worker.consumer(&job).await);
+
+            // A second attempt under the same routing key gets its own file.
+            job.from.attempt_id = String::from("my-other-attempt");
+            logmsg.attempt_id = String::from("my-other-attempt");
+            logmsg.line_number = 3;
+            logmsg.output = String::from("line-3");
+            job.message = MsgType::Msg(logmsg);
+            assert_eq!(vec![worker::Action::Ack], worker.consumer(&job).await);
+
+            assert_eq!(
+                vec![worker::Action::Ack],
+                worker
+                    .consumer(&LogMessage {
+                        from: make_from("foo"),
+                        message: MsgType::Finish(Box::new(BuildResult::V1 {
+                            tag: V1Tag::V1,
+                            repo: Repo {
+                                clone_url: "https://github.com/project-tick/tickborg.git".to_owned(),
+                                full_name: "project-tick/tickborg".to_owned(),
+                                owner: "project-tick".to_owned(),
+                                name: "tickborg".to_owned(),
+                            },
+                            pr: Pr {
+                                number: 42,
+                                head_sha: "6dd9f0265d52b946dd13daf996f30b64e4edb446".to_owned(),
+                                target_branch: Some("scratch".to_owned()),
+                            },
+                            system: "x86_64-linux".to_owned(),
+                            output: vec![],
+                            attempt_id: "attempt-id-foo".to_owned(),
+                            request_id: "bogus-request-id".to_owned(),
+                            status: BuildStatus::Success,
+                            attempted_attrs: Some(vec!["foo".to_owned()]),
+                            skipped_attrs: Some(vec!["bar".to_owned()]),
+                        }))
+                    })
+                    .await
+            );
+        }
+
+        let mut prm = p.path();
+        let mut sm = String::new();
+        prm.push("routing-key-foo/attempt-id-foo.metadata.json");
+        File::open(prm).unwrap().read_to_string(&mut sm).unwrap();
+        assert_eq!(
+            &sm,
+            "{\"system\":\"foobar-x8664\",\"identity\":\"my-identity\",\"attempt_id\":\"my-attempt-id\",\"attempted_attrs\":[\"foo\"],\"skipped_attrs\":[\"bar\"]}"
+        );
+
+        // Lines 2-4 were never written, so they are blank padding on disk.
+        let mut prf = p.path();
+        let mut sf = String::new();
+        prf.push("routing-key-foo/attempt-id-foo");
+        File::open(prf).unwrap().read_to_string(&mut sf).unwrap();
+        assert_eq!(&sf, "line-1\n\n\n\nline-5\n");
+
+        let mut pr = p.path();
+        let mut s = String::new();
+        pr.push("routing-key-foo/my-other-attempt");
+        File::open(pr).unwrap().read_to_string(&mut s).unwrap();
+        assert_eq!(&s, "\n\nline-3\n");
+
+        let mut prr = p.path();
+        let mut sr = String::new();
+        prr.push("routing-key-foo/attempt-id-foo.result.json");
+        File::open(prr).unwrap().read_to_string(&mut sr).unwrap();
+        assert_eq!(
+            &sr,
+            "{\"tag\":\"V1\",\"repo\":{\"owner\":\"project-tick\",\"name\":\"tickborg\",\"full_name\":\"project-tick/tickborg\",\"clone_url\":\"https://github.com/project-tick/tickborg.git\"},\"pr\":{\"target_branch\":\"scratch\",\"number\":42,\"head_sha\":\"6dd9f0265d52b946dd13daf996f30b64e4edb446\"},\"system\":\"x86_64-linux\",\"output\":[],\"attempt_id\":\"attempt-id-foo\",\"request_id\":\"bogus-request-id\",\"status\":\"Success\",\"skipped_attrs\":[\"bar\"],\"attempted_attrs\":[\"foo\"]}"
+        );
+    }
+}
diff --git a/ofborg/tickborg/src/tasks/mod.rs b/ofborg/tickborg/src/tasks/mod.rs
new file mode 100644
index 0000000000..5aab0fa631
--- /dev/null
+++ b/ofborg/tickborg/src/tasks/mod.rs
@@ -0,0 +1,8 @@
+//! Worker task modules; each implements a queue consumer.
+pub mod build;
+pub mod eval;
+pub mod evaluate;
+pub mod evaluationfilter;
+pub mod githubcommentfilter;
+pub mod githubcommentposter;
+pub mod log_message_collector;
+pub mod statscollector;
diff --git a/ofborg/tickborg/src/tasks/statscollector.rs b/ofborg/tickborg/src/tasks/statscollector.rs
new file mode 100644
index 0000000000..fef23ad3c6
--- /dev/null
+++ b/ofborg/tickborg/src/tasks/statscollector.rs
@@ -0,0 +1,68 @@
+use crate::stats;
+use crate::worker;
+
+use tracing::error;
+
+/// Consumes event messages and records them as metrics.
+pub struct StatCollectorWorker<E> {
+    // Channel for this worker's own meta-events (legacy / bogus messages).
+    events: E,
+    // Sink the decoded events are recorded into.
+    collector: stats::MetricCollector,
+}
+
+impl<E: stats::SysEvents + 'static> StatCollectorWorker<E> {
+    /// Creates a worker that records metrics via `collector` and reports
+    /// decode anomalies via `events`.
+    pub fn new(events: E, collector: stats::MetricCollector) -> StatCollectorWorker<E> {
+        StatCollectorWorker { events, collector }
+    }
+}
+
+impl<E: stats::SysEvents + 'static> worker::SimpleWorker for StatCollectorWorker<E> {
+    type J = stats::EventMessage;
+
+    // Decodes an EventMessage; on failure, retries with the raw body wrapped
+    // in double quotes so a bare (unquoted) legacy event name parses as a
+    // JSON string, and counts that via StatCollectorLegacyEvent. Bodies that
+    // still don't parse are counted as StatCollectorBogusEvent and rejected.
+    async fn msg_to_job(
+        &mut self,
+        _: &str,
+        _: &Option<String>,
+        body: &[u8],
+    ) -> Result<Self::J, String> {
+        match serde_json::from_slice(body) {
+            Ok(e) => Ok(e),
+            Err(_) => {
+                // Surround the raw bytes with '"' so they form a JSON string.
+                let mut modified_body: Vec<u8> = vec![b"\""[0]];
+                modified_body.append(&mut body.to_vec());
+                modified_body.push(b"\""[0]);
+
+                match serde_json::from_slice(&modified_body) {
+                    Ok(event) => {
+                        self.events
+                            .notify(stats::Event::StatCollectorLegacyEvent(
+                                stats::event_metric_name(&event),
+                            ))
+                            .await;
+                        // Legacy messages carry no sender, hence the empty string.
+                        Ok(stats::EventMessage {
+                            sender: "".to_owned(),
+                            events: vec![event],
+                        })
+                    }
+                    Err(err) => {
+                        self.events
+                            .notify(stats::Event::StatCollectorBogusEvent)
+                            .await;
+                        error!(
+                            "Failed to decode message: {:?}, Err: {err:?}",
+                            std::str::from_utf8(body).unwrap_or("<message not utf8>")
+                        );
+                        Err("Failed to decode message".to_owned())
+                    }
+                }
+            }
+        }
+    }
+
+    // Records every event in the message, attributed to its sender.
+    async fn consumer(&mut self, job: &stats::EventMessage) -> worker::Actions {
+        let sender = job.sender.clone();
+        for event in job.events.iter() {
+            self.collector.record(sender.clone(), event.clone());
+        }
+
+        vec![worker::Action::Ack]
+    }
+}
diff --git a/ofborg/tickborg/src/test_scratch.rs b/ofborg/tickborg/src/test_scratch.rs
new file mode 100644
index 0000000000..d63632ecd0
--- /dev/null
+++ b/ofborg/tickborg/src/test_scratch.rs
@@ -0,0 +1,61 @@
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+use tracing::debug;
+
+/// A temporary path for tests, deleted on drop.
+pub struct TestScratch {
+    root: PathBuf,
+}
+
+impl TestScratch {
+    /// Scratch path under `<crate>/test-scratch/dirs/dir-{ident}`.
+    ///
+    /// NOTE(review): create_dir only creates the *parent* of `root`; the
+    /// returned path itself is left for the code under test to create.
+    pub fn new_dir(ident: &str) -> TestScratch {
+        let scratch = TestScratch {
+            root: Path::new(env!("CARGO_MANIFEST_DIR"))
+                .join("test-scratch")
+                .join("dirs")
+                .join(format!("dir-{ident}")),
+        };
+
+        TestScratch::create_dir(&scratch);
+
+        scratch
+    }
+
+    /// Scratch path under `<crate>/test-scratch/files/file-{ident}`.
+    pub fn new_file(ident: &str) -> TestScratch {
+        let scratch = TestScratch {
+            root: Path::new(env!("CARGO_MANIFEST_DIR"))
+                .join("test-scratch")
+                .join("files")
+                .join(format!("file-{ident}")),
+        };
+
+        TestScratch::create_dir(&scratch);
+        scratch
+    }
+
+    // Ensures the parent directory of `root` exists (not `root` itself).
+    fn create_dir(path: &TestScratch) {
+        let target = path.root.parent().unwrap();
+        debug!("Creating directory {target:?}");
+        fs::create_dir_all(target).unwrap();
+    }
+
+    /// The scratch path, cloned for the caller.
+    pub fn path(&self) -> PathBuf {
+        self.root.clone()
+    }
+
+    /// The scratch path as a String; panics if it is not valid UTF-8.
+    pub fn string(&self) -> String {
+        self.path().to_str().unwrap().to_owned()
+    }
+}
+
+impl Drop for TestScratch {
+    // Recursively deletes the scratch root on drop.
+    // NOTE(review): shelling out to `rm -rf` covers both the file and the
+    // directory case in one call but is Unix-only; std::fs::remove_dir_all /
+    // remove_file would be portable if Windows ever matters.
+    fn drop(&mut self) {
+        debug!("Deleting root {:?}", self.root);
+        Command::new("rm")
+            .arg("-rf")
+            .arg(self.root.clone())
+            .status()
+            .expect("cleanup of test-scratch should work");
+    }
+}
diff --git a/ofborg/tickborg/src/worker.rs b/ofborg/tickborg/src/worker.rs
new file mode 100644
index 0000000000..9569b450b9
--- /dev/null
+++ b/ofborg/tickborg/src/worker.rs
@@ -0,0 +1,53 @@
+use std::{marker::Send, sync::Arc};
+
+use serde::Serialize;
+
+pub struct Response {}
+
+/// The list of queue actions a worker returns from `consumer`.
+pub type Actions = Vec<Action>;
+
+/// What the worker asks the queue runtime to do after handling a message.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum Action {
+    Ack,
+    NackRequeue,
+    NackDump,
+    // Arc so the (potentially large) payload is shared, not cloned, when
+    // Actions are cloned.
+    Publish(Arc<QueueMsg>),
+}
+
+/// An outgoing message; the fields mirror AMQP basic.publish options
+/// (exchange, routing key, mandatory, immediate) plus the payload.
+#[derive(Debug, PartialEq, Eq)]
+pub struct QueueMsg {
+    pub exchange: Option<String>,
+    pub routing_key: Option<String>,
+    pub mandatory: bool,
+    pub immediate: bool,
+    pub content_type: Option<String>,
+    pub content: Vec<u8>,
+}
+
+/// Builds a Publish action carrying `msg` serialized as JSON
+/// (content type "application/json", non-mandatory, non-immediate).
+///
+/// Panics if serialization fails, which for the Serialize types used here
+/// indicates a programming error rather than a runtime condition.
+pub fn publish_serde_action<T: Serialize + ?Sized>(
+    exchange: Option<String>,
+    routing_key: Option<String>,
+    msg: &T,
+) -> Action {
+    Action::Publish(Arc::new(QueueMsg {
+        exchange,
+        routing_key,
+        mandatory: false,
+        immediate: false,
+        content_type: Some("application/json".to_owned()),
+        // to_vec serializes straight to bytes, skipping the intermediate
+        // String that to_string(..).into_bytes() allocated; output is identical.
+        content: serde_json::to_vec(msg).unwrap(),
+    }))
+}
+
+/// A queue consumer: decodes raw message bytes into a job, then handles it.
+///
+/// Both methods return `impl Future` (return-position impl-Trait in trait),
+/// so implementors can write them as `async fn`.
+pub trait SimpleWorker: Send {
+    /// The decoded job type handed from `msg_to_job` to `consumer`.
+    type J: Send;
+
+    /// Handles one decoded job and returns the queue actions to perform.
+    fn consumer(&mut self, job: &Self::J) -> impl std::future::Future<Output = Actions>;
+
+    /// Decodes a raw message (method/routing key, optional headers, body)
+    /// into `Self::J`, or an error string describing the decode failure.
+    fn msg_to_job(
+        &mut self,
+        method: &str,
+        headers: &Option<String>,
+        body: &[u8],
+    ) -> impl std::future::Future<Output = Result<Self::J, String>>;
+}
diff --git a/ofborg/tickborg/src/writetoline.rs b/ofborg/tickborg/src/writetoline.rs
new file mode 100644
index 0000000000..848464242d
--- /dev/null
+++ b/ofborg/tickborg/src/writetoline.rs
@@ -0,0 +1,356 @@
+use std::fs::File;
+use std::io::{BufRead, BufReader, Seek, SeekFrom, Write};
+
+/// Writes data at specific (0-indexed) line positions of a file, keeping an
+/// in-memory copy of the file's lines in sync with what is on disk.
+pub struct LineWriter {
+    file: File,
+    // Every line of the file, updated in lock-step with the writes.
+    buffer: Vec<String>,
+    // Position of the most recent write; starts out as the number of lines
+    // loaded from the file.
+    last_line: usize,
+}
+
+impl LineWriter {
+    /// Wraps `rw`, loading any existing content so line-addressed writes can
+    /// either append to or rewrite the file as appropriate.
+    pub fn new(mut rw: File) -> LineWriter {
+        let buf = LineWriter::load_buffer(&mut rw);
+        let len = buf.len();
+
+        LineWriter {
+            file: rw,
+            buffer: buf,
+            last_line: len,
+        }
+    }
+
+    /// Reads the whole file into a vector of lines, replacing lines that
+    /// fail to decode with an error marker.
+    fn load_buffer(file: &mut File) -> Vec<String> {
+        file.seek(SeekFrom::Start(0)).unwrap();
+
+        let reader = BufReader::new(file.try_clone().unwrap());
+        reader
+            .lines()
+            .map(|line| match line {
+                Ok(s) => s,
+                Err(err) => format!("UTF-8 Decode err: {err:?}"),
+            })
+            .collect()
+    }
+
+    /// Replaces line `line` (0-indexed) with `data`, padding with empty
+    /// lines when the file is shorter than `line`.
+    ///
+    /// Fix: the previous `self.last_line > line` branch selection had two
+    /// defects — (a) rewriting the line that was just written took the
+    /// append path and emitted a stray empty line instead of rewriting, and
+    /// (b) writing to a line between an earlier rewrite position and the end
+    /// of the buffer sliced `buffer[original_len..line + 1]` with
+    /// `original_len > line + 1` and panicked. Branching on whether the line
+    /// already existed (pre-padding buffer length) handles both while
+    /// behaving identically for the monotonic/ordered sequences the log
+    /// collector produces.
+    pub fn write_to_line(&mut self, line: usize, data: &str) {
+        let original_len = self.buffer.len();
+        while self.buffer.len() <= line {
+            self.buffer.push("".to_owned());
+        }
+
+        self.buffer.remove(line);
+        self.buffer.insert(line, data.to_owned());
+
+        if line < original_len {
+            // The line already existed on disk: rewrite the entire file.
+            self.file.set_len(0).unwrap();
+            self.file.seek(SeekFrom::Start(0)).unwrap();
+            self.file
+                .write_all(self.buffer.join("\n").as_bytes())
+                .unwrap();
+            self.file.write_all(b"\n").unwrap();
+        } else {
+            // Writing past the previous end of file: append only the new
+            // lines (the blank padding plus the line just written).
+            let to_write = self.buffer[original_len..=line].join("\n");
+            self.file.write_all(to_write.as_bytes()).unwrap();
+            self.file.write_all(b"\n").unwrap();
+        }
+
+        self.last_line = line;
+    }
+
+    /// Consumes the writer, returning the underlying file handle.
+    pub fn inner(self) -> File {
+        self.file
+    }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::test_scratch::TestScratch;
+ use std::fs::File;
+ use std::fs::OpenOptions;
+ use std::io::Read;
+ use std::path::Path;
+ use std::time::Instant;
+
+    // Opens `path` truncated, readable and writable, for use as a fixture.
+    fn testfile(path: &Path) -> File {
+        OpenOptions::new()
+            .read(true)
+            .write(true)
+            .truncate(true)
+            .create(true)
+            .open(path)
+            .expect("failed to open scratch file")
+    }
+
+    // Rewinds `f` and asserts its entire content equals `value`.
+    fn assert_file_content<T>(f: &mut T, value: &str)
+    where
+        T: Read + Seek,
+    {
+        let mut mystr: String = String::new();
+        f.seek(SeekFrom::Start(0)).unwrap();
+        f.read_to_string(&mut mystr).unwrap();
+        assert_eq!(mystr, value);
+    }
+
+    // Sequential writes (0, 1, 2), each with a fresh LineWriter over the
+    // same file: content accumulates in order.
+    #[test]
+    fn test_writer_line_ordered() {
+        let p = TestScratch::new_file("writetoline-ordered");
+        let mut f = testfile(&p.path());
+
+        assert_file_content(&mut f, "");
+
+        let mut writer = LineWriter::new(f);
+        writer.write_to_line(0, "hello");
+        f = writer.inner();
+
+        assert_file_content(&mut f, "hello\n");
+
+        let mut writer = LineWriter::new(f);
+        writer.write_to_line(1, "world");
+        f = writer.inner();
+
+        assert_file_content(&mut f, "hello\nworld\n");
+
+        let mut writer = LineWriter::new(f);
+        writer.write_to_line(2, ":)");
+        f = writer.inner();
+
+        assert_file_content(&mut f, "hello\nworld\n:)\n");
+    }
+
+    // Reverse-order writes (2, 1, 0) with fresh writers: writing past the
+    // end pads with blank lines; writing into the middle rewrites the file.
+    #[test]
+    fn test_writer_line_unordered() {
+        let p = TestScratch::new_file("writetoline-unordered");
+        let mut f = testfile(&p.path());
+
+        assert_file_content(&mut f, "");
+
+        {
+            let mut writer = LineWriter::new(f);
+            writer.write_to_line(2, ":)");
+            f = writer.inner();
+        }
+
+        assert_file_content(&mut f, "\n\n:)\n");
+
+        {
+            let mut writer = LineWriter::new(f);
+            writer.write_to_line(1, "world");
+            f = writer.inner();
+        }
+
+        assert_file_content(&mut f, "\nworld\n:)\n");
+
+        {
+            let mut writer = LineWriter::new(f);
+            writer.write_to_line(0, "hello");
+            f = writer.inner();
+        }
+
+        assert_file_content(&mut f, "hello\nworld\n:)\n");
+    }
+
+    // Same reverse-order pattern with long lines, so mid-file rewrites must
+    // replace longer earlier content rather than leaving stale bytes behind.
+    #[test]
+    fn test_writer_line_unordered_long() {
+        let p = TestScratch::new_file("writetoline-unordered-long");
+        let mut f = testfile(&p.path());
+
+        assert_file_content(&mut f, "");
+
+        {
+            let mut writer = LineWriter::new(f);
+            writer.write_to_line(
+                2,
+                "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
+            );
+            f = writer.inner();
+        }
+        assert_file_content(
+            &mut f,
+            "\n\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\n",
+        );
+
+        {
+            let mut writer = LineWriter::new(f);
+            writer.write_to_line(
+                1,
+                "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB",
+            );
+            f = writer.inner();
+        }
+        assert_file_content(
+            &mut f,
+            "\nBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\n",
+        );
+
+        {
+            let mut writer = LineWriter::new(f);
+            writer.write_to_line(
+                0,
+                "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC",
+            );
+            f = writer.inner();
+        }
+        assert_file_content(
+            &mut f,
+            "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC\nBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\n",
+        );
+    }
+
+ #[test]
+ fn test_writer_line_unordered_longish() { // unordered writes with lines of differing, moderate lengths
+ let p = TestScratch::new_file("writetoline-unordered-longish");
+ let mut f = testfile(&p.path());
+
+ assert_file_content(&mut f, "");
+
+ {
+ let mut writer = LineWriter::new(f);
+ writer.write_to_line(2, "hello");
+ f = writer.inner();
+ }
+ assert_file_content(&mut f, "\n\nhello\n");
+
+ {
+ let mut writer = LineWriter::new(f);
+ writer.write_to_line(1, "mynameis"); // longer than the blank line it replaces
+ f = writer.inner();
+ }
+ assert_file_content(&mut f, "\nmynameis\nhello\n");
+
+ {
+ let mut writer = LineWriter::new(f);
+ writer.write_to_line(0, "graham");
+ f = writer.inner();
+ }
+ assert_file_content(&mut f, "graham\nmynameis\nhello\n");
+ }
+
+ #[test]
+ fn test_writer_line_ordered_result() { // same ordered writes as above, but all through one LineWriter; only the final file is checked
+ let p = TestScratch::new_file("writetoline-ordered-result");
+ let mut f = testfile(&p.path());
+
+ let mut writer = LineWriter::new(f);
+ writer.write_to_line(0, "hello");
+ writer.write_to_line(1, "world");
+ writer.write_to_line(2, ":)");
+ f = writer.inner(); // hand the File back for inspection
+
+ assert_file_content(&mut f, "hello\nworld\n:)\n");
+ }
+
+ #[test]
+ fn test_writer_line_unordered_result() { // reverse-order writes through a single LineWriter; final content must match the ordered case
+ let p = TestScratch::new_file("writetoline-unordered-result");
+ let mut f = testfile(&p.path());
+
+ let mut writer = LineWriter::new(f);
+ writer.write_to_line(2, ":)");
+ writer.write_to_line(1, "world");
+ writer.write_to_line(0, "hello");
+ f = writer.inner();
+
+ assert_file_content(&mut f, "hello\nworld\n:)\n");
+ }
+
+ #[test]
+ fn test_writer_line_unordered_long_result() { // reverse-order long-line writes through one LineWriter; only the end state is asserted
+ let p = TestScratch::new_file("writetoline-unordered-long-result");
+ let mut f = testfile(&p.path());
+
+ let mut writer = LineWriter::new(f);
+ writer.write_to_line(
+ 2,
+ "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
+ );
+ writer.write_to_line(
+ 1,
+ "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB",
+ );
+ writer.write_to_line(
+ 0,
+ "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC",
+ );
+ f = writer.inner();
+
+ assert_file_content(
+ &mut f,
+ "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC\nBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\n",
+ ); // lines end up in index order regardless of write order
+ }
+
+ #[test]
+ fn test_writer_line_unordered_longish_result() { // reverse-order writes of varying lengths through one LineWriter
+ let p = TestScratch::new_file("writetoline-unordered-longish-result");
+ let mut f = testfile(&p.path());
+
+ let mut writer = LineWriter::new(f);
+ writer.write_to_line(2, "hello");
+ writer.write_to_line(1, "mynameis");
+ writer.write_to_line(0, "graham");
+ f = writer.inner();
+
+ assert_file_content(&mut f, "graham\nmynameis\nhello\n");
+ }
+
+ #[test]
+ fn test_writer_line_middle() { // writing only line 5 must pad lines 0-4 with empty newline-only lines
+ let p = TestScratch::new_file("writetoline-middle");
+ let mut f = testfile(&p.path());
+
+ assert_file_content(&mut f, "");
+
+ {
+ let mut writer = LineWriter::new(f);
+ writer.write_to_line(5, "hello");
+ f = writer.inner();
+ }
+ assert_file_content(&mut f, "\n\n\n\n\nhello\n"); // five blank lines, then the written line
+ }
+
+ #[test]
+ fn bench_lots_of_ordered_lines() { // smoke/benchmark test: asserts nothing, only prints elapsed time for 3000 ordered writes
+ let p = TestScratch::new_file("bench-ordered-lines");
+ let f = testfile(&p.path());
+ let mut writer = LineWriter::new(f);
+
+ let timer = Instant::now();
+
+ for i in 0..3000 {
+ writer.write_to_line(i, "This is my line!"); // appends in order, the cheap case
+ }
+
+ println!("ordered took: {:?}", timer.elapsed()); // only visible with `cargo test -- --nocapture`
+ }
+
+ #[test]
+ fn bench_lots_of_reversed_lines() { // counterpart to the ordered bench: 3000 writes from highest line down; no assertions
+ let p = TestScratch::new_file("bench-reversed-lines");
+ let f = testfile(&p.path());
+ let mut writer = LineWriter::new(f);
+
+ let timer = Instant::now();
+
+ for i in (0..3000).rev() {
+ writer.write_to_line(i, "This is my line!"); // every write lands before existing content, the expensive case
+ }
+
+ println!("reversed took: {:?}", timer.elapsed()); // only visible with `cargo test -- --nocapture`
+ }
+}
diff --git a/ofborg/tickborg/test-nix/bin/nix-build b/ofborg/tickborg/test-nix/bin/nix-build
new file mode 100755
index 0000000000..24911e3328
--- /dev/null
+++ b/ofborg/tickborg/test-nix/bin/nix-build
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+export PATH=${PATH#*:} # drop the first PATH entry (presumably this wrapper's own bin dir) so exec finds the real nix-build — TODO confirm
+exec nix-build "$@" -I "ofborg-test-bash=$(command -v bash)" # pin <ofborg-test-bash> to the current bash for the test derivations
diff --git a/ofborg/tickborg/test-nix/bin/nix-instantiate b/ofborg/tickborg/test-nix/bin/nix-instantiate
new file mode 100755
index 0000000000..5cf75e2dc0
--- /dev/null
+++ b/ofborg/tickborg/test-nix/bin/nix-instantiate
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+export PATH=${PATH#*:} # drop the first PATH entry (presumably this wrapper's own bin dir) so exec finds the real nix-instantiate — TODO confirm
+exec nix-instantiate "$@" -I "ofborg-test-bash=$(command -v bash)" # pin <ofborg-test-bash> to the current bash for the test derivations
diff --git a/ofborg/tickborg/test-srcs/build-pr/default.nix b/ofborg/tickborg/test-srcs/build-pr/default.nix
new file mode 100644
index 0000000000..231e5840f7
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/build-pr/default.nix
@@ -0,0 +1,26 @@
+let
+ builder = builtins.storePath <ofborg-test-bash>; # bash path injected via -I by the test-nix wrapper scripts
+in
+{
+ success = derivation { # writes $out, so this build succeeds
+ name = "success";
+ system = builtins.currentSystem;
+ inherit builder;
+ args = [ "-c" "echo hi; printf '1\n2\n3\n4\n'; echo ${toString builtins.currentTime} > $out" ]; # currentTime makes each eval produce a fresh derivation — NOTE(review): presumably to defeat caching between test runs
+ };
+
+ failed = derivation { # never writes $out, so this build fails
+ name = "failed";
+ system = builtins.currentSystem;
+ inherit builder;
+ args = [ "-c" "echo hi; echo ${toString builtins.currentTime}" ];
+ };
+
+ sandbox-violation = derivation { # per its name, expected to trip the build sandbox — confirm against the consuming test
+ name = "sandbox-violation";
+ system = builtins.currentSystem;
+ inherit builder;
+ args = [ "-c" "echo hi; echo ${toString builtins.currentTime} > $out" ];
+ src = ./../../src; # references a path outside this fixture directory
+ };
+}
diff --git a/ofborg/tickborg/test-srcs/build-pr/hi another file b/ofborg/tickborg/test-srcs/build-pr/hi another file
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/build-pr/hi another file
diff --git a/ofborg/tickborg/test-srcs/build-pr/succeed.sh b/ofborg/tickborg/test-srcs/build-pr/succeed.sh
new file mode 100644
index 0000000000..c244fe96b6
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/build-pr/succeed.sh
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+echo "$@"
+echo hi
diff --git a/ofborg/tickborg/test-srcs/eval-mixed-failure/default.nix b/ofborg/tickborg/test-srcs/eval-mixed-failure/default.nix
new file mode 100644
index 0000000000..067c72a349
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/eval-mixed-failure/default.nix
@@ -0,0 +1,44 @@
+let
+ fetchGit = builtins.storePath <ofborg-test-bash>; # (see builder below) — comment placement constrained; fallback defined next
+ builder = builtins.storePath <ofborg-test-bash>;
+in
+{ nixpkgs ? fetchGit /fake }:
+
+rec {
+ success = derivation { # writes $out: builds cleanly
+ name = "success";
+ system = builtins.currentSystem;
+ inherit builder;
+ args = [ "-c" "echo hi; echo ${toString builtins.currentTime} > $out" ];
+ };
+
+ failed = derivation { # never writes $out: instantiates but fails to build; also depends on `success`
+ name = "failed";
+ system = builtins.currentSystem;
+ inherit builder;
+ args = [ "-c" "echo hi; echo ${toString builtins.currentTime}; echo ${success}" ];
+ };
+
+ passes-instantiation = derivation { # instantiates fine; build outcome is not the point of this attr
+ name = "passes-instantiation";
+ system = builtins.currentSystem;
+ inherit builder;
+ args = [ "-c" "echo this ones cool" ];
+ };
+
+ nixpkgs-restricted-mode = derivation { # forces evaluation of the `nixpkgs` arg, triggering the restricted-mode failure path
+ name = "nixpkgs-restricted-mode-fetchgit";
+ system = builtins.currentSystem;
+ inherit builder;
+ args = [ "-c" "echo hi; echo ${toString nixpkgs} > $out" ];
+ };
+
+ fails-instantiation = assert builtins.trace '' # assert false at eval time: nix-instantiate fails, printing the traced message
+ You just can't frooble the frozz on this particular system.
+ ''
+ false; { };
+}
diff --git a/ofborg/tickborg/test-srcs/eval/default.nix b/ofborg/tickborg/test-srcs/eval/default.nix
new file mode 100644
index 0000000000..e95f0ddca8
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/eval/default.nix
@@ -0,0 +1,18 @@
+let
+ builder = builtins.storePath <ofborg-test-bash>; # bash path injected via -I by the test-nix wrapper scripts
+in
+rec {
+ success = derivation { # writes $out, so this build succeeds
+ name = "success";
+ system = builtins.currentSystem;
+ inherit builder;
+ args = [ "-c" "echo hi; echo ${toString builtins.currentTime} > $out" ]; # currentTime yields a fresh derivation per eval — NOTE(review): presumably to defeat caching
+ };
+
+ failed = derivation { # never writes $out, so this build fails; `rec` lets it reference `success` as a dependency
+ name = "failed";
+ system = builtins.currentSystem;
+ inherit builder;
+ args = [ "-c" "echo hi; echo ${toString builtins.currentTime}; echo ${success}" ];
+ };
+}
diff --git a/ofborg/tickborg/test-srcs/events/pr-changed-base.json b/ofborg/tickborg/test-srcs/events/pr-changed-base.json
new file mode 100644
index 0000000000..6846bf8d9b
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/events/pr-changed-base.json
@@ -0,0 +1,484 @@
+{
+ "action": "edited",
+ "number": 33299,
+ "pull_request": {
+ "url": "https://api.github.com/repos/NixOS/nixpkgs/pulls/33299",
+ "id": 160662893,
+ "html_url": "https://github.com/NixOS/nixpkgs/pull/33299",
+ "diff_url": "https://github.com/NixOS/nixpkgs/pull/33299.diff",
+ "patch_url": "https://github.com/NixOS/nixpkgs/pull/33299.patch",
+ "issue_url": "https://api.github.com/repos/NixOS/nixpkgs/issues/33299",
+ "number": 33299,
+ "state": "open",
+ "locked": false,
+ "title": "NixOS Tests: record an flv of the test",
+ "user": {
+ "login": "grahamc",
+ "id": 76716,
+ "avatar_url": "https://avatars3.githubusercontent.com/u/76716?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/grahamc",
+ "html_url": "https://github.com/grahamc",
+ "followers_url": "https://api.github.com/users/grahamc/followers",
+ "following_url": "https://api.github.com/users/grahamc/following{/other_user}",
+ "gists_url": "https://api.github.com/users/grahamc/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/grahamc/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/grahamc/subscriptions",
+ "organizations_url": "https://api.github.com/users/grahamc/orgs",
+ "repos_url": "https://api.github.com/users/grahamc/repos",
+ "events_url": "https://api.github.com/users/grahamc/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/grahamc/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "body": "###### Motivation for this change\r\n\r\nSometimes tests can be hard to debug. Maybe recording an FLV from the VNC could help with that? To start, enable the recording on the flaky keymap test.\r\n\r\n###### Things done\r\n\r\n<!-- Please check what applies. Note that these are not hard requirements but merely serve as information for reviewers. -->\r\n\r\n- [ ] Tested using sandboxing ([nix.useSandbox](http://nixos.org/nixos/manual/options.html#opt-nix.useSandbox) on NixOS, or option `build-use-sandbox` in [`nix.conf`](http://nixos.org/nix/manual/#sec-conf-file) on non-NixOS)\r\n- Built on platform(s)\r\n - [ ] NixOS\r\n - [ ] macOS\r\n - [ ] other Linux distributions\r\n- [ ] Tested via one or more NixOS test(s) if existing and applicable for the change (look inside [nixos/tests](https://github.com/NixOS/nixpkgs/blob/master/nixos/tests))\r\n- [ ] Tested compilation of all pkgs that depend on this change using `nix-shell -p nox --run \"nox-review wip\"`\r\n- [ ] Tested execution of all binary files (usually in `./result/bin/`)\r\n- [ ] Fits [CONTRIBUTING.md](https://github.com/NixOS/nixpkgs/blob/master/.github/CONTRIBUTING.md).\r\n\r\n---\r\n\r\n",
+ "created_at": "2018-01-01T22:39:24Z",
+ "updated_at": "2018-02-23T21:48:19Z",
+ "closed_at": null,
+ "merged_at": null,
+ "merge_commit_sha": "e145dffca8579ca8fac15497af5f166d1e1197a4",
+ "assignee": null,
+ "assignees": [],
+ "requested_reviewers": [],
+ "requested_teams": [],
+ "labels": [
+ {
+ "id": 737642262,
+ "url": "https://api.github.com/repos/NixOS/nixpkgs/labels/10.rebuild-darwin:%200",
+ "name": "10.rebuild-darwin: 0",
+ "color": "eeffee",
+ "default": false
+ },
+ {
+ "id": 737642408,
+ "url": "https://api.github.com/repos/NixOS/nixpkgs/labels/10.rebuild-linux:%200",
+ "name": "10.rebuild-linux: 0",
+ "color": "eeffee",
+ "default": false
+ }
+ ],
+ "milestone": null,
+ "commits_url": "https://api.github.com/repos/NixOS/nixpkgs/pulls/33299/commits",
+ "review_comments_url": "https://api.github.com/repos/NixOS/nixpkgs/pulls/33299/comments",
+ "review_comment_url": "https://api.github.com/repos/NixOS/nixpkgs/pulls/comments{/number}",
+ "comments_url": "https://api.github.com/repos/NixOS/nixpkgs/issues/33299/comments",
+ "statuses_url": "https://api.github.com/repos/NixOS/nixpkgs/statuses/887e8b460a7d45ddb3bbdebe01447b251b3229e8",
+ "head": {
+ "label": "grahamc:flv-nixos-tests",
+ "ref": "flv-nixos-tests",
+ "sha": "887e8b460a7d45ddb3bbdebe01447b251b3229e8",
+ "user": {
+ "login": "grahamc",
+ "id": 76716,
+ "avatar_url": "https://avatars3.githubusercontent.com/u/76716?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/grahamc",
+ "html_url": "https://github.com/grahamc",
+ "followers_url": "https://api.github.com/users/grahamc/followers",
+ "following_url": "https://api.github.com/users/grahamc/following{/other_user}",
+ "gists_url": "https://api.github.com/users/grahamc/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/grahamc/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/grahamc/subscriptions",
+ "organizations_url": "https://api.github.com/users/grahamc/orgs",
+ "repos_url": "https://api.github.com/users/grahamc/repos",
+ "events_url": "https://api.github.com/users/grahamc/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/grahamc/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "repo": {
+ "id": 52226505,
+ "name": "nixpkgs",
+ "full_name": "grahamc/nixpkgs",
+ "owner": {
+ "login": "grahamc",
+ "id": 76716,
+ "avatar_url": "https://avatars3.githubusercontent.com/u/76716?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/grahamc",
+ "html_url": "https://github.com/grahamc",
+ "followers_url": "https://api.github.com/users/grahamc/followers",
+ "following_url": "https://api.github.com/users/grahamc/following{/other_user}",
+ "gists_url": "https://api.github.com/users/grahamc/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/grahamc/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/grahamc/subscriptions",
+ "organizations_url": "https://api.github.com/users/grahamc/orgs",
+ "repos_url": "https://api.github.com/users/grahamc/repos",
+ "events_url": "https://api.github.com/users/grahamc/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/grahamc/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "private": false,
+ "html_url": "https://github.com/grahamc/nixpkgs",
+ "description": "Nix Packages collection",
+ "fork": true,
+ "url": "https://api.github.com/repos/grahamc/nixpkgs",
+ "forks_url": "https://api.github.com/repos/grahamc/nixpkgs/forks",
+ "keys_url": "https://api.github.com/repos/grahamc/nixpkgs/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/grahamc/nixpkgs/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/grahamc/nixpkgs/teams",
+ "hooks_url": "https://api.github.com/repos/grahamc/nixpkgs/hooks",
+ "issue_events_url": "https://api.github.com/repos/grahamc/nixpkgs/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/grahamc/nixpkgs/events",
+ "assignees_url": "https://api.github.com/repos/grahamc/nixpkgs/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/grahamc/nixpkgs/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/grahamc/nixpkgs/tags",
+ "blobs_url": "https://api.github.com/repos/grahamc/nixpkgs/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/grahamc/nixpkgs/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/grahamc/nixpkgs/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/grahamc/nixpkgs/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/grahamc/nixpkgs/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/grahamc/nixpkgs/languages",
+ "stargazers_url": "https://api.github.com/repos/grahamc/nixpkgs/stargazers",
+ "contributors_url": "https://api.github.com/repos/grahamc/nixpkgs/contributors",
+ "subscribers_url": "https://api.github.com/repos/grahamc/nixpkgs/subscribers",
+ "subscription_url": "https://api.github.com/repos/grahamc/nixpkgs/subscription",
+ "commits_url": "https://api.github.com/repos/grahamc/nixpkgs/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/grahamc/nixpkgs/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/grahamc/nixpkgs/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/grahamc/nixpkgs/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/grahamc/nixpkgs/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/grahamc/nixpkgs/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/grahamc/nixpkgs/merges",
+ "archive_url": "https://api.github.com/repos/grahamc/nixpkgs/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/grahamc/nixpkgs/downloads",
+ "issues_url": "https://api.github.com/repos/grahamc/nixpkgs/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/grahamc/nixpkgs/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/grahamc/nixpkgs/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/grahamc/nixpkgs/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/grahamc/nixpkgs/labels{/name}",
+ "releases_url": "https://api.github.com/repos/grahamc/nixpkgs/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/grahamc/nixpkgs/deployments",
+ "created_at": "2016-02-21T20:31:54Z",
+ "updated_at": "2017-05-07T04:44:29Z",
+ "pushed_at": "2018-01-01T22:35:52Z",
+ "git_url": "git://github.com/grahamc/nixpkgs.git",
+ "ssh_url": "git@github.com:grahamc/nixpkgs.git",
+ "clone_url": "https://github.com/grahamc/nixpkgs.git",
+ "svn_url": "https://github.com/grahamc/nixpkgs",
+ "homepage": null,
+ "size": 627435,
+ "stargazers_count": 1,
+ "watchers_count": 1,
+ "language": "Nix",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": false,
+ "has_pages": false,
+ "forks_count": 0,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 1,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": null,
+ "url": null
+ },
+ "forks": 0,
+ "open_issues": 1,
+ "watchers": 1,
+ "default_branch": "master"
+ }
+ },
+ "base": {
+ "label": "NixOS:staging",
+ "ref": "staging",
+ "sha": "19784ca4c9ac378539bdc535b02ae673ba6ba0b0",
+ "user": {
+ "login": "NixOS",
+ "id": 487568,
+ "avatar_url": "https://avatars3.githubusercontent.com/u/487568?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/NixOS",
+ "html_url": "https://github.com/NixOS",
+ "followers_url": "https://api.github.com/users/NixOS/followers",
+ "following_url": "https://api.github.com/users/NixOS/following{/other_user}",
+ "gists_url": "https://api.github.com/users/NixOS/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/NixOS/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/NixOS/subscriptions",
+ "organizations_url": "https://api.github.com/users/NixOS/orgs",
+ "repos_url": "https://api.github.com/users/NixOS/repos",
+ "events_url": "https://api.github.com/users/NixOS/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/NixOS/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "repo": {
+ "id": 4542716,
+ "name": "nixpkgs",
+ "full_name": "NixOS/nixpkgs",
+ "owner": {
+ "login": "NixOS",
+ "id": 487568,
+ "avatar_url": "https://avatars3.githubusercontent.com/u/487568?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/NixOS",
+ "html_url": "https://github.com/NixOS",
+ "followers_url": "https://api.github.com/users/NixOS/followers",
+ "following_url": "https://api.github.com/users/NixOS/following{/other_user}",
+ "gists_url": "https://api.github.com/users/NixOS/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/NixOS/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/NixOS/subscriptions",
+ "organizations_url": "https://api.github.com/users/NixOS/orgs",
+ "repos_url": "https://api.github.com/users/NixOS/repos",
+ "events_url": "https://api.github.com/users/NixOS/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/NixOS/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "private": false,
+ "html_url": "https://github.com/NixOS/nixpkgs",
+ "description": "Nix Packages collection",
+ "fork": false,
+ "url": "https://api.github.com/repos/NixOS/nixpkgs",
+ "forks_url": "https://api.github.com/repos/NixOS/nixpkgs/forks",
+ "keys_url": "https://api.github.com/repos/NixOS/nixpkgs/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/NixOS/nixpkgs/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/NixOS/nixpkgs/teams",
+ "hooks_url": "https://api.github.com/repos/NixOS/nixpkgs/hooks",
+ "issue_events_url": "https://api.github.com/repos/NixOS/nixpkgs/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/NixOS/nixpkgs/events",
+ "assignees_url": "https://api.github.com/repos/NixOS/nixpkgs/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/NixOS/nixpkgs/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/NixOS/nixpkgs/tags",
+ "blobs_url": "https://api.github.com/repos/NixOS/nixpkgs/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/NixOS/nixpkgs/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/NixOS/nixpkgs/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/NixOS/nixpkgs/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/NixOS/nixpkgs/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/NixOS/nixpkgs/languages",
+ "stargazers_url": "https://api.github.com/repos/NixOS/nixpkgs/stargazers",
+ "contributors_url": "https://api.github.com/repos/NixOS/nixpkgs/contributors",
+ "subscribers_url": "https://api.github.com/repos/NixOS/nixpkgs/subscribers",
+ "subscription_url": "https://api.github.com/repos/NixOS/nixpkgs/subscription",
+ "commits_url": "https://api.github.com/repos/NixOS/nixpkgs/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/NixOS/nixpkgs/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/NixOS/nixpkgs/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/NixOS/nixpkgs/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/NixOS/nixpkgs/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/NixOS/nixpkgs/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/NixOS/nixpkgs/merges",
+ "archive_url": "https://api.github.com/repos/NixOS/nixpkgs/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/NixOS/nixpkgs/downloads",
+ "issues_url": "https://api.github.com/repos/NixOS/nixpkgs/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/NixOS/nixpkgs/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/NixOS/nixpkgs/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/NixOS/nixpkgs/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/NixOS/nixpkgs/labels{/name}",
+ "releases_url": "https://api.github.com/repos/NixOS/nixpkgs/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/NixOS/nixpkgs/deployments",
+ "created_at": "2012-06-04T02:49:46Z",
+ "updated_at": "2018-02-23T20:56:05Z",
+ "pushed_at": "2018-02-23T21:40:58Z",
+ "git_url": "git://github.com/NixOS/nixpkgs.git",
+ "ssh_url": "git@github.com:NixOS/nixpkgs.git",
+ "clone_url": "https://github.com/NixOS/nixpkgs.git",
+ "svn_url": "https://github.com/NixOS/nixpkgs",
+ "homepage": null,
+ "size": 724069,
+ "stargazers_count": 2239,
+ "watchers_count": 2239,
+ "language": "Nix",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": false,
+ "has_pages": false,
+ "forks_count": 2580,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 2860,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": null,
+ "url": null
+ },
+ "forks": 2580,
+ "open_issues": 2860,
+ "watchers": 2239,
+ "default_branch": "master"
+ }
+ },
+ "_links": {
+ "self": {
+ "href": "https://api.github.com/repos/NixOS/nixpkgs/pulls/33299"
+ },
+ "html": {
+ "href": "https://github.com/NixOS/nixpkgs/pull/33299"
+ },
+ "issue": {
+ "href": "https://api.github.com/repos/NixOS/nixpkgs/issues/33299"
+ },
+ "comments": {
+ "href": "https://api.github.com/repos/NixOS/nixpkgs/issues/33299/comments"
+ },
+ "review_comments": {
+ "href": "https://api.github.com/repos/NixOS/nixpkgs/pulls/33299/comments"
+ },
+ "review_comment": {
+ "href": "https://api.github.com/repos/NixOS/nixpkgs/pulls/comments{/number}"
+ },
+ "commits": {
+ "href": "https://api.github.com/repos/NixOS/nixpkgs/pulls/33299/commits"
+ },
+ "statuses": {
+ "href": "https://api.github.com/repos/NixOS/nixpkgs/statuses/887e8b460a7d45ddb3bbdebe01447b251b3229e8"
+ }
+ },
+ "author_association": "MEMBER",
+ "merged": false,
+ "mergeable": null,
+ "rebaseable": null,
+ "mergeable_state": "unknown",
+ "merged_by": null,
+ "comments": 5,
+ "review_comments": 0,
+ "maintainer_can_modify": true,
+ "commits": 1,
+ "additions": 41,
+ "deletions": 4,
+ "changed_files": 4
+ },
+ "changes": {
+ "base": {
+ "ref": {
+ "from": "master"
+ },
+ "sha": {
+ "from": "a6664d8192038c4dc2ad44169dbb76556fe71ac1"
+ }
+ }
+ },
+ "repository": {
+ "id": 4542716,
+ "name": "nixpkgs",
+ "full_name": "NixOS/nixpkgs",
+ "owner": {
+ "login": "NixOS",
+ "id": 487568,
+ "avatar_url": "https://avatars3.githubusercontent.com/u/487568?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/NixOS",
+ "html_url": "https://github.com/NixOS",
+ "followers_url": "https://api.github.com/users/NixOS/followers",
+ "following_url": "https://api.github.com/users/NixOS/following{/other_user}",
+ "gists_url": "https://api.github.com/users/NixOS/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/NixOS/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/NixOS/subscriptions",
+ "organizations_url": "https://api.github.com/users/NixOS/orgs",
+ "repos_url": "https://api.github.com/users/NixOS/repos",
+ "events_url": "https://api.github.com/users/NixOS/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/NixOS/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "private": false,
+ "html_url": "https://github.com/NixOS/nixpkgs",
+ "description": "Nix Packages collection",
+ "fork": false,
+ "url": "https://api.github.com/repos/NixOS/nixpkgs",
+ "forks_url": "https://api.github.com/repos/NixOS/nixpkgs/forks",
+ "keys_url": "https://api.github.com/repos/NixOS/nixpkgs/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/NixOS/nixpkgs/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/NixOS/nixpkgs/teams",
+ "hooks_url": "https://api.github.com/repos/NixOS/nixpkgs/hooks",
+ "issue_events_url": "https://api.github.com/repos/NixOS/nixpkgs/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/NixOS/nixpkgs/events",
+ "assignees_url": "https://api.github.com/repos/NixOS/nixpkgs/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/NixOS/nixpkgs/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/NixOS/nixpkgs/tags",
+ "blobs_url": "https://api.github.com/repos/NixOS/nixpkgs/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/NixOS/nixpkgs/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/NixOS/nixpkgs/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/NixOS/nixpkgs/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/NixOS/nixpkgs/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/NixOS/nixpkgs/languages",
+ "stargazers_url": "https://api.github.com/repos/NixOS/nixpkgs/stargazers",
+ "contributors_url": "https://api.github.com/repos/NixOS/nixpkgs/contributors",
+ "subscribers_url": "https://api.github.com/repos/NixOS/nixpkgs/subscribers",
+ "subscription_url": "https://api.github.com/repos/NixOS/nixpkgs/subscription",
+ "commits_url": "https://api.github.com/repos/NixOS/nixpkgs/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/NixOS/nixpkgs/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/NixOS/nixpkgs/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/NixOS/nixpkgs/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/NixOS/nixpkgs/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/NixOS/nixpkgs/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/NixOS/nixpkgs/merges",
+ "archive_url": "https://api.github.com/repos/NixOS/nixpkgs/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/NixOS/nixpkgs/downloads",
+ "issues_url": "https://api.github.com/repos/NixOS/nixpkgs/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/NixOS/nixpkgs/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/NixOS/nixpkgs/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/NixOS/nixpkgs/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/NixOS/nixpkgs/labels{/name}",
+ "releases_url": "https://api.github.com/repos/NixOS/nixpkgs/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/NixOS/nixpkgs/deployments",
+ "created_at": "2012-06-04T02:49:46Z",
+ "updated_at": "2018-02-23T20:56:05Z",
+ "pushed_at": "2018-02-23T21:40:58Z",
+ "git_url": "git://github.com/NixOS/nixpkgs.git",
+ "ssh_url": "git@github.com:NixOS/nixpkgs.git",
+ "clone_url": "https://github.com/NixOS/nixpkgs.git",
+ "svn_url": "https://github.com/NixOS/nixpkgs",
+ "homepage": null,
+ "size": 724069,
+ "stargazers_count": 2239,
+ "watchers_count": 2239,
+ "language": "Nix",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": false,
+ "has_pages": false,
+ "forks_count": 2580,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 2860,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": null,
+ "url": null
+ },
+ "forks": 2580,
+ "open_issues": 2860,
+ "watchers": 2239,
+ "default_branch": "master"
+ },
+ "organization": {
+ "login": "NixOS",
+ "id": 487568,
+ "url": "https://api.github.com/orgs/NixOS",
+ "repos_url": "https://api.github.com/orgs/NixOS/repos",
+ "events_url": "https://api.github.com/orgs/NixOS/events",
+ "hooks_url": "https://api.github.com/orgs/NixOS/hooks",
+ "issues_url": "https://api.github.com/orgs/NixOS/issues",
+ "members_url": "https://api.github.com/orgs/NixOS/members{/member}",
+ "public_members_url": "https://api.github.com/orgs/NixOS/public_members{/member}",
+ "avatar_url": "https://avatars3.githubusercontent.com/u/487568?v=4",
+ "description": ""
+ },
+ "sender": {
+ "login": "grahamc",
+ "id": 76716,
+ "avatar_url": "https://avatars3.githubusercontent.com/u/76716?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/grahamc",
+ "html_url": "https://github.com/grahamc",
+ "followers_url": "https://api.github.com/users/grahamc/followers",
+ "following_url": "https://api.github.com/users/grahamc/following{/other_user}",
+ "gists_url": "https://api.github.com/users/grahamc/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/grahamc/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/grahamc/subscriptions",
+ "organizations_url": "https://api.github.com/users/grahamc/orgs",
+ "repos_url": "https://api.github.com/users/grahamc/repos",
+ "events_url": "https://api.github.com/users/grahamc/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/grahamc/received_events",
+ "type": "User",
+ "site_admin": false
+ }
+ }
diff --git a/ofborg/tickborg/test-srcs/events/pr-converted-to-draft.json b/ofborg/tickborg/test-srcs/events/pr-converted-to-draft.json
new file mode 100644
index 0000000000..3062df192a
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/events/pr-converted-to-draft.json
@@ -0,0 +1 @@
+{"action":"converted_to_draft","number":86486,"pull_request":{"url":"https://api.github.com/repos/NixOS/nixpkgs/pulls/86486","id":412206513,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEyMjA2NTEz","html_url":"https://github.com/NixOS/nixpkgs/pull/86486","diff_url":"https://github.com/NixOS/nixpkgs/pull/86486.diff","patch_url":"https://github.com/NixOS/nixpkgs/pull/86486.patch","issue_url":"https://api.github.com/repos/NixOS/nixpkgs/issues/86486","number":86486,"state":"open","locked":false,"title":"nixosTests: re-enable networking tests","user":{"login":"flokli","id":183879,"node_id":"MDQ6VXNlcjE4Mzg3OQ==","avatar_url":"https://avatars0.githubusercontent.com/u/183879?v=4","gravatar_id":"","url":"https://api.github.com/users/flokli","html_url":"https://github.com/flokli","followers_url":"https://api.github.com/users/flokli/followers","following_url":"https://api.github.com/users/flokli/following{/other_user}","gists_url":"https://api.github.com/users/flokli/gists{/gist_id}","starred_url":"https://api.github.com/users/flokli/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/flokli/subscriptions","organizations_url":"https://api.github.com/users/flokli/orgs","repos_url":"https://api.github.com/users/flokli/repos","events_url":"https://api.github.com/users/flokli/events{/privacy}","received_events_url":"https://api.github.com/users/flokli/received_events","type":"User","site_admin":false},"body":"5150378c2f10d34a7ba4404c52f6c882284dd254 fixed the long-brokenrnnixosTests.networking.virtual.rnrnWith all tests failures fixed, and #79328 making debugging much easier,rnlet's re-add it to the tested jobset.rn###### Motivation for this changernrnrn###### Things donernrn<!-- Please check what applies. Note that these are not hard requirements but merely serve as information for reviewers. 
-->rnrn- [x] Tested using sandboxing ([nix.useSandbox](http://nixos.org/nixos/manual/options.html#opt-nix.useSandbox) on NixOS, or option `sandbox` in [`nix.conf`](http://nixos.org/nix/manual/#sec-conf-file) on non-NixOS linux)rn- Built on platform(s)rn - [x] NixOSrn - [ ] macOSrn - [ ] other Linux distributionsrn- [x] Tested via one or more NixOS test(s) if existing and applicable for the change (look inside [nixos/tests](https://github.com/NixOS/nixpkgs/blob/master/nixos/tests))rn- [ ] Tested compilation of all pkgs that depend on this change using `nix-shell -p nixpkgs-review --run \"nixpkgs-review wip\"`rn- [ ] Tested execution of all binary files (usually in `./result/bin/`)rn- [ ] Determined the impact on package closure size (by running `nix path-info -S` before and after)rn- [ ] Ensured that relevant documentation is up to datern- [ ] Fits [CONTRIBUTING.md](https://github.com/NixOS/nixpkgs/blob/master/.github/CONTRIBUTING.md).rn","created_at":"2020-05-01T16:47:11Z","updated_at":"2020-05-22T19:22:35Z","closed_at":null,"merged_at":null,"merge_commit_sha":"56a65abb75c71a06709716f8895786cd65b27caf","assignee":null,"assignees":[],"requested_reviewers":[{"login":"tfc","id":29044,"node_id":"MDQ6VXNlcjI5MDQ0","avatar_url":"https://avatars1.githubusercontent.com/u/29044?v=4","gravatar_id":"","url":"https://api.github.com/users/tfc","html_url":"https://github.com/tfc","followers_url":"https://api.github.com/users/tfc/followers","following_url":"https://api.github.com/users/tfc/following{/other_user}","gists_url":"https://api.github.com/users/tfc/gists{/gist_id}","starred_url":"https://api.github.com/users/tfc/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/tfc/subscriptions","organizations_url":"https://api.github.com/users/tfc/orgs","repos_url":"https://api.github.com/users/tfc/repos","events_url":"https://api.github.com/users/tfc/events{/privacy}","received_events_url":"https://api.github.com/users/tfc/received_events","type":"User","site_a
dmin":false},{"login":"nh2","id":399535,"node_id":"MDQ6VXNlcjM5OTUzNQ==","avatar_url":"https://avatars1.githubusercontent.com/u/399535?v=4","gravatar_id":"","url":"https://api.github.com/users/nh2","html_url":"https://github.com/nh2","followers_url":"https://api.github.com/users/nh2/followers","following_url":"https://api.github.com/users/nh2/following{/other_user}","gists_url":"https://api.github.com/users/nh2/gists{/gist_id}","starred_url":"https://api.github.com/users/nh2/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/nh2/subscriptions","organizations_url":"https://api.github.com/users/nh2/orgs","repos_url":"https://api.github.com/users/nh2/repos","events_url":"https://api.github.com/users/nh2/events{/privacy}","received_events_url":"https://api.github.com/users/nh2/received_events","type":"User","site_admin":false},{"login":"andir","id":638836,"node_id":"MDQ6VXNlcjYzODgzNg==","avatar_url":"https://avatars1.githubusercontent.com/u/638836?v=4","gravatar_id":"","url":"https://api.github.com/users/andir","html_url":"https://github.com/andir","followers_url":"https://api.github.com/users/andir/followers","following_url":"https://api.github.com/users/andir/following{/other_user}","gists_url":"https://api.github.com/users/andir/gists{/gist_id}","starred_url":"https://api.github.com/users/andir/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/andir/subscriptions","organizations_url":"https://api.github.com/users/andir/orgs","repos_url":"https://api.github.com/users/andir/repos","events_url":"https://api.github.com/users/andir/events{/privacy}","received_events_url":"https://api.github.com/users/andir/received_events","type":"User","site_admin":false},{"login":"NinjaTrappeur","id":1219785,"node_id":"MDQ6VXNlcjEyMTk3ODU=","avatar_url":"https://avatars1.githubusercontent.com/u/1219785?v=4","gravatar_id":"","url":"https://api.github.com/users/NinjaTrappeur","html_url":"https://github.com/NinjaTrappeur","followers_url":"htt
ps://api.github.com/users/NinjaTrappeur/followers","following_url":"https://api.github.com/users/NinjaTrappeur/following{/other_user}","gists_url":"https://api.github.com/users/NinjaTrappeur/gists{/gist_id}","starred_url":"https://api.github.com/users/NinjaTrappeur/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/NinjaTrappeur/subscriptions","organizations_url":"https://api.github.com/users/NinjaTrappeur/orgs","repos_url":"https://api.github.com/users/NinjaTrappeur/repos","events_url":"https://api.github.com/users/NinjaTrappeur/events{/privacy}","received_events_url":"https://api.github.com/users/NinjaTrappeur/received_events","type":"User","site_admin":false}],"requested_teams":[],"labels":[{"id":737642262,"node_id":"MDU6TGFiZWw3Mzc2NDIyNjI=","url":"https://api.github.com/repos/NixOS/nixpkgs/labels/10.rebuild-darwin:%200","name":"10.rebuild-darwin: 0","color":"eeffee","default":false,"description":null},{"id":1955058054,"node_id":"MDU6TGFiZWwxOTU1MDU4MDU0","url":"https://api.github.com/repos/NixOS/nixpkgs/labels/10.rebuild-linux:%201","name":"10.rebuild-linux: 1","color":"ededed","default":false,"description":null},{"id":731733923,"node_id":"MDU6TGFiZWw3MzE3MzM5MjM=","url":"https://api.github.com/repos/NixOS/nixpkgs/labels/10.rebuild-linux:%201-10","name":"10.rebuild-linux: 1-10","color":"eeffee","default":false,"description":null},{"id":60265212,"node_id":"MDU6TGFiZWw2MDI2NTIxMg==","url":"https://api.github.com/repos/NixOS/nixpkgs/labels/6.topic:%20nixos","name":"6.topic: 
nixos","color":"fef2c0","default":false,"description":null}],"milestone":null,"draft":true,"commits_url":"https://api.github.com/repos/NixOS/nixpkgs/pulls/86486/commits","review_comments_url":"https://api.github.com/repos/NixOS/nixpkgs/pulls/86486/comments","review_comment_url":"https://api.github.com/repos/NixOS/nixpkgs/pulls/comments{/number}","comments_url":"https://api.github.com/repos/NixOS/nixpkgs/issues/86486/comments","statuses_url":"https://api.github.com/repos/NixOS/nixpkgs/statuses/897d574ae2447e120d5889342e2417f29d5ae81c","head":{"label":"flokli:networking-tests-add","ref":"networking-tests-add","sha":"897d574ae2447e120d5889342e2417f29d5ae81c","user":{"login":"flokli","id":183879,"node_id":"MDQ6VXNlcjE4Mzg3OQ==","avatar_url":"https://avatars0.githubusercontent.com/u/183879?v=4","gravatar_id":"","url":"https://api.github.com/users/flokli","html_url":"https://github.com/flokli","followers_url":"https://api.github.com/users/flokli/followers","following_url":"https://api.github.com/users/flokli/following{/other_user}","gists_url":"https://api.github.com/users/flokli/gists{/gist_id}","starred_url":"https://api.github.com/users/flokli/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/flokli/subscriptions","organizations_url":"https://api.github.com/users/flokli/orgs","repos_url":"https://api.github.com/users/flokli/repos","events_url":"https://api.github.com/users/flokli/events{/privacy}","received_events_url":"https://api.github.com/users/flokli/received_events","type":"User","site_admin":false},"repo":{"id":106616131,"node_id":"MDEwOlJlcG9zaXRvcnkxMDY2MTYxMzE=","name":"nixpkgs","full_name":"flokli/nixpkgs","private":false,"owner":{"login":"flokli","id":183879,"node_id":"MDQ6VXNlcjE4Mzg3OQ==","avatar_url":"https://avatars0.githubusercontent.com/u/183879?v=4","gravatar_id":"","url":"https://api.github.com/users/flokli","html_url":"https://github.com/flokli","followers_url":"https://api.github.com/users/flokli/followers","following_url":
"https://api.github.com/users/flokli/following{/other_user}","gists_url":"https://api.github.com/users/flokli/gists{/gist_id}","starred_url":"https://api.github.com/users/flokli/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/flokli/subscriptions","organizations_url":"https://api.github.com/users/flokli/orgs","repos_url":"https://api.github.com/users/flokli/repos","events_url":"https://api.github.com/users/flokli/events{/privacy}","received_events_url":"https://api.github.com/users/flokli/received_events","type":"User","site_admin":false},"html_url":"https://github.com/flokli/nixpkgs","description":"Nix Packages collection","fork":true,"url":"https://api.github.com/repos/flokli/nixpkgs","forks_url":"https://api.github.com/repos/flokli/nixpkgs/forks","keys_url":"https://api.github.com/repos/flokli/nixpkgs/keys{/key_id}","collaborators_url":"https://api.github.com/repos/flokli/nixpkgs/collaborators{/collaborator}","teams_url":"https://api.github.com/repos/flokli/nixpkgs/teams","hooks_url":"https://api.github.com/repos/flokli/nixpkgs/hooks","issue_events_url":"https://api.github.com/repos/flokli/nixpkgs/issues/events{/number}","events_url":"https://api.github.com/repos/flokli/nixpkgs/events","assignees_url":"https://api.github.com/repos/flokli/nixpkgs/assignees{/user}","branches_url":"https://api.github.com/repos/flokli/nixpkgs/branches{/branch}","tags_url":"https://api.github.com/repos/flokli/nixpkgs/tags","blobs_url":"https://api.github.com/repos/flokli/nixpkgs/git/blobs{/sha}","git_tags_url":"https://api.github.com/repos/flokli/nixpkgs/git/tags{/sha}","git_refs_url":"https://api.github.com/repos/flokli/nixpkgs/git/refs{/sha}","trees_url":"https://api.github.com/repos/flokli/nixpkgs/git/trees{/sha}","statuses_url":"https://api.github.com/repos/flokli/nixpkgs/statuses/{sha}","languages_url":"https://api.github.com/repos/flokli/nixpkgs/languages","stargazers_url":"https://api.github.com/repos/flokli/nixpkgs/stargazers","contributors_url":"http
s://api.github.com/repos/flokli/nixpkgs/contributors","subscribers_url":"https://api.github.com/repos/flokli/nixpkgs/subscribers","subscription_url":"https://api.github.com/repos/flokli/nixpkgs/subscription","commits_url":"https://api.github.com/repos/flokli/nixpkgs/commits{/sha}","git_commits_url":"https://api.github.com/repos/flokli/nixpkgs/git/commits{/sha}","comments_url":"https://api.github.com/repos/flokli/nixpkgs/comments{/number}","issue_comment_url":"https://api.github.com/repos/flokli/nixpkgs/issues/comments{/number}","contents_url":"https://api.github.com/repos/flokli/nixpkgs/contents/{+path}","compare_url":"https://api.github.com/repos/flokli/nixpkgs/compare/{base}...{head}","merges_url":"https://api.github.com/repos/flokli/nixpkgs/merges","archive_url":"https://api.github.com/repos/flokli/nixpkgs/{archive_format}{/ref}","downloads_url":"https://api.github.com/repos/flokli/nixpkgs/downloads","issues_url":"https://api.github.com/repos/flokli/nixpkgs/issues{/number}","pulls_url":"https://api.github.com/repos/flokli/nixpkgs/pulls{/number}","milestones_url":"https://api.github.com/repos/flokli/nixpkgs/milestones{/number}","notifications_url":"https://api.github.com/repos/flokli/nixpkgs/notifications{?since,all,participating}","labels_url":"https://api.github.com/repos/flokli/nixpkgs/labels{/name}","releases_url":"https://api.github.com/repos/flokli/nixpkgs/releases{/id}","deployments_url":"https://api.github.com/repos/flokli/nixpkgs/deployments","created_at":"2017-10-11T22:29:54Z","updated_at":"2017-10-11T22:30:23Z","pushed_at":"2020-05-22T15:10:56Z","git_url":"git://github.com/flokli/nixpkgs.git","ssh_url":"git@github.com:flokli/nixpkgs.git","clone_url":"https://github.com/flokli/nixpkgs.git","svn_url":"https://github.com/flokli/nixpkgs","homepage":null,"size":1106455,"stargazers_count":0,"watchers_count":0,"language":"Nix","has_issues":false,"has_projects":true,"has_downloads":true,"has_wiki":false,"has_pages":false,"forks_count":1,"mirror_url":null,"archi
ved":false,"disabled":false,"open_issues_count":0,"license":{"key":"other","name":"Other","spdx_id":"NOASSERTION","url":null,"node_id":"MDc6TGljZW5zZTA="},"forks":1,"open_issues":0,"watchers":0,"default_branch":"master"}},"base":{"label":"NixOS:master","ref":"master","sha":"af66d338269a88604e93700aead604d0bbcc6414","user":{"login":"NixOS","id":487568,"node_id":"MDEyOk9yZ2FuaXphdGlvbjQ4NzU2OA==","avatar_url":"https://avatars3.githubusercontent.com/u/487568?v=4","gravatar_id":"","url":"https://api.github.com/users/NixOS","html_url":"https://github.com/NixOS","followers_url":"https://api.github.com/users/NixOS/followers","following_url":"https://api.github.com/users/NixOS/following{/other_user}","gists_url":"https://api.github.com/users/NixOS/gists{/gist_id}","starred_url":"https://api.github.com/users/NixOS/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/NixOS/subscriptions","organizations_url":"https://api.github.com/users/NixOS/orgs","repos_url":"https://api.github.com/users/NixOS/repos","events_url":"https://api.github.com/users/NixOS/events{/privacy}","received_events_url":"https://api.github.com/users/NixOS/received_events","type":"Organization","site_admin":false},"repo":{"id":4542716,"node_id":"MDEwOlJlcG9zaXRvcnk0NTQyNzE2","name":"nixpkgs","full_name":"NixOS/nixpkgs","private":false,"owner":{"login":"NixOS","id":487568,"node_id":"MDEyOk9yZ2FuaXphdGlvbjQ4NzU2OA==","avatar_url":"https://avatars3.githubusercontent.com/u/487568?v=4","gravatar_id":"","url":"https://api.github.com/users/NixOS","html_url":"https://github.com/NixOS","followers_url":"https://api.github.com/users/NixOS/followers","following_url":"https://api.github.com/users/NixOS/following{/other_user}","gists_url":"https://api.github.com/users/NixOS/gists{/gist_id}","starred_url":"https://api.github.com/users/NixOS/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/NixOS/subscriptions","organizations_url":"https://api.github.com/users/NixOS/orgs","repos
_url":"https://api.github.com/users/NixOS/repos","events_url":"https://api.github.com/users/NixOS/events{/privacy}","received_events_url":"https://api.github.com/users/NixOS/received_events","type":"Organization","site_admin":false},"html_url":"https://github.com/NixOS/nixpkgs","description":"Nix Packages collection","fork":false,"url":"https://api.github.com/repos/NixOS/nixpkgs","forks_url":"https://api.github.com/repos/NixOS/nixpkgs/forks","keys_url":"https://api.github.com/repos/NixOS/nixpkgs/keys{/key_id}","collaborators_url":"https://api.github.com/repos/NixOS/nixpkgs/collaborators{/collaborator}","teams_url":"https://api.github.com/repos/NixOS/nixpkgs/teams","hooks_url":"https://api.github.com/repos/NixOS/nixpkgs/hooks","issue_events_url":"https://api.github.com/repos/NixOS/nixpkgs/issues/events{/number}","events_url":"https://api.github.com/repos/NixOS/nixpkgs/events","assignees_url":"https://api.github.com/repos/NixOS/nixpkgs/assignees{/user}","branches_url":"https://api.github.com/repos/NixOS/nixpkgs/branches{/branch}","tags_url":"https://api.github.com/repos/NixOS/nixpkgs/tags","blobs_url":"https://api.github.com/repos/NixOS/nixpkgs/git/blobs{/sha}","git_tags_url":"https://api.github.com/repos/NixOS/nixpkgs/git/tags{/sha}","git_refs_url":"https://api.github.com/repos/NixOS/nixpkgs/git/refs{/sha}","trees_url":"https://api.github.com/repos/NixOS/nixpkgs/git/trees{/sha}","statuses_url":"https://api.github.com/repos/NixOS/nixpkgs/statuses/{sha}","languages_url":"https://api.github.com/repos/NixOS/nixpkgs/languages","stargazers_url":"https://api.github.com/repos/NixOS/nixpkgs/stargazers","contributors_url":"https://api.github.com/repos/NixOS/nixpkgs/contributors","subscribers_url":"https://api.github.com/repos/NixOS/nixpkgs/subscribers","subscription_url":"https://api.github.com/repos/NixOS/nixpkgs/subscription","commits_url":"https://api.github.com/repos/NixOS/nixpkgs/commits{/sha}","git_commits_url":"https://api.github.com/repos/NixOS/nixpkgs/git/commits{/sha
}","comments_url":"https://api.github.com/repos/NixOS/nixpkgs/comments{/number}","issue_comment_url":"https://api.github.com/repos/NixOS/nixpkgs/issues/comments{/number}","contents_url":"https://api.github.com/repos/NixOS/nixpkgs/contents/{+path}","compare_url":"https://api.github.com/repos/NixOS/nixpkgs/compare/{base}...{head}","merges_url":"https://api.github.com/repos/NixOS/nixpkgs/merges","archive_url":"https://api.github.com/repos/NixOS/nixpkgs/{archive_format}{/ref}","downloads_url":"https://api.github.com/repos/NixOS/nixpkgs/downloads","issues_url":"https://api.github.com/repos/NixOS/nixpkgs/issues{/number}","pulls_url":"https://api.github.com/repos/NixOS/nixpkgs/pulls{/number}","milestones_url":"https://api.github.com/repos/NixOS/nixpkgs/milestones{/number}","notifications_url":"https://api.github.com/repos/NixOS/nixpkgs/notifications{?since,all,participating}","labels_url":"https://api.github.com/repos/NixOS/nixpkgs/labels{/name}","releases_url":"https://api.github.com/repos/NixOS/nixpkgs/releases{/id}","deployments_url":"https://api.github.com/repos/NixOS/nixpkgs/deployments","created_at":"2012-06-04T02:49:46Z","updated_at":"2020-05-22T18:55:26Z","pushed_at":"2020-05-22T19:15:16Z","git_url":"git://github.com/NixOS/nixpkgs.git","ssh_url":"git@github.com:NixOS/nixpkgs.git","clone_url":"https://github.com/NixOS/nixpkgs.git","svn_url":"https://github.com/NixOS/nixpkgs","homepage":null,"size":1172580,"stargazers_count":5013,"watchers_count":5013,"language":"Nix","has_issues":true,"has_projects":true,"has_downloads":true,"has_wiki":false,"has_pages":false,"forks_count":4811,"mirror_url":null,"archived":false,"disabled":false,"open_issues_count":6161,"license":{"key":"mit","name":"MIT 
License","spdx_id":"MIT","url":"https://api.github.com/licenses/mit","node_id":"MDc6TGljZW5zZTEz"},"forks":4811,"open_issues":6161,"watchers":5013,"default_branch":"master"}},"_links":{"self":{"href":"https://api.github.com/repos/NixOS/nixpkgs/pulls/86486"},"html":{"href":"https://github.com/NixOS/nixpkgs/pull/86486"},"issue":{"href":"https://api.github.com/repos/NixOS/nixpkgs/issues/86486"},"comments":{"href":"https://api.github.com/repos/NixOS/nixpkgs/issues/86486/comments"},"review_comments":{"href":"https://api.github.com/repos/NixOS/nixpkgs/pulls/86486/comments"},"review_comment":{"href":"https://api.github.com/repos/NixOS/nixpkgs/pulls/comments{/number}"},"commits":{"href":"https://api.github.com/repos/NixOS/nixpkgs/pulls/86486/commits"},"statuses":{"href":"https://api.github.com/repos/NixOS/nixpkgs/statuses/897d574ae2447e120d5889342e2417f29d5ae81c"}},"author_association":"CONTRIBUTOR","merged":false,"mergeable":true,"rebaseable":true,"mergeable_state":"draft","merged_by":null,"comments":8,"review_comments":0,"maintainer_can_modify":true,"commits":2,"additions":17,"deletions":18,"changed_files":2},"repository":{"id":4542716,"node_id":"MDEwOlJlcG9zaXRvcnk0NTQyNzE2","name":"nixpkgs","full_name":"NixOS/nixpkgs","private":false,"owner":{"login":"NixOS","id":487568,"node_id":"MDEyOk9yZ2FuaXphdGlvbjQ4NzU2OA==","avatar_url":"https://avatars3.githubusercontent.com/u/487568?v=4","gravatar_id":"","url":"https://api.github.com/users/NixOS","html_url":"https://github.com/NixOS","followers_url":"https://api.github.com/users/NixOS/followers","following_url":"https://api.github.com/users/NixOS/following{/other_user}","gists_url":"https://api.github.com/users/NixOS/gists{/gist_id}","starred_url":"https://api.github.com/users/NixOS/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/NixOS/subscriptions","organizations_url":"https://api.github.com/users/NixOS/orgs","repos_url":"https://api.github.com/users/NixOS/repos","events_url":"https://api.github.com/
users/NixOS/events{/privacy}","received_events_url":"https://api.github.com/users/NixOS/received_events","type":"Organization","site_admin":false},"html_url":"https://github.com/NixOS/nixpkgs","description":"Nix Packages collection","fork":false,"url":"https://api.github.com/repos/NixOS/nixpkgs","forks_url":"https://api.github.com/repos/NixOS/nixpkgs/forks","keys_url":"https://api.github.com/repos/NixOS/nixpkgs/keys{/key_id}","collaborators_url":"https://api.github.com/repos/NixOS/nixpkgs/collaborators{/collaborator}","teams_url":"https://api.github.com/repos/NixOS/nixpkgs/teams","hooks_url":"https://api.github.com/repos/NixOS/nixpkgs/hooks","issue_events_url":"https://api.github.com/repos/NixOS/nixpkgs/issues/events{/number}","events_url":"https://api.github.com/repos/NixOS/nixpkgs/events","assignees_url":"https://api.github.com/repos/NixOS/nixpkgs/assignees{/user}","branches_url":"https://api.github.com/repos/NixOS/nixpkgs/branches{/branch}","tags_url":"https://api.github.com/repos/NixOS/nixpkgs/tags","blobs_url":"https://api.github.com/repos/NixOS/nixpkgs/git/blobs{/sha}","git_tags_url":"https://api.github.com/repos/NixOS/nixpkgs/git/tags{/sha}","git_refs_url":"https://api.github.com/repos/NixOS/nixpkgs/git/refs{/sha}","trees_url":"https://api.github.com/repos/NixOS/nixpkgs/git/trees{/sha}","statuses_url":"https://api.github.com/repos/NixOS/nixpkgs/statuses/{sha}","languages_url":"https://api.github.com/repos/NixOS/nixpkgs/languages","stargazers_url":"https://api.github.com/repos/NixOS/nixpkgs/stargazers","contributors_url":"https://api.github.com/repos/NixOS/nixpkgs/contributors","subscribers_url":"https://api.github.com/repos/NixOS/nixpkgs/subscribers","subscription_url":"https://api.github.com/repos/NixOS/nixpkgs/subscription","commits_url":"https://api.github.com/repos/NixOS/nixpkgs/commits{/sha}","git_commits_url":"https://api.github.com/repos/NixOS/nixpkgs/git/commits{/sha}","comments_url":"https://api.github.com/repos/NixOS/nixpkgs/comments{/number}","issu
e_comment_url":"https://api.github.com/repos/NixOS/nixpkgs/issues/comments{/number}","contents_url":"https://api.github.com/repos/NixOS/nixpkgs/contents/{+path}","compare_url":"https://api.github.com/repos/NixOS/nixpkgs/compare/{base}...{head}","merges_url":"https://api.github.com/repos/NixOS/nixpkgs/merges","archive_url":"https://api.github.com/repos/NixOS/nixpkgs/{archive_format}{/ref}","downloads_url":"https://api.github.com/repos/NixOS/nixpkgs/downloads","issues_url":"https://api.github.com/repos/NixOS/nixpkgs/issues{/number}","pulls_url":"https://api.github.com/repos/NixOS/nixpkgs/pulls{/number}","milestones_url":"https://api.github.com/repos/NixOS/nixpkgs/milestones{/number}","notifications_url":"https://api.github.com/repos/NixOS/nixpkgs/notifications{?since,all,participating}","labels_url":"https://api.github.com/repos/NixOS/nixpkgs/labels{/name}","releases_url":"https://api.github.com/repos/NixOS/nixpkgs/releases{/id}","deployments_url":"https://api.github.com/repos/NixOS/nixpkgs/deployments","created_at":"2012-06-04T02:49:46Z","updated_at":"2020-05-22T18:55:26Z","pushed_at":"2020-05-22T19:15:16Z","git_url":"git://github.com/NixOS/nixpkgs.git","ssh_url":"git@github.com:NixOS/nixpkgs.git","clone_url":"https://github.com/NixOS/nixpkgs.git","svn_url":"https://github.com/NixOS/nixpkgs","homepage":null,"size":1172580,"stargazers_count":5013,"watchers_count":5013,"language":"Nix","has_issues":true,"has_projects":true,"has_downloads":true,"has_wiki":false,"has_pages":false,"forks_count":4811,"mirror_url":null,"archived":false,"disabled":false,"open_issues_count":6161,"license":{"key":"mit","name":"MIT 
License","spdx_id":"MIT","url":"https://api.github.com/licenses/mit","node_id":"MDc6TGljZW5zZTEz"},"forks":4811,"open_issues":6161,"watchers":5013,"default_branch":"master"},"organization":{"login":"NixOS","id":487568,"node_id":"MDEyOk9yZ2FuaXphdGlvbjQ4NzU2OA==","url":"https://api.github.com/orgs/NixOS","repos_url":"https://api.github.com/orgs/NixOS/repos","events_url":"https://api.github.com/orgs/NixOS/events","hooks_url":"https://api.github.com/orgs/NixOS/hooks","issues_url":"https://api.github.com/orgs/NixOS/issues","members_url":"https://api.github.com/orgs/NixOS/members{/member}","public_members_url":"https://api.github.com/orgs/NixOS/public_members{/member}","avatar_url":"https://avatars3.githubusercontent.com/u/487568?v=4","description":""},"sender":{"login":"flokli","id":183879,"node_id":"MDQ6VXNlcjE4Mzg3OQ==","avatar_url":"https://avatars0.githubusercontent.com/u/183879?v=4","gravatar_id":"","url":"https://api.github.com/users/flokli","html_url":"https://github.com/flokli","followers_url":"https://api.github.com/users/flokli/followers","following_url":"https://api.github.com/users/flokli/following{/other_user}","gists_url":"https://api.github.com/users/flokli/gists{/gist_id}","starred_url":"https://api.github.com/users/flokli/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/flokli/subscriptions","organizations_url":"https://api.github.com/users/flokli/orgs","repos_url":"https://api.github.com/users/flokli/repos","events_url":"https://api.github.com/users/flokli/events{/privacy}","received_events_url":"https://api.github.com/users/flokli/received_events","type":"User","site_admin":false}}
diff --git a/ofborg/tickborg/test-srcs/maintainers-pr/default.nix b/ofborg/tickborg/test-srcs/maintainers-pr/default.nix
new file mode 100644
index 0000000000..f41a6048ad
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/maintainers-pr/default.nix
@@ -0,0 +1,8 @@
+{ ... }:
+{
+ lib = import ./lib;
+ foo.bar.packageA = {
+ name = "Hi";
+ meta.maintainers = [{ github = "test"; }];
+ };
+}
diff --git a/ofborg/tickborg/test-srcs/maintainers/default.nix b/ofborg/tickborg/test-srcs/maintainers/default.nix
new file mode 100644
index 0000000000..961bb32292
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/maintainers/default.nix
@@ -0,0 +1,5 @@
+{ ... }:
+{
+ lib = import ./lib;
+ foo.bar.packageA = { };
+}
diff --git a/ofborg/tickborg/test-srcs/maintainers/lib/attrsets.nix b/ofborg/tickborg/test-srcs/maintainers/lib/attrsets.nix
new file mode 100644
index 0000000000..d374d229f5
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/maintainers/lib/attrsets.nix
@@ -0,0 +1,482 @@
+{ lib }:
+# Operations on attribute sets.
+
+let
+ inherit (builtins) head tail length;
+ inherit (lib.trivial) and;
+ inherit (lib.strings) concatStringsSep;
+ inherit (lib.lists) fold concatMap concatLists;
+in
+
+rec {
+ inherit (builtins) attrNames listToAttrs hasAttr isAttrs getAttr;
+
+
+ /* Return an attribute from nested attribute sets.
+
+ Example:
+ x = { a = { b = 3; }; }
+ attrByPath ["a" "b"] 6 x
+ => 3
+ attrByPath ["z" "z"] 6 x
+ => 6
+ */
+ attrByPath = attrPath: default: e:
+ let attr = head attrPath;
+ in
+ if attrPath == [] then e
+ else if e ? ${attr}
+ then attrByPath (tail attrPath) default e.${attr}
+ else default;
+
+ /* Return if an attribute from nested attribute set exists.
+
+ Example:
+ x = { a = { b = 3; }; }
+ hasAttrByPath ["a" "b"] x
+ => true
+ hasAttrByPath ["z" "z"] x
+ => false
+
+ */
+ hasAttrByPath = attrPath: e:
+ let attr = head attrPath;
+ in
+ if attrPath == [] then true
+ else if e ? ${attr}
+ then hasAttrByPath (tail attrPath) e.${attr}
+ else false;
+
+
+ /* Return nested attribute set in which an attribute is set.
+
+ Example:
+ setAttrByPath ["a" "b"] 3
+ => { a = { b = 3; }; }
+ */
+ setAttrByPath = attrPath: value:
+ if attrPath == [] then value
+ else listToAttrs
+ [ { name = head attrPath; value = setAttrByPath (tail attrPath) value; } ];
+
+
+ /* Like `getAttrPath' without a default value. If it doesn't find the
+ path it will throw.
+
+ Example:
+ x = { a = { b = 3; }; }
+ getAttrFromPath ["a" "b"] x
+ => 3
+ getAttrFromPath ["z" "z"] x
+ => error: cannot find attribute `z.z'
+ */
+ getAttrFromPath = attrPath: set:
+ let errorMsg = "cannot find attribute `" + concatStringsSep "." attrPath + "'";
+ in attrByPath attrPath (abort errorMsg) set;
+
+
+ /* Return the specified attributes from a set.
+
+ Example:
+ attrVals ["a" "b" "c"] as
+ => [as.a as.b as.c]
+ */
+ attrVals = nameList: set: map (x: set.${x}) nameList;
+
+
+ /* Return the values of all attributes in the given set, sorted by
+ attribute name.
+
+ Example:
+ attrValues {c = 3; a = 1; b = 2;}
+ => [1 2 3]
+ */
+ attrValues = builtins.attrValues or (attrs: attrVals (attrNames attrs) attrs);
+
+
+ /* Given a set of attribute names, return the set of the corresponding
+ attributes from the given set.
+
+ Example:
+ getAttrs [ "a" "b" ] { a = 1; b = 2; c = 3; }
+ => { a = 1; b = 2; }
+ */
+ getAttrs = names: attrs: genAttrs names (name: attrs.${name});
+
+ /* Collect each attribute named `attr' from a list of attribute
+ sets. Sets that don't contain the named attribute are ignored.
+
+ Example:
+ catAttrs "a" [{a = 1;} {b = 0;} {a = 2;}]
+ => [1 2]
+ */
+ catAttrs = builtins.catAttrs or
+ (attr: l: concatLists (map (s: if s ? ${attr} then [s.${attr}] else []) l));
+
+
+ /* Filter an attribute set by removing all attributes for which the
+ given predicate return false.
+
+ Example:
+ filterAttrs (n: v: n == "foo") { foo = 1; bar = 2; }
+ => { foo = 1; }
+ */
+ filterAttrs = pred: set:
+ listToAttrs (concatMap (name: let v = set.${name}; in if pred name v then [(nameValuePair name v)] else []) (attrNames set));
+
+
+ /* Filter an attribute set recursively by removing all attributes for
+ which the given predicate return false.
+
+ Example:
+ filterAttrsRecursive (n: v: v != null) { foo = { bar = null; }; }
+ => { foo = {}; }
+ */
+ filterAttrsRecursive = pred: set:
+ listToAttrs (
+ concatMap (name:
+ let v = set.${name}; in
+ if pred name v then [
+ (nameValuePair name (
+ if isAttrs v then filterAttrsRecursive pred v
+ else v
+ ))
+ ] else []
+ ) (attrNames set)
+ );
+
+ /* Apply fold functions to values grouped by key.
+
+ Example:
+ foldAttrs (n: a: [n] ++ a) [] [{ a = 2; } { a = 3; }]
+ => { a = [ 2 3 ]; }
+ */
+ foldAttrs = op: nul: list_of_attrs:
+ fold (n: a:
+ fold (name: o:
+ o // { ${name} = op n.${name} (a.${name} or nul); }
+ ) a (attrNames n)
+ ) {} list_of_attrs;
+
+
+ /* Recursively collect sets that verify a given predicate named `pred'
+ from the set `attrs'. The recursion is stopped when the predicate is
+ verified.
+
+ Type:
+ collect ::
+ (AttrSet -> Bool) -> AttrSet -> [x]
+
+ Example:
+ collect isList { a = { b = ["b"]; }; c = [1]; }
+ => [["b"] [1]]
+
+ collect (x: x ? outPath)
+ { a = { outPath = "a/"; }; b = { outPath = "b/"; }; }
+ => [{ outPath = "a/"; } { outPath = "b/"; }]
+ */
+ collect = pred: attrs:
+ if pred attrs then
+ [ attrs ]
+ else if isAttrs attrs then
+ concatMap (collect pred) (attrValues attrs)
+ else
+ [];
+
+
+ /* Utility function that creates a {name, value} pair as expected by
+ builtins.listToAttrs.
+
+ Example:
+ nameValuePair "some" 6
+ => { name = "some"; value = 6; }
+ */
+ nameValuePair = name: value: { inherit name value; };
+
+
+ /* Apply a function to each element in an attribute set. The
+ function takes two arguments --- the attribute name and its value
+ --- and returns the new value for the attribute. The result is a
+ new attribute set.
+
+ Example:
+ mapAttrs (name: value: name + "-" + value)
+ { x = "foo"; y = "bar"; }
+ => { x = "x-foo"; y = "y-bar"; }
+ */
+ mapAttrs = builtins.mapAttrs or
+ (f: set:
+ listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set)));
+
+
+ /* Like `mapAttrs', but allows the name of each attribute to be
+ changed in addition to the value. The applied function should
+ return both the new name and value as a `nameValuePair'.
+
+ Example:
+ mapAttrs' (name: value: nameValuePair ("foo_" + name) ("bar-" + value))
+ { x = "a"; y = "b"; }
+ => { foo_x = "bar-a"; foo_y = "bar-b"; }
+ */
+ mapAttrs' = f: set:
+ listToAttrs (map (attr: f attr set.${attr}) (attrNames set));
+
+
+ /* Call a function for each attribute in the given set and return
+ the result in a list.
+
+ Example:
+ mapAttrsToList (name: value: name + value)
+ { x = "a"; y = "b"; }
+ => [ "xa" "yb" ]
+ */
+ mapAttrsToList = f: attrs:
+ map (name: f name attrs.${name}) (attrNames attrs);
+
+
+ /* Like `mapAttrs', except that it recursively applies itself to
+ attribute sets. Also, the first argument of the argument
+ function is a *list* of the names of the containing attributes.
+
+ Type:
+ mapAttrsRecursive ::
+ ([String] -> a -> b) -> AttrSet -> AttrSet
+
+ Example:
+ mapAttrsRecursive (path: value: concatStringsSep "-" (path ++ [value]))
+ { n = { a = "A"; m = { b = "B"; c = "C"; }; }; d = "D"; }
+ => { n = { a = "n-a-A"; m = { b = "n-m-b-B"; c = "n-m-c-C"; }; }; d = "d-D"; }
+ */
+ mapAttrsRecursive = mapAttrsRecursiveCond (as: true);
+
+
+ /* Like `mapAttrsRecursive', but it takes an additional predicate
+ function that tells it whether to recursive into an attribute
+ set. If it returns false, `mapAttrsRecursiveCond' does not
+ recurse, but does apply the map function. It is returns true, it
+ does recurse, and does not apply the map function.
+
+ Type:
+ mapAttrsRecursiveCond ::
+ (AttrSet -> Bool) -> ([String] -> a -> b) -> AttrSet -> AttrSet
+
+ Example:
+ # To prevent recursing into derivations (which are attribute
+ # sets with the attribute "type" equal to "derivation"):
+ mapAttrsRecursiveCond
+ (as: !(as ? "type" && as.type == "derivation"))
+ (x: ... do something ...)
+ attrs
+ */
+ mapAttrsRecursiveCond = cond: f: set:
+ let
+ recurse = path: set:
+ let
+ g =
+ name: value:
+ if isAttrs value && cond value
+ then recurse (path ++ [name]) value
+ else f (path ++ [name]) value;
+ in mapAttrs g set;
+ in recurse [] set;
+
+
+ /* Generate an attribute set by mapping a function over a list of
+ attribute names.
+
+ Example:
+ genAttrs [ "foo" "bar" ] (name: "x_" + name)
+ => { foo = "x_foo"; bar = "x_bar"; }
+ */
+ genAttrs = names: f:
+ listToAttrs (map (n: nameValuePair n (f n)) names);
+
+
+ /* Check whether the argument is a derivation. Any set with
+ { type = "derivation"; } counts as a derivation.
+
+ Example:
+ nixpkgs = import <nixpkgs> {}
+ isDerivation nixpkgs.ruby
+ => true
+ isDerivation "foobar"
+ => false
+ */
+ isDerivation = x: isAttrs x && x ? type && x.type == "derivation";
+
+ /* Converts a store path to a fake derivation. */
+ toDerivation = path:
+ let
+ path' = builtins.storePath path;
+ res =
+ { type = "derivation";
+ name = builtins.unsafeDiscardStringContext (builtins.substring 33 (-1) (baseNameOf path'));
+ outPath = path';
+ outputs = [ "out" ];
+ out = res;
+ outputName = "out";
+ };
+ in res;
+
+
+ /* If `cond' is true, return the attribute set `as',
+ otherwise an empty attribute set.
+
+ Example:
+ optionalAttrs (true) { my = "set"; }
+ => { my = "set"; }
+ optionalAttrs (false) { my = "set"; }
+ => { }
+ */
+ optionalAttrs = cond: as: if cond then as else {};
+
+
+ /* Merge sets of attributes and use the function f to merge attributes
+ values.
+
+ Example:
+ zipAttrsWithNames ["a"] (name: vs: vs) [{a = "x";} {a = "y"; b = "z";}]
+ => { a = ["x" "y"]; }
+ */
+ zipAttrsWithNames = names: f: sets:
+ listToAttrs (map (name: {
+ inherit name;
+ value = f name (catAttrs name sets);
+ }) names);
+
+ /* Implementation note: Common names appear multiple times in the list of
+ names, hopefully this does not affect the system because the maximal
+ laziness avoid computing twice the same expression and listToAttrs does
+ not care about duplicated attribute names.
+
+ Example:
+ zipAttrsWith (name: values: values) [{a = "x";} {a = "y"; b = "z";}]
+ => { a = ["x" "y"]; b = ["z"] }
+ */
+ zipAttrsWith = f: sets: zipAttrsWithNames (concatMap attrNames sets) f sets;
+ /* Like `zipAttrsWith' with `(name: values: value)' as the function.
+
+ Example:
+ zipAttrs [{a = "x";} {a = "y"; b = "z";}]
+ => { a = ["x" "y"]; b = ["z"] }
+ */
+ zipAttrs = zipAttrsWith (name: values: values);
+
+ /* Does the same as the update operator '//' except that attributes are
+ merged until the given predicate is verified. The predicate should
+ accept 3 arguments which are the path to reach the attribute, a part of
+ the first attribute set and a part of the second attribute set. When
+ the predicate is verified, the value of the first attribute set is
+ replaced by the value of the second attribute set.
+
+ Example:
+ recursiveUpdateUntil (path: l: r: path == ["foo"]) {
+ # first attribute set
+ foo.bar = 1;
+ foo.baz = 2;
+ bar = 3;
+ } {
+ #second attribute set
+ foo.bar = 1;
+ foo.quz = 2;
+ baz = 4;
+ }
+
+ returns: {
+ foo.bar = 1; # 'foo.*' from the second set
+ foo.quz = 2; #
+ bar = 3; # 'bar' from the first set
+ baz = 4; # 'baz' from the second set
+ }
+
+ */
+ recursiveUpdateUntil = pred: lhs: rhs:
+ let f = attrPath:
+ zipAttrsWith (n: values:
+ let here = attrPath ++ [n]; in
+ if tail values == []
+ || pred here (head (tail values)) (head values) then
+ head values
+ else
+ f here values
+ );
+ in f [] [rhs lhs];
+
+ /* A recursive variant of the update operator ‘//’. The recursion
+ stops when one of the attribute values is not an attribute set,
+ in which case the right hand side value takes precedence over the
+ left hand side value.
+
+ Example:
+ recursiveUpdate {
+ boot.loader.grub.enable = true;
+ boot.loader.grub.device = "/dev/hda";
+ } {
+ boot.loader.grub.device = "";
+ }
+
+ returns: {
+ boot.loader.grub.enable = true;
+ boot.loader.grub.device = "";
+ }
+
+ */
+ recursiveUpdate = lhs: rhs:
+ recursiveUpdateUntil (path: lhs: rhs:
+ !(isAttrs lhs && isAttrs rhs)
+ ) lhs rhs;
+
+ /* Returns true if the pattern is contained in the set. False otherwise.
+
+ Example:
+ matchAttrs { cpu = {}; } { cpu = { bits = 64; }; }
+ => true
+ */
+ matchAttrs = pattern: attrs: assert isAttrs pattern;
+ fold and true (attrValues (zipAttrsWithNames (attrNames pattern) (n: values:
+ let pat = head values; val = head (tail values); in
+ if length values == 1 then false
+ else if isAttrs pat then isAttrs val && matchAttrs pat val
+ else pat == val
+ ) [pattern attrs]));
+
+ /* Override only the attributes that are already present in the old set
+ useful for deep-overriding.
+
+ Example:
+ overrideExisting {} { a = 1; }
+ => {}
+ overrideExisting { b = 2; } { a = 1; }
+ => { b = 2; }
+ overrideExisting { a = 3; b = 2; } { a = 1; }
+ => { a = 1; b = 2; }
+ */
+ overrideExisting = old: new:
+ mapAttrs (name: value: new.${name} or value) old;
+
+ /* Get a package output.
+ If no output is found, fallback to `.out` and then to the default.
+
+ Example:
+ getOutput "dev" pkgs.openssl
+ => "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-dev"
+ */
+ getOutput = output: pkg:
+ if pkg.outputUnspecified or false
+ then pkg.${output} or pkg.out or pkg
+ else pkg;
+
+ getBin = getOutput "bin";
+ getLib = getOutput "lib";
+ getDev = getOutput "dev";
+
+ /* Pick the outputs of packages to place in buildInputs */
+ chooseDevOutputs = drvs: builtins.map getDev drvs;
+
+ /*** deprecated stuff ***/
+
+ zipWithNames = zipAttrsWithNames;
+ zip = builtins.trace
+ "lib.zip is deprecated, use lib.zipAttrsWith instead" zipAttrsWith;
+
+}
diff --git a/ofborg/tickborg/test-srcs/maintainers/lib/default.nix b/ofborg/tickborg/test-srcs/maintainers/lib/default.nix
new file mode 100644
index 0000000000..916f6e0519
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/maintainers/lib/default.nix
@@ -0,0 +1,137 @@
+/* Library of low-level helper functions for nix expressions.
+ *
+ * Please implement (mostly) exhaustive unit tests
+ * for new functions in `./tests.nix'.
+ */
+let
+
+ inherit (import ./fixed-points.nix {}) makeExtensible;
+
+ lib = makeExtensible (self: let
+ callLibs = file: import file { lib = self; };
+ in with self; {
+
+ # often used, or depending on very little
+ trivial = callLibs ./trivial.nix;
+ fixedPoints = callLibs ./fixed-points.nix;
+
+ # datatypes
+ attrsets = callLibs ./attrsets.nix;
+ lists = callLibs ./lists.nix;
+ strings = callLibs ./strings.nix;
+ stringsWithDeps = callLibs ./strings-with-deps.nix;
+
+ # packaging
+ customisation = callLibs ./customisation.nix;
+ maintainers = import ../maintainers/maintainer-list.nix;
+ meta = callLibs ./meta.nix;
+ sources = callLibs ./sources.nix;
+ versions = callLibs ./versions.nix;
+
+ # module system
+ modules = callLibs ./modules.nix;
+ options = callLibs ./options.nix;
+ types = callLibs ./types.nix;
+
+ # constants
+ licenses = callLibs ./licenses.nix;
+ systems = callLibs ./systems;
+
+ # misc
+ asserts = callLibs ./asserts.nix;
+ debug = callLibs ./debug.nix;
+ generators = callLibs ./generators.nix;
+ misc = callLibs ./deprecated.nix;
+
+ # domain-specific
+ fetchers = callLibs ./fetchers.nix;
+
+ # Eval-time filesystem handling
+ filesystem = callLibs ./filesystem.nix;
+
+ # back-compat aliases
+ platforms = systems.forMeta;
+
+ inherit (builtins) add addErrorContext attrNames concatLists
+ deepSeq elem elemAt filter genericClosure genList getAttr
+ hasAttr head isAttrs isBool isInt isList isString length
+ lessThan listToAttrs pathExists readFile replaceStrings seq
+ stringLength sub substring tail;
+ inherit (trivial) id const concat or and bitAnd bitOr bitXor bitNot
+ boolToString mergeAttrs flip mapNullable inNixShell min max
+ importJSON warn info nixpkgsVersion version mod compare
+ splitByAndCompare functionArgs setFunctionArgs isFunction;
+ inherit (fixedPoints) fix fix' converge extends composeExtensions
+ makeExtensible makeExtensibleWithCustomName;
+ inherit (attrsets) attrByPath hasAttrByPath setAttrByPath
+ getAttrFromPath attrVals attrValues getAttrs catAttrs filterAttrs
+ filterAttrsRecursive foldAttrs collect nameValuePair mapAttrs
+ mapAttrs' mapAttrsToList mapAttrsRecursive mapAttrsRecursiveCond
+ genAttrs isDerivation toDerivation optionalAttrs
+ zipAttrsWithNames zipAttrsWith zipAttrs recursiveUpdateUntil
+ recursiveUpdate matchAttrs overrideExisting getOutput getBin
+ getLib getDev chooseDevOutputs zipWithNames zip;
+ inherit (lists) singleton foldr fold foldl foldl' imap0 imap1
+ concatMap flatten remove findSingle findFirst any all count
+ optional optionals toList range partition zipListsWith zipLists
+ reverseList listDfs toposort sort naturalSort compareLists take
+ drop sublist last init crossLists unique intersectLists
+ subtractLists mutuallyExclusive groupBy groupBy';
+ inherit (strings) concatStrings concatMapStrings concatImapStrings
+ intersperse concatStringsSep concatMapStringsSep
+ concatImapStringsSep makeSearchPath makeSearchPathOutput
+ makeLibraryPath makeBinPath makePerlPath makeFullPerlPath optionalString
+ hasPrefix hasSuffix stringToCharacters stringAsChars escape
+ escapeShellArg escapeShellArgs replaceChars lowerChars
+ upperChars toLower toUpper addContextFrom splitString
+ removePrefix removeSuffix versionOlder versionAtLeast getVersion
+ nameFromURL enableFeature enableFeatureAs withFeature
+ withFeatureAs fixedWidthString fixedWidthNumber isStorePath
+ toInt readPathsFromFile fileContents;
+ inherit (stringsWithDeps) textClosureList textClosureMap
+ noDepEntry fullDepEntry packEntry stringAfter;
+ inherit (customisation) overrideDerivation makeOverridable
+ callPackageWith callPackagesWith extendDerivation hydraJob
+ makeScope;
+ inherit (meta) addMetaAttrs dontDistribute setName updateName
+ appendToName mapDerivationAttrset lowPrio lowPrioSet hiPrio
+ hiPrioSet;
+ inherit (sources) pathType pathIsDirectory cleanSourceFilter
+ cleanSource sourceByRegex sourceFilesBySuffices
+ commitIdFromGitRepo cleanSourceWith pathHasContext
+ canCleanSource;
+ inherit (modules) evalModules closeModules unifyModuleSyntax
+ applyIfFunction unpackSubmodule packSubmodule mergeModules
+ mergeModules' mergeOptionDecls evalOptionValue mergeDefinitions
+ pushDownProperties dischargeProperties filterOverrides
+ sortProperties fixupOptionType mkIf mkAssert mkMerge mkOverride
+ mkOptionDefault mkDefault mkForce mkVMOverride mkStrict
+ mkFixStrictness mkOrder mkBefore mkAfter mkAliasDefinitions
+ mkAliasAndWrapDefinitions fixMergeModules mkRemovedOptionModule
+ mkRenamedOptionModule mkMergedOptionModule mkChangedOptionModule
+ mkAliasOptionModule doRename filterModules;
+ inherit (options) isOption mkEnableOption mkSinkUndeclaredOptions
+ mergeDefaultOption mergeOneOption mergeEqualOption getValues
+ getFiles optionAttrSetToDocList optionAttrSetToDocList'
+ scrubOptionValue literalExample showOption showFiles
+ unknownModule mkOption;
+ inherit (types) isType setType defaultTypeMerge defaultFunctor
+ isOptionType mkOptionType;
+ inherit (asserts)
+ assertMsg assertOneOf;
+ inherit (debug) addErrorContextToAttrs traceIf traceVal traceValFn
+ traceXMLVal traceXMLValMarked traceSeq traceSeqN traceValSeq
+ traceValSeqFn traceValSeqN traceValSeqNFn traceShowVal
+ traceShowValMarked showVal traceCall traceCall2 traceCall3
+ traceValIfNot runTests testAllTrue traceCallXml attrNamesToStr;
+ inherit (misc) maybeEnv defaultMergeArg defaultMerge foldArgs
+ maybeAttrNullable maybeAttr ifEnable checkFlag getValue
+ checkReqs uniqList uniqListExt condConcat lazyGenericClosure
+ innerModifySumArgs modifySumArgs innerClosePropagation
+ closePropagation mapAttrsFlatten nvs setAttr setAttrMerge
+ mergeAttrsWithFunc mergeAttrsConcatenateValues
+ mergeAttrsNoOverride mergeAttrByFunc mergeAttrsByFuncDefaults
+ mergeAttrsByFuncDefaultsClean mergeAttrBy
+ nixType imap;
+ });
+in lib
diff --git a/ofborg/tickborg/test-srcs/maintainers/lib/fixed-points.nix b/ofborg/tickborg/test-srcs/maintainers/lib/fixed-points.nix
new file mode 100644
index 0000000000..2f818c88de
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/maintainers/lib/fixed-points.nix
@@ -0,0 +1,101 @@
+{ ... }:
+rec {
+ # Compute the fixed point of the given function `f`, which is usually an
+ # attribute set that expects its final, non-recursive representation as an
+ # argument:
+ #
+ # f = self: { foo = "foo"; bar = "bar"; foobar = self.foo + self.bar; }
+ #
+ # Nix evaluates this recursion until all references to `self` have been
+ # resolved. At that point, the final result is returned and `f x = x` holds:
+ #
+ # nix-repl> fix f
+ # { bar = "bar"; foo = "foo"; foobar = "foobar"; }
+ #
+ # Type: fix :: (a -> a) -> a
+ #
+ # See https://en.wikipedia.org/wiki/Fixed-point_combinator for further
+ # details.
+ fix = f: let x = f x; in x;
+
+ # A variant of `fix` that records the original recursive attribute set in the
+ # result. This is useful in combination with the `extends` function to
+ # implement deep overriding. See pkgs/development/haskell-modules/default.nix
+ # for a concrete example.
+ fix' = f: let x = f x // { __unfix__ = f; }; in x;
+
+ # Return the fixpoint that `f` converges to when called recursively, starting
+ # with the input `x`.
+ #
+ # nix-repl> converge (x: x / 2) 16
+ # 0
+ converge = f: x:
+ if (f x) == x
+ then x
+ else converge f (f x);
+
+ # Modify the contents of an explicitly recursive attribute set in a way that
+ # honors `self`-references. This is accomplished with a function
+ #
+ # g = self: super: { foo = super.foo + " + "; }
+ #
+ # that has access to the unmodified input (`super`) as well as the final
+ # non-recursive representation of the attribute set (`self`). `extends`
+ # differs from the native `//` operator insofar as that it's applied *before*
+ # references to `self` are resolved:
+ #
+ # nix-repl> fix (extends g f)
+ # { bar = "bar"; foo = "foo + "; foobar = "foo + bar"; }
+ #
+ # The name of the function is inspired by object-oriented inheritance, i.e.
+ # think of it as an infix operator `g extends f` that mimics the syntax from
+ # Java. It may seem counter-intuitive to have the "base class" as the second
+ # argument, but it's nice this way if several uses of `extends` are cascaded.
+ #
+ # To get a better understanding how `extends` turns a function with a fix
+ # point (the package set we start with) into a new function with a different fix
+ # point (the desired packages set) lets just see, how `extends g f`
+ # unfolds with `g` and `f` defined above:
+ #
+ # extends g f = self: let super = f self; in super // g self super;
+ # = self: let super = { foo = "foo"; bar = "bar"; foobar = self.foo + self.bar; }; in super // g self super
+ # = self: { foo = "foo"; bar = "bar"; foobar = self.foo + self.bar; } // g self { foo = "foo"; bar = "bar"; foobar = self.foo + self.bar; }
+ # = self: { foo = "foo"; bar = "bar"; foobar = self.foo + self.bar; } // { foo = "foo" + " + "; }
+ # = self: { foo = "foo + "; bar = "bar"; foobar = self.foo + self.bar; }
+ #
+ extends = f: rattrs: self: let super = rattrs self; in super // f self super;
+
+ # Compose two extending functions of the type expected by 'extends'
+ # into one where changes made in the first are available in the
+ # 'super' of the second
+ composeExtensions =
+ f: g: self: super:
+ let fApplied = f self super;
+ super' = super // fApplied;
+ in fApplied // g self super';
+
+ # Create an overridable, recursive attribute set. For example:
+ #
+ # nix-repl> obj = makeExtensible (self: { })
+ #
+ # nix-repl> obj
+ # { __unfix__ = «lambda»; extend = «lambda»; }
+ #
+ # nix-repl> obj = obj.extend (self: super: { foo = "foo"; })
+ #
+ # nix-repl> obj
+ # { __unfix__ = «lambda»; extend = «lambda»; foo = "foo"; }
+ #
+ # nix-repl> obj = obj.extend (self: super: { foo = super.foo + " + "; bar = "bar"; foobar = self.foo + self.bar; })
+ #
+ # nix-repl> obj
+ # { __unfix__ = «lambda»; bar = "bar"; extend = «lambda»; foo = "foo + "; foobar = "foo + bar"; }
+ makeExtensible = makeExtensibleWithCustomName "extend";
+
+ # Same as `makeExtensible` but the name of the extending attribute is
+ # customized.
+ makeExtensibleWithCustomName = extenderName: rattrs:
+ fix' rattrs // {
+ ${extenderName} = f: makeExtensibleWithCustomName extenderName (extends f rattrs);
+ };
+}
diff --git a/ofborg/tickborg/test-srcs/maintainers/lib/lists.nix b/ofborg/tickborg/test-srcs/maintainers/lib/lists.nix
new file mode 100644
index 0000000000..be541427c2
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/maintainers/lib/lists.nix
@@ -0,0 +1,663 @@
+# General list operations.
+
+{ lib }:
+with lib.trivial;
+let
+ inherit (lib.strings) toInt;
+in
+rec {
+
+ inherit (builtins) head tail length isList elemAt concatLists filter elem genList;
+
+ /* Create a list consisting of a single element. `singleton x` is
+ sometimes more convenient with respect to indentation than `[x]`
+ when x spans multiple lines.
+
+ Type: singleton :: a -> [a]
+
+ Example:
+ singleton "foo"
+ => [ "foo" ]
+ */
+ singleton = x: [x];
+
+ /* “right fold” a binary function `op` between successive elements of
+ `list` with `nul' as the starting value, i.e.,
+ `foldr op nul [x_1 x_2 ... x_n] == op x_1 (op x_2 ... (op x_n nul))`.
+
+ Type: foldr :: (a -> b -> b) -> b -> [a] -> b
+
+ Example:
+ concat = foldr (a: b: a + b) "z"
+ concat [ "a" "b" "c" ]
+ => "abcz"
+ # different types
+ strange = foldr (int: str: toString (int + 1) + str) "a"
+ strange [ 1 2 3 4 ]
+ => "2345a"
+ */
+ foldr = op: nul: list:
+ let
+ len = length list;
+ fold' = n:
+ if n == len
+ then nul
+ else op (elemAt list n) (fold' (n + 1));
+ in fold' 0;
+
+ /* `fold` is an alias of `foldr` for historic reasons */
+ # FIXME(Profpatsch): deprecate?
+ fold = foldr;
+
+
+ /* “left fold”, like `foldr`, but from the left:
+ `foldl op nul [x_1 x_2 ... x_n] == op (... (op (op nul x_1) x_2) ... x_n)`.
+
+ Type: foldl :: (b -> a -> b) -> b -> [a] -> b
+
+ Example:
+ lconcat = foldl (a: b: a + b) "z"
+ lconcat [ "a" "b" "c" ]
+ => "zabc"
+ # different types
+ lstrange = foldl (str: int: str + toString (int + 1)) ""
+ strange [ 1 2 3 4 ]
+ => "a2345"
+ */
+ foldl = op: nul: list:
+ let
+ foldl' = n:
+ if n == -1
+ then nul
+ else op (foldl' (n - 1)) (elemAt list n);
+ in foldl' (length list - 1);
+
+ /* Strict version of `foldl`.
+
+ The difference is that evaluation is forced upon access. Usually used
+ with small whole results (in contract with lazily-generated list or large
+ lists where only a part is consumed.)
+
+ Type: foldl' :: (b -> a -> b) -> b -> [a] -> b
+ */
+ foldl' = builtins.foldl' or foldl;
+
+ /* Map with index starting from 0
+
+ Type: imap0 :: (int -> a -> b) -> [a] -> [b]
+
+ Example:
+ imap0 (i: v: "${v}-${toString i}") ["a" "b"]
+ => [ "a-0" "b-1" ]
+ */
+ imap0 = f: list: genList (n: f n (elemAt list n)) (length list);
+
+ /* Map with index starting from 1
+
+ Type: imap1 :: (int -> a -> b) -> [a] -> [b]
+
+ Example:
+ imap1 (i: v: "${v}-${toString i}") ["a" "b"]
+ => [ "a-1" "b-2" ]
+ */
+ imap1 = f: list: genList (n: f (n + 1) (elemAt list n)) (length list);
+
+ /* Map and concatenate the result.
+
+ Type: concatMap :: (a -> [b]) -> [a] -> [b]
+
+ Example:
+ concatMap (x: [x] ++ ["z"]) ["a" "b"]
+ => [ "a" "z" "b" "z" ]
+ */
+ concatMap = builtins.concatMap or (f: list: concatLists (map f list));
+
+ /* Flatten the argument into a single list; that is, nested lists are
+ spliced into the top-level lists.
+
+ Example:
+ flatten [1 [2 [3] 4] 5]
+ => [1 2 3 4 5]
+ flatten 1
+ => [1]
+ */
+ flatten = x:
+ if isList x
+ then concatMap (y: flatten y) x
+ else [x];
+
+ /* Remove elements equal to 'e' from a list. Useful for buildInputs.
+
+ Type: remove :: a -> [a] -> [a]
+
+ Example:
+ remove 3 [ 1 3 4 3 ]
+ => [ 1 4 ]
+ */
+ remove =
+ # Element to remove from the list
+ e: filter (x: x != e);
+
+ /* Find the sole element in the list matching the specified
+ predicate, returns `default` if no such element exists, or
+ `multiple` if there are multiple matching elements.
+
+ Type: findSingle :: (a -> bool) -> a -> a -> [a] -> a
+
+ Example:
+ findSingle (x: x == 3) "none" "multiple" [ 1 3 3 ]
+ => "multiple"
+ findSingle (x: x == 3) "none" "multiple" [ 1 3 ]
+ => 3
+ findSingle (x: x == 3) "none" "multiple" [ 1 9 ]
+ => "none"
+ */
+ findSingle =
+ # Predicate
+ pred:
+ # Default value to return if element was not found.
+ default:
+ # Default value to return if more than one element was found
+ multiple:
+ # Input list
+ list:
+ let found = filter pred list; len = length found;
+ in if len == 0 then default
+ else if len != 1 then multiple
+ else head found;
+
+ /* Find the first element in the list matching the specified
+ predicate or return `default` if no such element exists.
+
+ Type: findFirst :: (a -> bool) -> a -> [a] -> a
+
+ Example:
+ findFirst (x: x > 3) 7 [ 1 6 4 ]
+ => 6
+ findFirst (x: x > 9) 7 [ 1 6 4 ]
+ => 7
+ */
+ findFirst =
+ # Predicate
+ pred:
+ # Default value to return
+ default:
+ # Input list
+ list:
+ let found = filter pred list;
+ in if found == [] then default else head found;
+
+ /* Return true if function `pred` returns true for at least one
+ element of `list`.
+
+ Type: any :: (a -> bool) -> [a] -> bool
+
+ Example:
+ any isString [ 1 "a" { } ]
+ => true
+ any isString [ 1 { } ]
+ => false
+ */
+ any = builtins.any or (pred: foldr (x: y: if pred x then true else y) false);
+
+ /* Return true if function `pred` returns true for all elements of
+ `list`.
+
+ Type: all :: (a -> bool) -> [a] -> bool
+
+ Example:
+ all (x: x < 3) [ 1 2 ]
+ => true
+ all (x: x < 3) [ 1 2 3 ]
+ => false
+ */
+ all = builtins.all or (pred: foldr (x: y: if pred x then y else false) true);
+
+ /* Count how many elements of `list` match the supplied predicate
+ function.
+
+ Type: count :: (a -> bool) -> [a] -> int
+
+ Example:
+ count (x: x == 3) [ 3 2 3 4 6 ]
+ => 2
+ */
+ count =
+ # Predicate
+ pred: foldl' (c: x: if pred x then c + 1 else c) 0;
+
+ /* Return a singleton list or an empty list, depending on a boolean
+ value. Useful when building lists with optional elements
+ (e.g. `++ optional (system == "i686-linux") flashplayer').
+
+ Type: optional :: bool -> a -> [a]
+
+ Example:
+ optional true "foo"
+ => [ "foo" ]
+ optional false "foo"
+ => [ ]
+ */
+ optional = cond: elem: if cond then [elem] else [];
+
+ /* Return a list or an empty list, depending on a boolean value.
+
+ Type: optionals :: bool -> [a] -> [a]
+
+ Example:
+ optionals true [ 2 3 ]
+ => [ 2 3 ]
+ optionals false [ 2 3 ]
+ => [ ]
+ */
+ optionals =
+ # Condition
+ cond:
+ # List to return if condition is true
+ elems: if cond then elems else [];
+
+
+ /* If argument is a list, return it; else, wrap it in a singleton
+ list. If you're using this, you should almost certainly
+ reconsider if there isn't a more "well-typed" approach.
+
+ Example:
+ toList [ 1 2 ]
+ => [ 1 2 ]
+ toList "hi"
+ => [ "hi "]
+ */
+ toList = x: if isList x then x else [x];
+
+ /* Return a list of integers from `first' up to and including `last'.
+
+ Type: range :: int -> int -> [int]
+
+ Example:
+ range 2 4
+ => [ 2 3 4 ]
+ range 3 2
+ => [ ]
+ */
+ range =
+ # First integer in the range
+ first:
+ # Last integer in the range
+ last:
+ if first > last then
+ []
+ else
+ genList (n: first + n) (last - first + 1);
+
+ /* Splits the elements of a list in two lists, `right` and
+ `wrong`, depending on the evaluation of a predicate.
+
+ Type: (a -> bool) -> [a] -> { right :: [a], wrong :: [a] }
+
+ Example:
+ partition (x: x > 2) [ 5 1 2 3 4 ]
+ => { right = [ 5 3 4 ]; wrong = [ 1 2 ]; }
+ */
+ partition = builtins.partition or (pred:
+ foldr (h: t:
+ if pred h
+ then { right = [h] ++ t.right; wrong = t.wrong; }
+ else { right = t.right; wrong = [h] ++ t.wrong; }
+ ) { right = []; wrong = []; });
+
+ /* Splits the elements of a list into many lists, using the return value of a predicate.
+ Predicate should return a string which becomes keys of attrset `groupBy' returns.
+
+ `groupBy'` allows to customise the combining function and initial value
+
+ Example:
+ groupBy (x: boolToString (x > 2)) [ 5 1 2 3 4 ]
+ => { true = [ 5 3 4 ]; false = [ 1 2 ]; }
+ groupBy (x: x.name) [ {name = "icewm"; script = "icewm &";}
+ {name = "xfce"; script = "xfce4-session &";}
+ {name = "icewm"; script = "icewmbg &";}
+ {name = "mate"; script = "gnome-session &";}
+ ]
+ => { icewm = [ { name = "icewm"; script = "icewm &"; }
+ { name = "icewm"; script = "icewmbg &"; } ];
+ mate = [ { name = "mate"; script = "gnome-session &"; } ];
+ xfce = [ { name = "xfce"; script = "xfce4-session &"; } ];
+ }
+
+ groupBy' builtins.add 0 (x: boolToString (x > 2)) [ 5 1 2 3 4 ]
+ => { true = 12; false = 3; }
+ */
+ groupBy' = op: nul: pred: lst:
+ foldl' (r: e:
+ let
+ key = pred e;
+ in
+ r // { ${key} = op (r.${key} or nul) e; }
+ ) {} lst;
+
+ groupBy = groupBy' (sum: e: sum ++ [e]) [];
+
+  /* Zip two lists element-wise with a combining function; the result is
+     as long as the shorter input.
+
+     Type: zipListsWith :: (a -> b -> c) -> [a] -> [b] -> [c]
+
+     Example:
+       zipListsWith (a: b: a + b) ["h" "l"] ["e" "o"]
+       => ["he" "lo"]
+  */
+  zipListsWith =
+    # Function to zip elements of both lists
+    f:
+    # First list
+    fst:
+    # Second list
+    snd:
+    let n = min (length fst) (length snd);
+    in genList (i: f (elemAt fst i) (elemAt snd i)) n;
+
+  /* Zip two lists into a list of { fst, snd } pairs; stops at the
+     shorter list.
+
+     Type: zipLists :: [a] -> [b] -> [{ fst :: a, snd :: b}]
+
+     Example:
+       zipLists [ 1 2 ] [ "a" "b" ]
+       => [ { fst = 1; snd = "a"; } { fst = 2; snd = "b"; } ]
+  */
+  zipLists = zipListsWith (a: b: { fst = a; snd = b; });
+
+  /* Return the list with its elements in reverse order.
+
+     Type: reverseList :: [a] -> [a]
+
+     Example:
+       reverseList [ "b" "o" "j" ]
+       => [ "j" "o" "b" ]
+  */
+  reverseList = xs:
+    let n = length xs;
+    in genList (i: elemAt xs (n - i - 1)) n;
+
+ /* Depth-First Search (DFS) for lists `list != []`.
+
+ `before a b == true` means that `b` depends on `a` (there's an
+ edge from `b` to `a`).
+
+ Example:
+ listDfs true hasPrefix [ "/home/user" "other" "/" "/home" ]
+ == { minimal = "/"; # minimal element
+ visited = [ "/home/user" ]; # seen elements (in reverse order)
+ rest = [ "/home" "other" ]; # everything else
+ }
+
+ listDfs true hasPrefix [ "/home/user" "other" "/" "/home" "/" ]
+ == { cycle = "/"; # cycle encountered at this element
+ loops = [ "/" ]; # and continues to these elements
+ visited = [ "/" "/home/user" ]; # elements leading to the cycle (in reverse order)
+ rest = [ "/home" "other" ]; # everything else
+ }
+
+ */
+ listDfs = stopOnCycles: before: list:
+ let
+ dfs' = us: visited: rest:
+ let
+ # already-visited elements that should come before `us`: a cycle
+ c = filter (x: before x us) visited;
+ # split the unvisited elements into those before `us` and the rest
+ b = partition (x: before x us) rest;
+ in if stopOnCycles && (length c > 0)
+ then { cycle = us; loops = c; inherit visited rest; }
+ else if length b.right == 0
+ then # nothing is before us
+ { minimal = us; inherit visited rest; }
+ else # grab the first one before us and continue
+ dfs' (head b.right)
+ ([ us ] ++ visited)
+ (tail b.right ++ b.wrong);
+ in dfs' (head list) [] (tail list);
+
+ /* Sort a list based on a partial ordering using DFS. This
+ implementation is O(N^2), if your ordering is linear, use `sort`
+ instead.
+
+ `before a b == true` means that `b` should be after `a`
+ in the result.
+
+ Example:
+
+ toposort hasPrefix [ "/home/user" "other" "/" "/home" ]
+ == { result = [ "/" "/home" "/home/user" "other" ]; }
+
+ toposort hasPrefix [ "/home/user" "other" "/" "/home" "/" ]
+ == { cycle = [ "/home/user" "/" "/" ]; # path leading to a cycle
+ loops = [ "/" ]; } # loops back to these elements
+
+ toposort hasPrefix [ "other" "/home/user" "/home" "/" ]
+ == { result = [ "other" "/" "/home" "/home/user" ]; }
+
+ toposort (a: b: a < b) [ 3 2 1 ] == { result = [ 1 2 3 ]; }
+
+ */
+ toposort = before: list:
+ let
+ dfsthis = listDfs true before list;
+ # NOTE: bound lazily - only forced (and recursed into) by the
+ # branches below that did not detect a cycle at the current vertex
+ toporest = toposort before (dfsthis.visited ++ dfsthis.rest);
+ in
+ if length list < 2
+ then # finish
+ { result = list; }
+ else if dfsthis ? "cycle"
+ then # there's a cycle, starting from the current vertex, return it
+ { cycle = reverseList ([ dfsthis.cycle ] ++ dfsthis.visited);
+ inherit (dfsthis) loops; }
+ else if toporest ? "cycle"
+ then # there's a cycle somewhere else in the graph, return it
+ toporest
+ # Slow, but short. Can be made a bit faster with an explicit stack.
+ else # there are no cycles
+ { result = [ dfsthis.minimal ] ++ toporest.result; };
+
+ /* Sort a list based on a comparator function which compares two
+ elements and returns true if the first argument is strictly below
+ the second argument. The returned list is sorted in an increasing
+ order. The implementation does a quick-sort.
+
+ Example:
+ sort (a: b: a < b) [ 5 3 7 ]
+ => [ 3 5 7 ]
+ */
+ sort = builtins.sort or (
+ strictLess: list:
+ let
+ len = length list;
+ first = head list;
+ # Partition elements 1..len-1 around the pivot `first`: anything
+ # strictly greater than the pivot goes right, everything else left.
+ pivot' = n: acc@{ left, right }: let el = elemAt list n; next = pivot' (n + 1); in
+ if n == len
+ then acc
+ else if strictLess first el
+ then next { inherit left; right = [ el ] ++ right; }
+ else
+ next { left = [ el ] ++ left; inherit right; };
+ pivot = pivot' 1 { left = []; right = []; };
+ in
+ # lists of fewer than two elements are already sorted
+ if len < 2 then list
+ else (sort strictLess pivot.left) ++ [ first ] ++ (sort strictLess pivot.right));
+
+  /* Compare two lists element-by-element using `cmp`, returning the
+     first non-zero comparison; a shorter list sorts before a longer
+     one that it is a prefix of.
+
+     Example:
+       compareLists compare [] []
+       => 0
+       compareLists compare [] [ "a" ]
+       => -1
+       compareLists compare [ "a" ] []
+       => 1
+       compareLists compare [ "a" "b" ] [ "a" "c" ]
+       => 1
+  */
+  compareLists = cmp: a: b:
+    if a == [] then
+      (if b == [] then 0 else -1)
+    else if b == [] then
+      1
+    else
+      let order = cmp (head a) (head b); in
+      if order == 0
+      then compareLists cmp (tail a) (tail b)
+      else order;
+
+ /* Sort list using "Natural sorting".
+ Numeric portions of strings are sorted in numeric order.
+
+ Example:
+ naturalSort ["disk11" "disk8" "disk100" "disk9"]
+ => ["disk8" "disk9" "disk11" "disk100"]
+ naturalSort ["10.46.133.149" "10.5.16.62" "10.54.16.25"]
+ => ["10.5.16.62" "10.46.133.149" "10.54.16.25"]
+ naturalSort ["v0.2" "v0.15" "v0.0.9"]
+ => [ "v0.0.9" "v0.2" "v0.15" ]
+ */
+ naturalSort = lst:
+ let
+ # split a string on digit runs and turn those runs into ints, so
+ # e.g. "disk11" becomes the mixed list [ "disk" 11 ]
+ vectorise = s: map (x: if isList x then toInt (head x) else x) (builtins.split "(0|[1-9][0-9]*)" s);
+ prepared = map (x: [ (vectorise x) x ]) lst; # remember vectorised version for O(n) regex splits
+ less = a: b: (compareLists compare (head a) (head b)) < 0;
+ in
+ # compare by the vectorised form, but return the original strings
+ map (x: elemAt x 1) (sort less prepared);
+
+  /* Return the first (at most) N elements of a list.
+
+     Type: take :: int -> [a] -> [a]
+
+     Example:
+       take 2 [ "a" "b" "c" "d" ]
+       => [ "a" "b" ]
+       take 2 [ ]
+       => [ ]
+  */
+  take =
+    # Number of elements to take
+    n: sublist 0 n;
+
+  /* Remove the first (at most) N elements of a list.
+
+     Type: drop :: int -> [a] -> [a]
+
+     Example:
+       drop 2 [ "a" "b" "c" "d" ]
+       => [ "c" "d" ]
+       drop 2 [ ]
+       => [ ]
+  */
+  drop =
+    # Number of elements to drop
+    n:
+    # Input list
+    xs: sublist n (length xs) xs;
+
+  /* Return a list consisting of at most `count` elements of `list`,
+     starting at index `start`.
+
+     Type: sublist :: int -> int -> [a] -> [a]
+
+     Example:
+       sublist 1 3 [ "a" "b" "c" "d" "e" ]
+       => [ "b" "c" "d" ]
+       sublist 1 3 [ ]
+       => [ ]
+  */
+  sublist =
+    # Index at which to start the sublist
+    start:
+    # Number of elements to take
+    count:
+    # Input list
+    list:
+    let
+      len = length list;
+      # clamp the requested count so that every index stays in bounds
+      size =
+        if start >= len then 0
+        else if start + count > len then len - start
+        else count;
+    in genList (i: elemAt list (i + start)) size;
+
+  /* Return the last element of a list.
+
+     This function throws an error if the list is empty.
+
+     Type: last :: [a] -> a
+
+     Example:
+       last [ 1 2 3 ]
+       => 3
+  */
+  last = xs:
+    assert lib.assertMsg (xs != []) "lists.last: list must not be empty!";
+    elemAt xs (length xs - 1);
+
+  /* Return all elements but the last.
+
+     This function throws an error if the list is empty.
+
+     Type: init :: [a] -> [a]
+
+     Example:
+       init [ 1 2 3 ]
+       => [ 1 2 ]
+  */
+  init = xs:
+    assert lib.assertMsg (xs != []) "lists.init: list must not be empty!";
+    take (length xs - 1) xs;
+
+
+ /* Return the image of the cross product of some lists by a function.
+
+ Example:
+ crossLists (x:y: "${toString x}${toString y}") [[1 2] [3 4]]
+ => [ "13" "14" "23" "24" ]
+ */
+ # Folds over the argument lists: each step applies every partially
+ # applied function accumulated so far to every element of the next list.
+ crossLists = f: foldl (fs: args: concatMap (f: map f args) fs) [f];
+
+
+  /* Remove duplicate elements from the list, keeping the first
+     occurrence of each. O(n^2) complexity.
+
+     Type: unique :: [a] -> [a]
+
+     Example:
+       unique [ 3 2 3 4 ]
+       => [ 3 2 4 ]
+  */
+  unique = list:
+    if list == [] then []
+    else
+      let
+        first = head list;
+        deduped = unique (drop 1 list);
+      in [ first ] ++ remove first deduped;
+
+  /* Intersect list 'e' with another list: keep the elements of the
+     second list that also occur in 'e'. O(nm) complexity.
+
+     Example:
+       intersectLists [ 1 2 3 ] [ 6 3 2 ]
+       => [ 3 2 ]
+  */
+  intersectLists = e: filter (x: elem x e);
+
+  /* Subtract list 'e' from another list: keep the elements of the
+     second list that do not occur in 'e'. O(nm) complexity.
+
+     Example:
+       subtractLists [ 3 2 ] [ 1 2 3 4 5 3 ]
+       => [ 1 4 5 ]
+  */
+  subtractLists = e: filter (x: !(elem x e));
+
+  /* Test if two lists have no common element.
+     It should be slightly more efficient than (intersectLists a b == [])
+  */
+  mutuallyExclusive = a: b:
+    a == [] ||
+    (!(builtins.elem (builtins.head a) b) &&
+     mutuallyExclusive (builtins.tail a) b);
+
+}
diff --git a/ofborg/tickborg/test-srcs/maintainers/lib/strings.nix b/ofborg/tickborg/test-srcs/maintainers/lib/strings.nix
new file mode 100644
index 0000000000..48420a3678
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/maintainers/lib/strings.nix
@@ -0,0 +1,684 @@
+/* String manipulation functions. */
+{ lib }:
+let
+
+inherit (builtins) length;
+
+in
+
+rec {
+
+ inherit (builtins) stringLength substring head tail isString replaceStrings;
+
+  /* Concatenate a list of strings into a single string.
+
+     Type: concatStrings :: [string] -> string
+
+     Example:
+       concatStrings ["foo" "bar"]
+       => "foobar"
+  */
+  concatStrings = builtins.concatStringsSep "";
+
+  /* Map a function over a list and concatenate the resulting strings.
+
+     Type: concatMapStrings :: (a -> string) -> [a] -> string
+
+     Example:
+       concatMapStrings (x: "a" + x) ["foo" "bar"]
+       => "afooabar"
+  */
+  concatMapStrings = f: xs: concatStrings (map f xs);
+
+  /* Like `concatMapStrings`, but the mapping function also receives
+     each element's (1-based) position.
+
+     Type: concatImapStrings :: (int -> a -> string) -> [a] -> string
+
+     Example:
+       concatImapStrings (pos: x: "${toString pos}-${x}") ["foo" "bar"]
+       => "1-foo2-bar"
+  */
+  concatImapStrings = f: xs: concatStrings (lib.imap1 f xs);
+
+  /* Place a separator element between each element of a list.
+
+     Type: intersperse :: a -> [a] -> [a]
+
+     Example:
+       intersperse "/" ["usr" "local" "bin"]
+       => ["usr" "/" "local" "/" "bin"]
+  */
+  intersperse =
+    # Separator to add between elements
+    separator:
+    # Input list
+    list:
+    if length list < 2
+    then list
+    else tail (lib.concatMap (x: [ separator x ]) list);
+
+ /* Concatenate a list of strings with a separator between each element
+
+ Type: concatStringsSep :: string -> [string] -> string
+
+ Example:
+ concatStringsSep "/" ["usr" "local" "bin"]
+ => "usr/local/bin"
+ */
+ concatStringsSep = builtins.concatStringsSep or (separator: list:
+ concatStrings (intersperse separator list));
+
+ /* Maps a function over a list of strings and then concatenates the
+ result with the specified separator interspersed between
+ elements.
+
+ Type: concatMapStringsSep :: string -> (string -> string) -> [string] -> string
+
+ Example:
+ concatMapStringsSep "-" (x: toUpper x) ["foo" "bar" "baz"]
+ => "FOO-BAR-BAZ"
+ */
+ concatMapStringsSep =
+ # Separator to add between elements
+ sep:
+ # Function to map over the list
+ f:
+ # List of input strings
+ list: concatStringsSep sep (map f list);
+
+ /* Same as `concatMapStringsSep`, but the mapping function
+ additionally receives the position of its argument.
+
+ Type: concatImapStringsSep :: string -> (int -> string -> string) -> [string] -> string
+
+ Example:
+ concatImapStringsSep "-" (pos: x: toString (x / pos)) [ 6 6 6 ]
+ => "6-3-2"
+ */
+ concatImapStringsSep =
+ # Separator to add between elements
+ sep:
+ # Function that receives elements and their positions
+ f:
+ # List of input strings
+ list: concatStringsSep sep (lib.imap1 f list);
+
+ /* Construct a Unix-style, colon-separated search path consisting of
+ the given `subDir` appended to each of the given paths.
+
+ Type: makeSearchPath :: string -> [string] -> string
+
+ Example:
+ makeSearchPath "bin" ["/root" "/usr" "/usr/local"]
+ => "/root/bin:/usr/bin:/usr/local/bin"
+ makeSearchPath "bin" [""]
+ => "/bin"
+ */
+ makeSearchPath =
+ # Directory name to append
+ subDir:
+ # List of base paths
+ paths:
+ concatStringsSep ":" (map (path: path + "/" + subDir) (builtins.filter (x: x != null) paths));
+
+ /* Construct a Unix-style search path by appending the given
+ `subDir` to the specified `output` of each of the packages. If no
+ output by the given name is found, fallback to `.out` and then to
+ the default.
+
+ Type: string -> string -> [package] -> string
+
+ Example:
+ makeSearchPathOutput "dev" "bin" [ pkgs.openssl pkgs.zlib ]
+ => "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-dev/bin:/nix/store/wwh7mhwh269sfjkm6k5665b5kgp7jrk2-zlib-1.2.8/bin"
+ */
+ makeSearchPathOutput =
+ # Package output to use
+ output:
+ # Directory name to append
+ subDir:
+ # List of packages
+ pkgs: makeSearchPath subDir (map (lib.getOutput output) pkgs);
+
+ /* Construct a library search path (such as RPATH) containing the
+ libraries for a set of packages
+
+ Example:
+ makeLibraryPath [ "/usr" "/usr/local" ]
+ => "/usr/lib:/usr/local/lib"
+ pkgs = import <nixpkgs> { }
+ makeLibraryPath [ pkgs.openssl pkgs.zlib ]
+ => "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r/lib:/nix/store/wwh7mhwh269sfjkm6k5665b5kgp7jrk2-zlib-1.2.8/lib"
+ */
+ makeLibraryPath = makeSearchPathOutput "lib" "lib";
+
+ /* Construct a binary search path (such as $PATH) containing the
+ binaries for a set of packages.
+
+ Example:
+ makeBinPath ["/root" "/usr" "/usr/local"]
+ => "/root/bin:/usr/bin:/usr/local/bin"
+ */
+ makeBinPath = makeSearchPathOutput "bin" "bin";
+
+
+ /* Construct a perl search path (such as $PERL5LIB)
+
+ Example:
+ pkgs = import <nixpkgs> { }
+ makePerlPath [ pkgs.perlPackages.libnet ]
+ => "/nix/store/n0m1fk9c960d8wlrs62sncnadygqqc6y-perl-Net-SMTP-1.25/lib/perl5/site_perl"
+ */
+ # FIXME(zimbatm): this should be moved in perl-specific code
+ makePerlPath = makeSearchPathOutput "lib" "lib/perl5/site_perl";
+
+ /* Construct a perl search path recursively including all dependencies (such as $PERL5LIB)
+
+ Example:
+ pkgs = import <nixpkgs> { }
+ makeFullPerlPath [ pkgs.perlPackages.CGI ]
+ => "/nix/store/fddivfrdc1xql02h9q500fpnqy12c74n-perl-CGI-4.38/lib/perl5/site_perl:/nix/store/8hsvdalmsxqkjg0c5ifigpf31vc4vsy2-perl-HTML-Parser-3.72/lib/perl5/site_perl:/nix/store/zhc7wh0xl8hz3y3f71nhlw1559iyvzld-perl-HTML-Tagset-3.20/lib/perl5/site_perl"
+ */
+ makeFullPerlPath = deps: makePerlPath (lib.misc.closePropagation deps);
+
+ /* Depending on the boolean `cond', return either the given string
+ or the empty string. Useful to concatenate against a bigger string.
+
+ Type: optionalString :: bool -> string -> string
+
+ Example:
+ optionalString true "some-string"
+ => "some-string"
+ optionalString false "some-string"
+ => ""
+ */
+ optionalString =
+ # Condition
+ cond:
+ # String to return if condition is true
+ string: if cond then string else "";
+
+  /* Determine whether a string starts with the given prefix.
+
+     Type: hasPrefix :: string -> string -> bool
+
+     Example:
+       hasPrefix "foo" "foobar"
+       => true
+       hasPrefix "foo" "barfoo"
+       => false
+  */
+  hasPrefix =
+    # Prefix to check for
+    pref:
+    # Input string
+    str:
+    substring 0 (stringLength pref) str == pref;
+
+  /* Determine whether a string ends with the given suffix.
+
+     Type: hasSuffix :: string -> string -> bool
+
+     Example:
+       hasSuffix "foo" "foobar"
+       => false
+       hasSuffix "foo" "barfoo"
+       => true
+  */
+  hasSuffix =
+    # Suffix to check for
+    suffix:
+    # Input string
+    content:
+    let
+      lenContent = stringLength content;
+      lenSuffix = stringLength suffix;
+    in
+    lenContent >= lenSuffix
+    && substring (lenContent - lenSuffix) lenContent content == suffix;
+
+ /* Determine whether a string contains the given infix
+
+ Type: hasInfix :: string -> string -> bool
+
+ Example:
+ hasInfix "bc" "abcd"
+ => true
+ hasInfix "ab" "abcd"
+ => true
+ hasInfix "cd" "abcd"
+ => true
+ hasInfix "foo" "abcd"
+ => false
+ */
+ hasInfix = infix: content:
+ let
+ drop = x: substring 1 (stringLength x) x;
+ in hasPrefix infix content
+ || content != "" && hasInfix infix (drop content);
+
+ /* Convert a string to a list of characters (i.e. singleton strings).
+ This allows you to, e.g., map a function over each character. However,
+ note that this will likely be horribly inefficient; Nix is not a
+ general purpose programming language. Complex string manipulations
+ should, if appropriate, be done in a derivation.
+ Also note that Nix treats strings as a list of bytes and thus doesn't
+ handle unicode.
+
+ Type: stringToCharacters :: string -> [string]
+
+ Example:
+ stringToCharacters ""
+ => [ ]
+ stringToCharacters "abc"
+ => [ "a" "b" "c" ]
+ stringToCharacters "💩"
+ => [ "�" "�" "�" "�" ]
+ */
+ stringToCharacters = s:
+ map (p: substring p 1 s) (lib.range 0 (stringLength s - 1));
+
+ /* Manipulate a string character by character and replace them by
+ strings before concatenating the results.
+
+ Type: stringAsChars :: (string -> string) -> string -> string
+
+ Example:
+ stringAsChars (x: if x == "a" then "i" else x) "nax"
+ => "nix"
+ */
+ stringAsChars =
+ # Function to map over each individual character
+ f:
+ # Input string
+ s: concatStrings (
+ map f (stringToCharacters s)
+ );
+
+ /* Escape occurrence of the elements of `list` in `string` by
+ prefixing it with a backslash.
+
+ Type: escape :: [string] -> string -> string
+
+ Example:
+ escape ["(" ")"] "(foo)"
+ => "\\(foo\\)"
+ */
+ escape = list: replaceChars list (map (c: "\\${c}") list);
+
+ /* Quote string to be used safely within the Bourne shell.
+
+ Type: escapeShellArg :: string -> string
+
+ Example:
+ escapeShellArg "esc'ape\nme"
+ => "'esc'\\''ape\nme'"
+ */
+ escapeShellArg = arg: "'${replaceStrings ["'"] ["'\\''"] (toString arg)}'";
+
+ /* Quote all arguments to be safely passed to the Bourne shell.
+
+ Type: escapeShellArgs :: [string] -> string
+
+ Example:
+ escapeShellArgs ["one" "two three" "four'five"]
+ => "'one' 'two three' 'four'\\''five'"
+ */
+ escapeShellArgs = concatMapStringsSep " " escapeShellArg;
+
+ /* Turn a string into a Nix expression representing that string
+
+ Type: string -> string
+
+ Example:
+ escapeNixString "hello\${}\n"
+ => "\"hello\\\${}\\n\""
+ */
+ escapeNixString = s: escape ["$"] (builtins.toJSON s);
+
+ # Obsolete - use replaceStrings instead.
+ replaceChars = builtins.replaceStrings or (
+ del: new: s:
+ let
+ substList = lib.zipLists del new;
+ subst = c:
+ let found = lib.findFirst (sub: sub.fst == c) null substList; in
+ if found == null then
+ c
+ else
+ found.snd;
+ in
+ stringAsChars subst s);
+
+ # Case conversion utilities.
+ lowerChars = stringToCharacters "abcdefghijklmnopqrstuvwxyz";
+ upperChars = stringToCharacters "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
+
+ /* Converts an ASCII string to lower-case.
+
+ Type: toLower :: string -> string
+
+ Example:
+ toLower "HOME"
+ => "home"
+ */
+ toLower = replaceChars upperChars lowerChars;
+
+ /* Converts an ASCII string to upper-case.
+
+ Type: toUpper :: string -> string
+
+ Example:
+ toUpper "home"
+ => "HOME"
+ */
+ toUpper = replaceChars lowerChars upperChars;
+
+ /* Appends string context from another string. This is an implementation
+ detail of Nix.
+
+ Strings in Nix carry an invisible `context` which is a list of strings
+ representing store paths. If the string is later used in a derivation
+ attribute, the derivation will properly populate the inputDrvs and
+ inputSrcs.
+
+ Example:
+ pkgs = import <nixpkgs> { };
+ addContextFrom pkgs.coreutils "bar"
+ => "bar"
+ */
+ addContextFrom = a: b: substring 0 0 a + b;
+
+ /* Cut a string with a separator and produces a list of strings which
+ were separated by this separator.
+
+ NOTE: this function is not performant and should never be used.
+
+ Example:
+ splitString "." "foo.bar.baz"
+ => [ "foo" "bar" "baz" ]
+ splitString "/" "/usr/local/bin"
+ => [ "" "usr" "local" "bin" ]
+ */
+ splitString = _sep: _s:
+ let
+ sep = addContextFrom _s _sep;
+ s = addContextFrom _sep _s;
+ sepLen = stringLength sep;
+ sLen = stringLength s;
+ lastSearch = sLen - sepLen;
+ startWithSep = startAt:
+ substring startAt sepLen s == sep;
+
+ recurse = index: startAt:
+ let cutUntil = i: [(substring startAt (i - startAt) s)]; in
+ if index <= lastSearch then
+ if startWithSep index then
+ let restartAt = index + sepLen; in
+ cutUntil index ++ recurse restartAt restartAt
+ else
+ recurse (index + 1) startAt
+ else
+ cutUntil sLen;
+ in
+ recurse 0 0;
+
+  /* Strip `prefix` from the start of `str` when it matches; otherwise
+     return `str` unchanged.
+
+     Type: string -> string -> string
+
+     Example:
+       removePrefix "foo." "foo.bar.baz"
+       => "bar.baz"
+       removePrefix "xxx" "foo.bar.baz"
+       => "foo.bar.baz"
+  */
+  removePrefix =
+    # Prefix to remove if it matches
+    prefix:
+    # Input string
+    str:
+    if hasPrefix prefix str
+    then
+      let preLen = stringLength prefix;
+      in substring preLen (stringLength str - preLen) str
+    else str;
+
+  /* Strip `suffix` from the end of `str` when it matches; otherwise
+     return `str` unchanged.
+
+     Type: string -> string -> string
+
+     Example:
+       removeSuffix "front" "homefront"
+       => "home"
+       removeSuffix "xxx" "homefront"
+       => "homefront"
+  */
+  removeSuffix =
+    # Suffix to remove if it matches
+    suffix:
+    # Input string
+    str:
+    let
+      sufLen = stringLength suffix;
+      sLen = stringLength str;
+    in
+    if sufLen <= sLen && substring (sLen - sufLen) sufLen str == suffix
+    then substring 0 (sLen - sufLen) str
+    else str;
+
+  /* Return true if version string v1 denotes an older version than v2.
+
+     Example:
+       versionOlder "1.1" "1.2"
+       => true
+       versionOlder "1.1" "1.1"
+       => false
+  */
+  versionOlder = v1: v2: builtins.compareVersions v1 v2 == -1;
+
+  /* Return true if version string v1 denotes a version equal to or
+     newer than v2.
+
+     Example:
+       versionAtLeast "1.1" "1.0"
+       => true
+       versionAtLeast "1.1" "1.1"
+       => true
+       versionAtLeast "1.1" "1.2"
+       => false
+  */
+  versionAtLeast = v1: v2: !versionOlder v1 v2;
+
+ /* This function takes an argument that's either a derivation or a
+ derivation's "name" attribute and extracts the version part from that
+ argument.
+
+ Example:
+ getVersion "youtube-dl-2016.01.01"
+ => "2016.01.01"
+ getVersion pkgs.youtube-dl
+ => "2016.01.01"
+ */
+ getVersion = x:
+ let
+ parse = drv: (builtins.parseDrvName drv).version;
+ in if isString x
+ then parse x
+ else x.version or (parse x.name);
+
+ /* Extract name with version from URL. Ask for separator which is
+ supposed to start extension.
+
+ Example:
+ nameFromURL "https://nixos.org/releases/nix/nix-1.7/nix-1.7-x86_64-linux.tar.bz2" "-"
+ => "nix"
+ nameFromURL "https://nixos.org/releases/nix/nix-1.7/nix-1.7-x86_64-linux.tar.bz2" "_"
+ => "nix-1.7-x86"
+ */
+ nameFromURL = url: sep:
+ let
+ components = splitString "/" url;
+ filename = lib.last components;
+ name = builtins.head (splitString sep filename);
+ in assert name != filename; name;
+
+ /* Create an --{enable,disable}-<feat> string that can be passed to
+ standard GNU Autoconf scripts.
+
+ Example:
+ enableFeature true "shared"
+ => "--enable-shared"
+ enableFeature false "shared"
+ => "--disable-shared"
+ */
+ enableFeature = enable: feat: "--${if enable then "enable" else "disable"}-${feat}";
+
+ /* Create an --{enable-<feat>=<value>,disable-<feat>} string that can be passed to
+ standard GNU Autoconf scripts.
+
+ Example:
+ enableFeatureAs true "shared" "foo"
+ => "--enable-shared=foo"
+ enableFeatureAs false "shared" (throw "ignored")
+ => "--disable-shared"
+ */
+ enableFeatureAs = enable: feat: value: enableFeature enable feat + optionalString enable "=${value}";
+
+ /* Create an --{with,without}-<feat> string that can be passed to
+ standard GNU Autoconf scripts.
+
+ Example:
+ withFeature true "shared"
+ => "--with-shared"
+ withFeature false "shared"
+ => "--without-shared"
+ */
+ withFeature = with_: feat: "--${if with_ then "with" else "without"}-${feat}";
+
+ /* Create an --{with-<feat>=<value>,without-<feat>} string that can be passed to
+ standard GNU Autoconf scripts.
+
+ Example:
+ withFeatureAs true "shared" "foo"
+ => "--with-shared=foo"
+ withFeatureAs false "shared" (throw "ignored")
+ => "--without-shared"
+ */
+ withFeatureAs = with_: feat: value: withFeature with_ feat + optionalString with_ "=${value}";
+
+ /* Create a fixed width string with additional prefix to match
+ required width.
+
+ This function will fail if the input string is longer than the
+ requested length.
+
+ Type: fixedWidthString :: int -> string -> string
+
+ Example:
+ fixedWidthString 5 "0" (toString 15)
+ => "00015"
+ */
+ fixedWidthString = width: filler: str:
+ let
+ strw = lib.stringLength str;
+ reqWidth = width - (lib.stringLength filler);
+ in
+ assert lib.assertMsg (strw <= width)
+ "fixedWidthString: requested string length (${
+ toString width}) must not be shorter than actual length (${
+ toString strw})";
+ if strw == width then str else filler + fixedWidthString reqWidth filler str;
+
+ /* Format a number adding leading zeroes up to fixed width.
+
+ Example:
+ fixedWidthNumber 5 15
+ => "00015"
+ */
+ fixedWidthNumber = width: n: fixedWidthString width "0" (toString n);
+
+ /* Check whether a value can be coerced to a string */
+ isCoercibleToString = x:
+ builtins.elem (builtins.typeOf x) [ "path" "string" "null" "int" "float" "bool" ] ||
+ (builtins.isList x && lib.all isCoercibleToString x) ||
+ x ? outPath ||
+ x ? __toString;
+
+ /* Check whether a value is a store path.
+
+ Example:
+ isStorePath "/nix/store/d945ibfx9x185xf04b890y4f9g3cbb63-python-2.7.11/bin/python"
+ => false
+ isStorePath "/nix/store/d945ibfx9x185xf04b890y4f9g3cbb63-python-2.7.11/"
+ => true
+ isStorePath pkgs.python
+ => true
+ isStorePath [] || isStorePath 42 || isStorePath {} || …
+ => false
+ */
+ isStorePath = x:
+ if isCoercibleToString x then
+ let str = toString x; in
+ builtins.substring 0 1 str == "/"
+ && dirOf str == builtins.storeDir
+ else
+ false;
+
+ /* Parse a string as an int.
+
+ Type: string -> int
+
+ Example:
+ toInt "1337"
+ => 1337
+ toInt "-4"
+ => -4
+ toInt "3.14"
+ => error: floating point JSON numbers are not supported
+ */
+ # Obviously, it is a bit hacky to use fromJSON this way.
+ toInt = str:
+ let may_be_int = builtins.fromJSON str; in
+ if builtins.isInt may_be_int
+ then may_be_int
+ else throw "Could not convert ${str} to int.";
+
+ /* Read a list of paths from `file`, relative to the `rootPath`.
+ Lines beginning with `#` are treated as comments and ignored.
+ Whitespace is significant.
+
+ NOTE: This function is not performant and should be avoided.
+
+ Example:
+ readPathsFromFile /prefix
+ ./pkgs/development/libraries/qt-5/5.4/qtbase/series
+ => [ "/prefix/dlopen-resolv.patch" "/prefix/tzdir.patch"
+ "/prefix/dlopen-libXcursor.patch" "/prefix/dlopen-openssl.patch"
+ "/prefix/dlopen-dbus.patch" "/prefix/xdg-config-dirs.patch"
+ "/prefix/nix-profiles-library-paths.patch"
+ "/prefix/compose-search-path.patch" ]
+ */
+ readPathsFromFile = rootPath: file:
+ let
+ lines = lib.splitString "\n" (builtins.readFile file);
+ removeComments = lib.filter (line: line != "" && !(lib.hasPrefix "#" line));
+ relativePaths = removeComments lines;
+ absolutePaths = builtins.map (path: rootPath + "/${path}") relativePaths;
+ in
+ absolutePaths;
+
+ /* Read the contents of a file removing the trailing \n
+
+ Type: fileContents :: path -> string
+
+ Example:
+ $ echo "1.0" > ./version
+
+ fileContents ./version
+ => "1.0"
+ */
+ fileContents = file: removeSuffix "\n" (builtins.readFile file);
+}
diff --git a/ofborg/tickborg/test-srcs/make-maintainer-pr.sh b/ofborg/tickborg/test-srcs/make-maintainer-pr.sh
new file mode 100755
index 0000000000..940fa8c46c
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/make-maintainer-pr.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+set -eu
+
+# Build a test fixture: a bare git repo at $1 plus a working checkout at $2
+# with the "maintainers" sources on master and a "maintainers-pr" change
+# pushed as PR #1 (refs/pull/1/head). Prints the checkout's HEAD sha on
+# stdout; all other output goes to stderr.
+bare=$1
+co=$2
+
+# Isolate git from user/system configuration so the generated commits are
+# reproducible regardless of the developer's environment.
+export GIT_CONFIG_GLOBAL=/dev/null
+export GIT_CONFIG_NOSYSTEM=1
+export GIT_AUTHOR_NAME="GrahamCOfBorg"
+export GIT_AUTHOR_EMAIL="graham+cofborg@example.com"
+export GIT_COMMITTER_NAME="$GIT_AUTHOR_NAME"
+export GIT_COMMITTER_EMAIL="$GIT_AUTHOR_EMAIL"
+
+makepr() {
+ git init --bare "$bare"
+ git clone "$bare" "$co"
+
+ cp -r maintainers/* "$co/"
+ git -C "$co" add .
+ git -C "$co" commit -m "initial repo commit"
+ git -C "$co" push origin master
+
+ cp maintainers-pr/* "$co/"
+ git -C "$co" checkout -b my-cool-pr
+ git -C "$co" add .
+ git -C "$co" commit -m "check out this cool PR"
+ # GitHub-style PR ref, so consumers can fetch pull/1/head
+ git -C "$co" push origin my-cool-pr:refs/pull/1/head
+}
+
+# Send progress chatter to stderr; stdout carries only the commit id below.
+makepr >&2
+git -C "$co" rev-parse HEAD
diff --git a/ofborg/tickborg/test-srcs/make-pr.sh b/ofborg/tickborg/test-srcs/make-pr.sh
new file mode 100755
index 0000000000..2548fb6fdc
--- /dev/null
+++ b/ofborg/tickborg/test-srcs/make-pr.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+set -eu
+
+# Build a test fixture: a bare git repo at $1 plus a working checkout at $2
+# with the "build" sources on master and a "build-pr" change pushed as
+# PR #1 (refs/pull/1/head). Prints the checkout's HEAD sha on stdout; all
+# other output goes to stderr.
+bare=$1
+co=$2
+
+# Isolate git from user/system configuration so the generated commits are
+# reproducible regardless of the developer's environment.
+export GIT_CONFIG_GLOBAL=/dev/null
+export GIT_CONFIG_NOSYSTEM=1
+export GIT_AUTHOR_NAME="GrahamCOfBorg"
+export GIT_AUTHOR_EMAIL="graham+cofborg@example.com"
+export GIT_COMMITTER_NAME="$GIT_AUTHOR_NAME"
+export GIT_COMMITTER_EMAIL="$GIT_AUTHOR_EMAIL"
+
+makepr() {
+ git init --bare "$bare"
+ git clone "$bare" "$co"
+
+ cp build/* "$co/"
+ git -C "$co" add .
+ git -C "$co" commit -m "initial repo commit"
+ git -C "$co" push origin master
+
+ cp build-pr/* "$co/"
+ git -C "$co" checkout -b my-cool-pr
+ git -C "$co" add .
+ git -C "$co" commit -m "check out this cool PR"
+ # GitHub-style PR ref, so consumers can fetch pull/1/head
+ git -C "$co" push origin my-cool-pr:refs/pull/1/head
+}
+
+# Send progress chatter to stderr; stdout carries only the commit id below.
+makepr >&2
+git -C "$co" rev-parse HEAD