summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.gitignore1
-rw-r--r--.rustfmt.toml29
-rw-r--r--COPYING24
-rw-r--r--Cargo.lock1972
-rw-r--r--Cargo.toml79
-rw-r--r--DCO40
-rw-r--r--Documentation/.gitignore1
-rw-r--r--Documentation/Makefile4
-rw-r--r--Documentation/getting-started.adoc310
-rw-r--r--Documentation/monospace.css72
-rw-r--r--Documentation/spec.adoc1373
-rw-r--r--LICENSES/Apache-2.0202
-rw-r--r--LICENSES/CC-BY-SA-4.0427
-rw-r--r--LICENSES/GPL-2.0339
-rw-r--r--LICENSES/MIT20
-rw-r--r--LICENSES/OpenSSL123
-rw-r--r--LICENSES/exceptions/openvpn-openssl-exception10
-rw-r--r--README.md36
-rw-r--r--TODO55
-rw-r--r--src/bin/it.rs181
-rw-r--r--src/bundle.rs114
-rw-r--r--src/bundle/error.rs31
-rw-r--r--src/bundle/fetch.rs130
-rw-r--r--src/bundle/header.rs365
-rw-r--r--src/bundle/list.rs335
-rw-r--r--src/cfg.rs180
-rw-r--r--src/cmd.rs117
-rw-r--r--src/cmd/drop.rs205
-rw-r--r--src/cmd/drop/bundles.rs32
-rw-r--r--src/cmd/drop/bundles/prune.rs113
-rw-r--r--src/cmd/drop/bundles/sync.rs276
-rw-r--r--src/cmd/drop/edit.rs368
-rw-r--r--src/cmd/drop/init.rs194
-rw-r--r--src/cmd/drop/serve.rs140
-rw-r--r--src/cmd/drop/show.rs208
-rw-r--r--src/cmd/drop/snapshot.rs20
-rw-r--r--src/cmd/drop/unbundle.rs93
-rw-r--r--src/cmd/id.rs188
-rw-r--r--src/cmd/id/edit.rs209
-rw-r--r--src/cmd/id/init.rs230
-rw-r--r--src/cmd/id/show.rs75
-rw-r--r--src/cmd/id/sign.rs221
-rw-r--r--src/cmd/mergepoint.rs75
-rw-r--r--src/cmd/patch.rs77
-rw-r--r--src/cmd/patch/create.rs483
-rw-r--r--src/cmd/patch/prepare.rs615
-rw-r--r--src/cmd/topic.rs58
-rw-r--r--src/cmd/topic/comment.rs68
-rw-r--r--src/cmd/topic/ls.rs32
-rw-r--r--src/cmd/topic/show.rs34
-rw-r--r--src/cmd/topic/unbundle.rs174
-rw-r--r--src/cmd/ui.rs131
-rw-r--r--src/cmd/ui/editor.rs228
-rw-r--r--src/cmd/ui/output.rs44
-rw-r--r--src/cmd/util.rs4
-rw-r--r--src/cmd/util/args.rs139
-rw-r--r--src/error.rs12
-rw-r--r--src/fs.rs192
-rw-r--r--src/git.rs111
-rw-r--r--src/git/commit.rs46
-rw-r--r--src/git/config.rs31
-rw-r--r--src/git/refs.rs327
-rw-r--r--src/git/repo.rs93
-rw-r--r--src/git/serde.rs61
-rw-r--r--src/http.rs355
-rw-r--r--src/io.rs146
-rw-r--r--src/iter.rs109
-rw-r--r--src/json.rs49
-rw-r--r--src/json/canonical.rs166
-rw-r--r--src/keys.rs206
-rw-r--r--src/lib.rs33
-rw-r--r--src/metadata.rs749
-rw-r--r--src/metadata/drop.rs274
-rw-r--r--src/metadata/error.rs40
-rw-r--r--src/metadata/git.rs232
-rw-r--r--src/metadata/identity.rs366
-rw-r--r--src/metadata/mirrors.rs95
-rw-r--r--src/patches.rs212
-rw-r--r--src/patches/bundle.rs344
-rw-r--r--src/patches/error.rs29
-rw-r--r--src/patches/iter.rs395
-rw-r--r--src/patches/notes.rs181
-rw-r--r--src/patches/record.rs472
-rw-r--r--src/patches/state.rs231
-rw-r--r--src/patches/submit.rs574
-rw-r--r--src/patches/traits.rs165
-rw-r--r--src/serde.rs28
-rw-r--r--src/ssh.rs5
-rw-r--r--src/ssh/agent.rs279
-rw-r--r--src/str.rs94
90 files changed, 18006 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..ea8c4bf
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+/target
diff --git a/.rustfmt.toml b/.rustfmt.toml
new file mode 100644
index 0000000..5290ecd
--- /dev/null
+++ b/.rustfmt.toml
@@ -0,0 +1,29 @@
+max_width = 100
+comment_width = 80
+
+wrap_comments = true
+hard_tabs = false
+tab_spaces = 4
+imports_layout = "Vertical"
+imports_granularity = "Crate"
+
+newline_style = "Unix"
+use_small_heuristics = "Default"
+
+reorder_imports = true
+reorder_modules = true
+
+remove_nested_parens = true
+
+fn_args_layout = "Tall"
+
+edition = "2021"
+
+match_block_trailing_comma = true
+
+merge_derives = true
+
+use_try_shorthand = false
+use_field_init_shorthand = false
+
+force_explicit_abi = true
diff --git a/COPYING b/COPYING
new file mode 100644
index 0000000..f8c8b86
--- /dev/null
+++ b/COPYING
@@ -0,0 +1,24 @@
+`it` is provided under:
+
+ SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+Being under the terms of the GNU General Public License version 2 only,
+according with:
+
+ LICENSES/GPL-2.0
+
+With an explicit exception regarding OpenSSL, as stated at:
+
+ LICENSES/exceptions/openvpn-openssl-exception
+
+All documentation, including specification documents, are provided under the
+Creative Commons Attribution-ShareAlike 4.0 International license, according
+with:
+
+ LICENSES/CC-BY-SA-4.0
+
+In addition, other licenses may also apply. In particular, individual source
+code files may be dual-licensed by their respective copyright holders, as
+indicated in the 'SPDX-License-Identifier' header of the file.
+
+All contributions to `it` are subject to this COPYING file.
diff --git a/Cargo.lock b/Cargo.lock
new file mode 100644
index 0000000..ad589a9
--- /dev/null
+++ b/Cargo.lock
@@ -0,0 +1,1972 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "addr2line"
+version = "0.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "aes"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "433cfd6710c9986c576a25ca913c39d66a6474107b406f34f91d4a8923395241"
+dependencies = [
+ "cfg-if",
+ "cipher",
+ "cpufeatures",
+]
+
+[[package]]
+name = "aho-corasick"
+version = "0.7.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.66"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6"
+dependencies = [
+ "backtrace",
+]
+
+[[package]]
+name = "ascii"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d92bec98840b8f03a5ff5413de5293bfcd8bf96467cf5452609f939ec6f5de16"
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "backtrace"
+version = "0.3.66"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7"
+dependencies = [
+ "addr2line",
+ "cc",
+ "cfg-if",
+ "libc",
+ "miniz_oxide 0.5.4",
+ "object",
+ "rustc-demangle",
+]
+
+[[package]]
+name = "base16ct"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce"
+
+[[package]]
+name = "base64"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
+
+[[package]]
+name = "base64ct"
+version = "1.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf"
+
+[[package]]
+name = "bcrypt-pbkdf"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3806a8db60cf56efee531616a34a6aaa9a114d6da2add861b0fa4a188881b2c7"
+dependencies = [
+ "blowfish",
+ "pbkdf2",
+ "sha2 0.10.6",
+]
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "block-buffer"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "block-buffer"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "blowfish"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e412e2cd0f2b2d93e02543ceae7917b3c70331573df19ee046bcbc35e45e87d7"
+dependencies = [
+ "byteorder",
+ "cipher",
+]
+
+[[package]]
+name = "bstr"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "byteorder"
+version = "1.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
+
+[[package]]
+name = "cc"
+version = "1.0.77"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e9f73505338f7d905b19d18738976aae232eb46b8efc15554ffc56deb5d9ebe4"
+dependencies = [
+ "jobserver",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "chunked_transfer"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fff857943da45f546682664a79488be82e69e43c1a7a2307679ab9afb3a66d2e"
+
+[[package]]
+name = "cipher"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d1873270f8f7942c191139cb8a40fd228da6c3fd2fc376d7e92d47aa14aeb59e"
+dependencies = [
+ "crypto-common",
+ "inout",
+]
+
+[[package]]
+name = "clap"
+version = "4.0.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d63b9e9c07271b9957ad22c173bae2a4d9a81127680962039296abcd2f8251d"
+dependencies = [
+ "bitflags",
+ "clap_derive",
+ "clap_lex",
+ "is-terminal",
+ "once_cell",
+ "strsim",
+ "termcolor",
+ "terminal_size 0.2.3",
+]
+
+[[package]]
+name = "clap_complete"
+version = "4.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b7b3c9eae0de7bf8e3f904a5e40612b21fb2e2e566456d177809a48b892d24da"
+dependencies = [
+ "clap",
+]
+
+[[package]]
+name = "clap_derive"
+version = "4.0.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0177313f9f02afc995627906bbd8967e2be069f5261954222dac78290c2b9014"
+dependencies = [
+ "heck",
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "clap_lex"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d4198f73e42b4936b35b5bb248d81d2b595ecb170da0bac7655c54eedfa8da8"
+dependencies = [
+ "os_str_bytes",
+]
+
+[[package]]
+name = "clap_mangen"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e503c3058af0a0854668ea01db55c622482a080092fede9dd2e00a00a9436504"
+dependencies = [
+ "clap",
+ "roff",
+]
+
+[[package]]
+name = "console"
+version = "0.15.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c050367d967ced717c04b65d8c619d863ef9292ce0c5760028655a2fb298718c"
+dependencies = [
+ "encode_unicode",
+ "lazy_static",
+ "libc",
+ "terminal_size 0.1.17",
+ "winapi",
+]
+
+[[package]]
+name = "const-oid"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cec318a675afcb6a1ea1d4340e2d377e56e47c266f28043ceccbf4412ddfdd3b"
+
+[[package]]
+name = "core-foundation"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "core-foundation-sys"
+version = "0.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc"
+
+[[package]]
+name = "cpufeatures"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "crc32fast"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "crypto-bigint"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef"
+dependencies = [
+ "generic-array",
+ "rand_core 0.6.4",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "crypto-common"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
+dependencies = [
+ "generic-array",
+ "typenum",
+]
+
+[[package]]
+name = "ctr"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835"
+dependencies = [
+ "cipher",
+]
+
+[[package]]
+name = "curve25519-dalek"
+version = "3.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b9fdf9972b2bd6af2d913799d9ebc165ea4d2e65878e329d9c6b372c4491b61"
+dependencies = [
+ "byteorder",
+ "digest 0.9.0",
+ "rand_core 0.5.1",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "der"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de"
+dependencies = [
+ "const-oid",
+ "pem-rfc7468",
+ "zeroize",
+]
+
+[[package]]
+name = "digest"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "digest"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f"
+dependencies = [
+ "block-buffer 0.10.3",
+ "const-oid",
+ "crypto-common",
+ "subtle",
+]
+
+[[package]]
+name = "directories"
+version = "4.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f51c5d4ddabd36886dd3e1438cb358cdcb0d7c499cb99cb4ac2e38e18b5cb210"
+dependencies = [
+ "dirs-sys",
+]
+
+[[package]]
+name = "dirs-sys"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6"
+dependencies = [
+ "libc",
+ "redox_users",
+ "winapi",
+]
+
+[[package]]
+name = "ecdsa"
+version = "0.14.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c"
+dependencies = [
+ "der",
+ "elliptic-curve",
+ "rfc6979",
+ "signature",
+]
+
+[[package]]
+name = "ed25519"
+version = "1.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e9c280362032ea4203659fc489832d0204ef09f247a0506f170dafcac08c369"
+dependencies = [
+ "signature",
+]
+
+[[package]]
+name = "ed25519-dalek"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d"
+dependencies = [
+ "curve25519-dalek",
+ "ed25519",
+ "rand 0.7.3",
+ "serde",
+ "sha2 0.9.9",
+ "zeroize",
+]
+
+[[package]]
+name = "either"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
+
+[[package]]
+name = "elliptic-curve"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3"
+dependencies = [
+ "base16ct",
+ "crypto-bigint",
+ "der",
+ "digest 0.10.6",
+ "ff",
+ "generic-array",
+ "group",
+ "rand_core 0.6.4",
+ "sec1",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "encode_unicode"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
+
+[[package]]
+name = "erased-serde"
+version = "0.3.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "54558e0ba96fbe24280072642eceb9d7d442e32c7ec0ea9e7ecd7b4ea2cf4e11"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "errno"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1"
+dependencies = [
+ "errno-dragonfly",
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "errno-dragonfly"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
+dependencies = [
+ "cc",
+ "libc",
+]
+
+[[package]]
+name = "fastrand"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499"
+dependencies = [
+ "instant",
+]
+
+[[package]]
+name = "ff"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160"
+dependencies = [
+ "rand_core 0.6.4",
+ "subtle",
+]
+
+[[package]]
+name = "flate2"
+version = "1.0.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
+dependencies = [
+ "crc32fast",
+ "miniz_oxide 0.6.2",
+]
+
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
+name = "foreign-types"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
+dependencies = [
+ "foreign-types-shared",
+]
+
+[[package]]
+name = "foreign-types-shared"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8"
+dependencies = [
+ "percent-encoding",
+]
+
+[[package]]
+name = "generic-array"
+version = "0.14.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9"
+dependencies = [
+ "typenum",
+ "version_check",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.1.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi 0.9.0+wasi-snapshot-preview1",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi 0.11.0+wasi-snapshot-preview1",
+]
+
+[[package]]
+name = "gimli"
+version = "0.26.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d"
+
+[[package]]
+name = "git2"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2994bee4a3a6a51eb90c218523be382fd7ea09b16380b9312e9dbe955ff7c7d1"
+dependencies = [
+ "bitflags",
+ "libc",
+ "libgit2-sys",
+ "log",
+ "url",
+]
+
+[[package]]
+name = "globset"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0a1e17342619edbc21a964c2afbeb6c820c6a2560032872f397bb97ea127bd0a"
+dependencies = [
+ "aho-corasick",
+ "bstr",
+ "fnv",
+ "log",
+ "regex",
+]
+
+[[package]]
+name = "group"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7"
+dependencies = [
+ "ff",
+ "rand_core 0.6.4",
+ "subtle",
+]
+
+[[package]]
+name = "heck"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "hex"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "hmac"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
+dependencies = [
+ "digest 0.10.6",
+]
+
+[[package]]
+name = "idna"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6"
+dependencies = [
+ "unicode-bidi",
+ "unicode-normalization",
+]
+
+[[package]]
+name = "inout"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "instant"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "io-lifetimes"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "46112a93252b123d31a119a8d1a1ac19deac4fac6e0e8b0df58f0d4e5870e63c"
+dependencies = [
+ "libc",
+ "windows-sys 0.42.0",
+]
+
+[[package]]
+name = "is-terminal"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "927609f78c2913a6f6ac3c27a4fe87f43e2a35367c0c4b0f8265e8f49a104330"
+dependencies = [
+ "hermit-abi 0.2.6",
+ "io-lifetimes",
+ "rustix",
+ "windows-sys 0.42.0",
+]
+
+[[package]]
+name = "it"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "base64",
+ "clap",
+ "clap_complete",
+ "clap_mangen",
+ "console",
+ "directories",
+ "either",
+ "erased-serde",
+ "git2",
+ "globset",
+ "hex",
+ "log",
+ "multipart",
+ "num_cpus",
+ "once_cell",
+ "rand_core 0.6.4",
+ "serde",
+ "serde_json",
+ "sha1collisiondetection",
+ "sha2 0.10.6",
+ "shlex",
+ "signature",
+ "ssh-encoding",
+ "ssh-key",
+ "tempfile",
+ "thiserror",
+ "threadpool",
+ "time",
+ "tiny_http",
+ "uds_windows",
+ "unicode-normalization",
+ "ureq",
+ "url",
+ "versions",
+ "zeroize",
+]
+
+[[package]]
+name = "itertools"
+version = "0.10.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
+
+[[package]]
+name = "jobserver"
+version = "0.1.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+dependencies = [
+ "spin",
+]
+
+[[package]]
+name = "libc"
+version = "0.2.138"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db6d7e329c562c5dfab7a46a2afabc8b987ab9a4834c9d1ca04dc54c1546cef8"
+
+[[package]]
+name = "libgit2-sys"
+version = "0.14.0+1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47a00859c70c8a4f7218e6d1cc32875c4b55f6799445b842b0d8ed5e4c3d959b"
+dependencies = [
+ "cc",
+ "libc",
+ "libz-sys",
+ "pkg-config",
+]
+
+[[package]]
+name = "libm"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb"
+
+[[package]]
+name = "libz-sys"
+version = "1.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf"
+dependencies = [
+ "cc",
+ "libc",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "linux-raw-sys"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f9f08d8963a6c613f4b1a78f4f4a4dbfadf8e6545b2d72861731e4858b8b47f"
+
+[[package]]
+name = "log"
+version = "0.4.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "memchr"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+
+[[package]]
+name = "mime"
+version = "0.3.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d"
+
+[[package]]
+name = "mime_guess"
+version = "2.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef"
+dependencies = [
+ "mime",
+ "unicase",
+]
+
+[[package]]
+name = "minimal-lexical"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+
+[[package]]
+name = "miniz_oxide"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34"
+dependencies = [
+ "adler",
+]
+
+[[package]]
+name = "miniz_oxide"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa"
+dependencies = [
+ "adler",
+]
+
+[[package]]
+name = "multipart"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "00dec633863867f29cb39df64a397cdf4a6354708ddd7759f70c7fb51c5f9182"
+dependencies = [
+ "log",
+ "mime",
+ "mime_guess",
+ "rand 0.8.5",
+ "tempfile",
+]
+
+[[package]]
+name = "native-tls"
+version = "0.2.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e"
+dependencies = [
+ "lazy_static",
+ "libc",
+ "log",
+ "openssl",
+ "openssl-probe",
+ "openssl-sys",
+ "schannel",
+ "security-framework",
+ "security-framework-sys",
+ "tempfile",
+]
+
+[[package]]
+name = "nom"
+version = "7.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8903e5a29a317527874d0402f867152a3d21c908bb0b933e416c65e301d4c36"
+dependencies = [
+ "memchr",
+ "minimal-lexical",
+]
+
+[[package]]
+name = "num-bigint-dig"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2399c9463abc5f909349d8aa9ba080e0b88b3ce2885389b60b993f39b1a56905"
+dependencies = [
+ "byteorder",
+ "lazy_static",
+ "libm",
+ "num-integer",
+ "num-iter",
+ "num-traits",
+ "rand 0.8.5",
+ "serde",
+ "smallvec",
+ "zeroize",
+]
+
+[[package]]
+name = "num-integer"
+version = "0.1.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9"
+dependencies = [
+ "autocfg",
+ "num-traits",
+]
+
+[[package]]
+name = "num-iter"
+version = "0.1.43"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252"
+dependencies = [
+ "autocfg",
+ "num-integer",
+ "num-traits",
+]
+
+[[package]]
+name = "num-traits"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
+dependencies = [
+ "autocfg",
+ "libm",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6058e64324c71e02bc2b150e4f3bc8286db6c83092132ffa3f6b1eab0f9def5"
+dependencies = [
+ "hermit-abi 0.1.19",
+ "libc",
+]
+
+[[package]]
+name = "object"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860"
+
+[[package]]
+name = "opaque-debug"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
+
+[[package]]
+name = "openssl"
+version = "0.10.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "29d971fd5722fec23977260f6e81aa67d2f22cadbdc2aa049f1022d9a3be1566"
+dependencies = [
+ "bitflags",
+ "cfg-if",
+ "foreign-types",
+ "libc",
+ "once_cell",
+ "openssl-macros",
+ "openssl-sys",
+]
+
+[[package]]
+name = "openssl-macros"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "openssl-probe"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
+
+[[package]]
+name = "openssl-sys"
+version = "0.9.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5454462c0eced1e97f2ec09036abc8da362e66802f66fd20f86854d9d8cbcbc4"
+dependencies = [
+ "autocfg",
+ "cc",
+ "libc",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "os_str_bytes"
+version = "6.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee"
+
+[[package]]
+name = "p256"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "51f44edd08f51e2ade572f141051021c5af22677e42b7dd28a88155151c33594"
+dependencies = [
+ "ecdsa",
+ "elliptic-curve",
+ "sha2 0.10.6",
+]
+
+[[package]]
+name = "p384"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dfc8c5bf642dde52bb9e87c0ecd8ca5a76faac2eeed98dedb7c717997e1080aa"
+dependencies = [
+ "ecdsa",
+ "elliptic-curve",
+ "sha2 0.10.6",
+]
+
+[[package]]
+name = "pbkdf2"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917"
+dependencies = [
+ "digest 0.10.6",
+]
+
+[[package]]
+name = "pem-rfc7468"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24d159833a9105500e0398934e205e0773f0b27529557134ecfc51c27646adac"
+dependencies = [
+ "base64ct",
+]
+
+[[package]]
+name = "percent-encoding"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
+
+[[package]]
+name = "pkcs1"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eff33bdbdfc54cc98a2eca766ebdec3e1b8fb7387523d5c9c9a2891da856f719"
+dependencies = [
+ "der",
+ "pkcs8",
+ "spki",
+ "zeroize",
+]
+
+[[package]]
+name = "pkcs8"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba"
+dependencies = [
+ "der",
+ "spki",
+]
+
+[[package]]
+name = "pkg-config"
+version = "0.3.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160"
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
+
+[[package]]
+name = "proc-macro-error"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
+dependencies = [
+ "proc-macro-error-attr",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro-error-attr"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.47"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rand"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
+dependencies = [
+ "getrandom 0.1.16",
+ "libc",
+ "rand_chacha 0.2.2",
+ "rand_core 0.5.1",
+ "rand_hc",
+]
+
+[[package]]
+name = "rand"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+dependencies = [
+ "libc",
+ "rand_chacha 0.3.1",
+ "rand_core 0.6.4",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
+dependencies = [
+ "ppv-lite86",
+ "rand_core 0.5.1",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+dependencies = [
+ "ppv-lite86",
+ "rand_core 0.6.4",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
+dependencies = [
+ "getrandom 0.1.16",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+dependencies = [
+ "getrandom 0.2.8",
+]
+
+[[package]]
+name = "rand_hc"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
+dependencies = [
+ "rand_core 0.5.1",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "redox_users"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b"
+dependencies = [
+ "getrandom 0.2.8",
+ "redox_syscall",
+ "thiserror",
+]
+
+[[package]]
+name = "regex"
+version = "1.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
+
+[[package]]
+name = "remove_dir_all"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "rfc6979"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb"
+dependencies = [
+ "crypto-bigint",
+ "hmac",
+ "zeroize",
+]
+
+[[package]]
+name = "roff"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b833d8d034ea094b1ea68aa6d5c740e0d04bad9d16568d08ba6f76823a114316"
+
+[[package]]
+name = "rsa"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "094052d5470cbcef561cb848a7209968c9f12dfa6d668f4bca048ac5de51099c"
+dependencies = [
+ "byteorder",
+ "digest 0.10.6",
+ "num-bigint-dig",
+ "num-integer",
+ "num-iter",
+ "num-traits",
+ "pkcs1",
+ "pkcs8",
+ "rand_core 0.6.4",
+ "signature",
+ "smallvec",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "rustc-demangle"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
+
+[[package]]
+name = "rustix"
+version = "0.36.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3807b5d10909833d3e9acd1eb5fb988f79376ff10fce42937de71a449c4c588"
+dependencies = [
+ "bitflags",
+ "errno",
+ "io-lifetimes",
+ "libc",
+ "linux-raw-sys",
+ "windows-sys 0.42.0",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
+
+[[package]]
+name = "schannel"
+version = "0.1.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88d6731146462ea25d9244b2ed5fd1d716d25c52e4d54aa4fb0f3c4e9854dbe2"
+dependencies = [
+ "lazy_static",
+ "windows-sys 0.36.1",
+]
+
+[[package]]
+name = "sec1"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928"
+dependencies = [
+ "base16ct",
+ "der",
+ "generic-array",
+ "pkcs8",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "security-framework"
+version = "2.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2bc1bb97804af6631813c55739f771071e0f2ed33ee20b68c86ec505d906356c"
+dependencies = [
+ "bitflags",
+ "core-foundation",
+ "core-foundation-sys",
+ "libc",
+ "security-framework-sys",
+]
+
+[[package]]
+name = "security-framework-sys"
+version = "2.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0160a13a177a45bfb43ce71c01580998474f556ad854dcbca936dd2841a5c556"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.149"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "256b9932320c590e707b94576e3cc1f7c9024d0ee6612dfbcf1cb106cbe8e055"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.149"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4eae9b04cbffdfd550eb462ed33bc6a1b68c935127d008b27444d08380f94e4"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.89"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "020ff22c755c2ed3f8cf162dbb41a7268d934702f3ed3631656ea597e08fc3db"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "sha1collisiondetection"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c66558a774ef5044cb4a834db5f5c7f95e139d2341d7f502fe6034afa7082461"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "sha2"
+version = "0.9.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800"
+dependencies = [
+ "block-buffer 0.9.0",
+ "cfg-if",
+ "cpufeatures",
+ "digest 0.9.0",
+ "opaque-debug",
+]
+
+[[package]]
+name = "sha2"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest 0.10.6",
+]
+
+[[package]]
+name = "shlex"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3"
+
+[[package]]
+name = "signature"
+version = "1.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c"
+dependencies = [
+ "digest 0.10.6",
+ "rand_core 0.6.4",
+]
+
+[[package]]
+name = "smallvec"
+version = "1.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
+
+[[package]]
+name = "spin"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
+
+[[package]]
+name = "spki"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b"
+dependencies = [
+ "base64ct",
+ "der",
+]
+
+[[package]]
+name = "ssh-encoding"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19cfdc32e0199062113edf41f344fbf784b8205a94600233c84eb838f45191e1"
+dependencies = [
+ "base64ct",
+ "pem-rfc7468",
+ "sha2 0.10.6",
+]
+
+[[package]]
+name = "ssh-key"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "288d8f5562af5a3be4bda308dd374b2c807b940ac370b5efa1c99311da91d9a1"
+dependencies = [
+ "aes",
+ "bcrypt-pbkdf",
+ "ctr",
+ "ed25519-dalek",
+ "num-bigint-dig",
+ "p256",
+ "p384",
+ "rand_core 0.6.4",
+ "rsa",
+ "sec1",
+ "sha2 0.10.6",
+ "signature",
+ "ssh-encoding",
+ "zeroize",
+]
+
+[[package]]
+name = "strsim"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
+
+[[package]]
+name = "subtle"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
+
+[[package]]
+name = "syn"
+version = "1.0.105"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "60b9b43d45702de4c839cb9b51d9f529c5dd26a4aff255b42b1ebc03e88ee908"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "synstructure"
+version = "0.12.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "unicode-xid",
+]
+
+[[package]]
+name = "tempfile"
+version = "3.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4"
+dependencies = [
+ "cfg-if",
+ "fastrand",
+ "libc",
+ "redox_syscall",
+ "remove_dir_all",
+ "winapi",
+]
+
+[[package]]
+name = "termcolor"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "terminal_size"
+version = "0.1.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "633c1a546cee861a1a6d0dc69ebeca693bf4296661ba7852b9d21d159e0506df"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "terminal_size"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb20089a8ba2b69debd491f8d2d023761cbf196e999218c591fa1e7e15a21907"
+dependencies = [
+ "rustix",
+ "windows-sys 0.42.0",
+]
+
+[[package]]
+name = "thiserror"
+version = "1.0.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e"
+dependencies = [
+ "thiserror-impl",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "threadpool"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
+dependencies = [
+ "num_cpus",
+]
+
+[[package]]
+name = "time"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376"
+dependencies = [
+ "itoa",
+ "serde",
+ "time-core",
+ "time-macros",
+]
+
+[[package]]
+name = "time-core"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd"
+
+[[package]]
+name = "time-macros"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2"
+dependencies = [
+ "time-core",
+]
+
+[[package]]
+name = "tiny_http"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0d6ef4e10d23c1efb862eecad25c5054429a71958b4eeef85eb5e7170b477ca"
+dependencies = [
+ "ascii",
+ "chunked_transfer",
+ "log",
+ "openssl",
+ "time",
+ "url",
+ "zeroize",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
+
+[[package]]
+name = "typenum"
+version = "1.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
+
+[[package]]
+name = "uds_windows"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce65604324d3cce9b966701489fbd0cf318cb1f7bd9dd07ac9a4ee6fb791930d"
+dependencies = [
+ "tempfile",
+ "winapi",
+]
+
+[[package]]
+name = "unicase"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
+dependencies = [
+ "version_check",
+]
+
+[[package]]
+name = "unicode-bidi"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
+
+[[package]]
+name = "unicode-normalization"
+version = "0.1.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921"
+dependencies = [
+ "tinyvec",
+]
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
+
+[[package]]
+name = "ureq"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b97acb4c28a254fd7a4aeec976c46a7fa404eac4d7c134b30c75144846d7cb8f"
+dependencies = [
+ "base64",
+ "chunked_transfer",
+ "flate2",
+ "log",
+ "native-tls",
+ "once_cell",
+ "serde",
+ "serde_json",
+ "url",
+]
+
+[[package]]
+name = "url"
+version = "2.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "percent-encoding",
+ "serde",
+]
+
+[[package]]
+name = "vcpkg"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
+
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
+[[package]]
+name = "versions"
+version = "4.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee97e1d97bd593fb513912a07691b742361b3dd64ad56f2c694ea2dbfe0665d3"
+dependencies = [
+ "itertools",
+ "nom",
+ "serde",
+]
+
+[[package]]
+name = "wasi"
+version = "0.9.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
+
+[[package]]
+name = "wasi"
+version = "0.11.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "windows-sys"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
+dependencies = [
+ "windows_aarch64_msvc 0.36.1",
+ "windows_i686_gnu 0.36.1",
+ "windows_i686_msvc 0.36.1",
+ "windows_x86_64_gnu 0.36.1",
+ "windows_x86_64_msvc 0.36.1",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
+dependencies = [
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc 0.42.0",
+ "windows_i686_gnu 0.42.0",
+ "windows_i686_msvc 0.42.0",
+ "windows_x86_64_gnu 0.42.0",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc 0.42.0",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5"
+
+[[package]]
+name = "zeroize"
+version = "1.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f"
+dependencies = [
+ "zeroize_derive",
+]
+
+[[package]]
+name = "zeroize_derive"
+version = "1.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44bf07cb3e50ea2003396695d58bf46bc9887a1f362260446fad6bc4e79bd36c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..b5ced98
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,79 @@
+[package]
+name = "it"
+version = "0.1.0"
+authors = ["Kim Altintop <kim@eagain.io>"]
+license = "GPL-2.0-only WITH openvpn-openssl-exception"
+
+edition = "2021"
+rust-version = "1.60"
+
+[features]
+default = ["vendored-libgit2"]
+vendored-libgit2 = ["git2/vendored-libgit2"]
+sha1dc = ["sha1collisiondetection"]
+
+[dependencies]
+anyhow.features = ["backtrace"]
+anyhow.version = "1"
+base64.version = "0.13"
+clap.features = ["derive", "env", "string", "wrap_help"]
+clap.version = "4.0"
+clap_complete.version = "4.0"
+clap_mangen.version = "0.2"
+console.default-features = false
+console.version = "0.15"
+directories.version = "4.0"
+either.version = "1.8"
+erased-serde.version = "0.3"
+git2.default-features = false
+git2.version = "0.15"
+globset.version = "0.4.9"
+hex.features = ["serde"]
+hex.version = "0.4"
+log.features = ["std"]
+log.version = "0.4"
+multipart.default-features = false
+multipart.features = ["client"]
+multipart.version = "0.18"
+num_cpus.version = "1.13"
+once_cell.version = "1.13"
+rand_core.features = ["getrandom"]
+rand_core.version = "0.6"
+serde.features = ["derive", "std", "rc"]
+serde.version = "1"
+serde_json.version = "1.0"
+sha2.version = "0.10"
+shlex.version = "1.1"
+signature.version = "1.6"
+ssh-encoding.version = "0.1"
+ssh-key.features = ["alloc", "ecdsa", "ed25519", "encryption", "p256", "rsa"]
+ssh-key.version = "0.5"
+tempfile.version = "3.3"
+thiserror.version = "1.0"
+threadpool.version = "1.8"
+time.features = ["serde-well-known"]
+time.version = "0.3.11"
+tiny_http.features = ["ssl-openssl"]
+tiny_http.version = "0.11"
+unicode-normalization.version = "0.1.21"
+ureq.default-features = false
+ureq.features = ["gzip", "json", "native-tls"]
+ureq.version = "2.5"
+url.features = ["serde"]
+url.version = "2.2"
+versions.features = ["serde"]
+versions.version = "4.1"
+zeroize.version = "1.5.7"
+
+#
+# Optionals
+#
+sha1collisiondetection.default-features = false
+sha1collisiondetection.optional = true
+sha1collisiondetection.version = "0.2"
+
+#
+# Platform specifics
+#
+[target.'cfg(windows)'.dependencies]
+uds_windows = "1.0"
diff --git a/DCO b/DCO
new file mode 100644
index 0000000..d53d3ad
--- /dev/null
+++ b/DCO
@@ -0,0 +1,40 @@
+ Developer Certificate of Origin
+
+By making your contribution, you are making the declaration set out in the
+Linux Foundation’s Developer Certificate of Origin version 1.1 as set out
+below, in which the “open source licence indicated in the file” is GPL-2.0
+
+Developer Certificate of Origin
+Version 1.1
+
+Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
+
+Everyone is permitted to copy and distribute verbatim copies of this
+license document, but changing it is not allowed.
+
+
+Developer's Certificate of Origin 1.1
+
+By making a contribution to this project, I certify that:
+
+(a) The contribution was created in whole or in part by me and I
+ have the right to submit it under the open source license
+ indicated in the file; or
+
+(b) The contribution is based upon previous work that, to the best
+ of my knowledge, is covered under an appropriate open source
+ license and I have the right under that license to submit that
+ work with modifications, whether created in whole or in part
+ by me, under the same open source license (unless I am
+ permitted to submit under a different license), as indicated
+ in the file; or
+
+(c) The contribution was provided directly to me by some other
+ person who certified (a), (b) or (c) and I have not modified
+ it.
+
+(d) I understand and agree that this project and the contribution
+ are public and that a record of the contribution (including all
+ personal information I submit with it, including my sign-off) is
+ maintained indefinitely and may be redistributed consistent with
+ this project or the open source license(s) involved.
diff --git a/Documentation/.gitignore b/Documentation/.gitignore
new file mode 100644
index 0000000..2d19fc7
--- /dev/null
+++ b/Documentation/.gitignore
@@ -0,0 +1 @@
+*.html
diff --git a/Documentation/Makefile b/Documentation/Makefile
new file mode 100644
index 0000000..0ad63a9
--- /dev/null
+++ b/Documentation/Makefile
@@ -0,0 +1,4 @@
+default: spec.html
+
+%.html: %.adoc
+ asciidoctor -v $<
diff --git a/Documentation/getting-started.adoc b/Documentation/getting-started.adoc
new file mode 100644
index 0000000..a5ca4bc
--- /dev/null
+++ b/Documentation/getting-started.adoc
@@ -0,0 +1,310 @@
+// Copyright © 2023 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: CC-BY-SA-4.0
+
+= Getting started with _it_
+:stylesheet: monospace.css
+:source-highlighter: pygments
+
+This document walks you through the very basics of _it_. Many of the
+interactions below may be automated away eventually, but our goal here is to
+provide an overview of what is going on under the hood.
+
+
+== Prerequisites
+
+We are going to assume you have the _it_ executable installed using
+
+ cargo install --git https://git.eagain.io/it
+
+Chances are that you already have an SSH key handy. If not, or if you want to
+use a key specifically for this exercise, generate one using
+
+ ssh-keygen -t ed25519
+
+It is also a good idea to add this key to your `ssh-agent`, so you don't have to
+type the password every time it is used for signing. Typing `ssh-add` usually
+does the trick.
+
+Next, we'll need to teach git to use our SSH key for signing. If you followed
+above recommendation and are using an agent for signing, the following commands
+will set it up as a default:
+
+ git config --global gpg.format ssh
+ git config --global user.signingKey "key::$(cat /path/to/your_key.pub)"
+
+If you prefer to not mess with your existing git configuration, you can also
+arrange for the key to be recognised by _it_ itself by running the following
+command instead:
+
+ git config --global it.signingKey "key::$(cat /path/to/your_key.pub)"
+
+Lastly, we'll create an _it_ xref:spec.adoc#_identities[identity] using this
+key:
+
+ it id init
+
+The command's output will look similar to this:
+
+[source,json]
+----
+{
+ "committed": {
+ "repo": "~/.local/share/it/ids",
+ "ref": "refs/heads/it/ids/671e27d4cce92f747106c7da90bcc2be7072909afa304d008eb8ecbfdebfbfe2",
+ "commit": "e08c34df95cd28aa212a4d110ecfb8acec2a102c"
+ },
+ "data": {
+ "signed": {
+ "_type": "eagain.io/it/identity",
+ "spec_version": "0.1.0",
+ "prev": null,
+ "keys": [
+ "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDtt6XEdNVInhiKkX+ccN++Bk8kccdP6SeBPg0Aq8XFo"
+ ],
+ "threshold": 1,
+ "mirrors": [],
+ "expires": null,
+ "custom": {}
+ },
+ "signatures": {
+ "ddc27a697903b8fe3ae3439818af81eaac20ba65e51a4170e3c81eb25abd1767": "5a460b26099ddd42912b7a52ee0c478619425ddfe4a562fd2ffd427d84cde6ab32effd8971308cfcdb64b08ac920e7a2c2a69d11b0ca7fe293e39306cd4d7c01"
+ }
+ }
+}
+----
+
+The `data` object is exactly what is stored in the repository `repo` at branch
+`ref`, which we invite you to convince yourself of using normal git commands.
+
+Identities can describe multiple keys, and carry additional custom metadata, but
+we'll skip over this for now.
+
+
+== Local drop
+
+_it_ is organised around patches. You know, like in the olden days, but
+xref:spec.adoc#_patches[not quite]. Patches are recorded onto a log dubbed
+xref:spec.adoc#_drops["`drop`"].
+
+If you have a git repository to toy around with, you can initialise a drop
+adjacent to the "normal" branches in it. You can record patches (which you may
+have received from elsewhere) onto that local drop, and push it to wherever you
+like.
+
+To initialise a drop in this way, just run:
+
+ it drop init --description "my project"
+
+This will drop you into `$EDITOR` to give you an opportunity to customise the
+drop's metadata, which will look similar to this:
+
+[source,json]
+----
+{
+ "description": "my project",
+ "roles": {
+ "drop": {
+ "ids": [
+ "671e27d4cce92f747106c7da90bcc2be7072909afa304d008eb8ecbfdebfbfe2"
+ ],
+ "threshold": 1
+ },
+ "snapshot": {
+ "ids": [
+ "671e27d4cce92f747106c7da90bcc2be7072909afa304d008eb8ecbfdebfbfe2"
+ ],
+ "threshold": 1
+ },
+ "mirrors": {
+ "ids": [
+ "671e27d4cce92f747106c7da90bcc2be7072909afa304d008eb8ecbfdebfbfe2"
+ ],
+ "threshold": 1
+ },
+ "branches": {
+ "refs/heads/main": {
+ "ids": [
+ "671e27d4cce92f747106c7da90bcc2be7072909afa304d008eb8ecbfdebfbfe2"
+ ],
+ "threshold": 1,
+ "description": "the default branch"
+ }
+ }
+ },
+ "custom": {}
+}
+----
+
+You may want to check if _it_ has guessed your mainline branch correctly (in the
+`branches` section), but otherwise just save and exit to finish the
+initialisation step. Run
+
+ git log -p refs/it/patches
+
+to see the effect.
+
+
+We want source code patches to be against the `refs/heads/main` branch, so we
+need to teach the drop about what the current state is:
+
+ it merge-point record
+
+Again, you may want to run `git log` as above to see what changed. You'll notice
+a line starting with "Re:" in the latest commit message: this is the
+xref:spec.adoc#_topics[topic] of a patch, and a xref:spec.adoc#mergepoints[merge
+point] is just a patch with a well-known topic. Run
+
+ it topic ls
+
+to see that this topic now exists, and
+
+ it topic show c44c20434bfdaa0384b67d48d6c3bb36d755b87576027671f606c404b09d9774
+
+to display the metadata recorded in it.
+
+Whenever you update `refs/heads/main`, run `merge-point record` again to convey
+the new head to the drop. ``show``ing the topic as above will give you a log of
+every such update.
+
+
+Finally, let's create a patch: make some changes on a feature branch, like you
+normally would, and then run
+
+ it patch record
+
+This will drop you into `$EDITOR`, asking you to describe what the patch is
+about. After you save and exit, a new record will be committed onto the drop,
+and a new topic will have been created:
+
+ $ it topic ls
+ {
+ "topic": "2d2d3c97df62b18d3d1476342fe9d6df0989592f6d55d151350422795da714d8",
+ "subject": "Just testin"
+ }
+ {
+ "topic": "c44c20434bfdaa0384b67d48d6c3bb36d755b87576027671f606c404b09d9774",
+ "subject": "Merges"
+ }
+
+You can post more patches to an existing topic, and reply to a specific entry
+within the topic. Because a patch in _it_ is really a combination of commentary
+and source code changes, and source code changes are actually optional, we have
+a handy shortcut to just, well, comment:
+
+ it topic comment record 2d2d3c97df62b18d3d1476342fe9d6df0989592f6d55d151350422795da714d8
+
+Type your comment into `$EDITOR`, save and exit. The result may look like this:
+
+ $ it topic show 2d2d3c97df62b18d3d1476342fe9d6df0989592f6d55d151350422795da714d8
+ {
+ "header": {
+ "id": "11337eb409fbd16a034d0323dfa8d879b5a0f36c",
+ "author": {
+ "name": "Kim Altintop",
+ "email": "kim@eagain.io"
+ },
+ "time": "2023-01-09T09:39:15+01:00",
+ "patch": {
+ "id": "8da0f98009aae98e7ca9df926125aa386a4f6a644c2036e9ec86a0810a7b8a62",
+ "tips": []
+ },
+ "in-reply-to": "0c9b7c0b437a3a072f3a1eead17703d22a0bf8f1"
+ },
+ "message": {
+ "_type": "eagain.io/it/notes/basic",
+ "message": "Ship it"
+ }
+ }
+ {
+ "header": {
+ "id": "0c9b7c0b437a3a072f3a1eead17703d22a0bf8f1",
+ "author": {
+ "name": "Kim Altintop",
+ "email": "kim@eagain.io"
+ },
+ "time": "2023-01-09T09:23:51+01:00",
+ "patch": {
+ "id": "502b3c4dcf709c9b16df2b58aece9a8966405347a2bf6ccbb305711120984951",
+ "tips": [
+ "refs/it/bundles/502b3c4dcf709c9b16df2b58aece9a8966405347a2bf6ccbb305711120984951/heads/main"
+ ]
+ }
+ },
+ "message": {
+ "_type": "eagain.io/it/notes/basic",
+ "message": "Just testin"
+ }
+ }
+
+
+Notice the `patch.tips` array? If the patch contains references which are
+conventionally recognised as source code changes (i.e. `refs/heads/...`,
+`refs/tags/...`), their physical location inside the drop's repository will be
+shown here. _it_ is currently lacking a nice UI for this, but you can just do
+
+ git diff refs/it/bundles/502b3c4dcf709c9b16df2b58aece9a8966405347a2bf6ccbb305711120984951/heads/main
+
+to see the diff against your currently checked-out branch. If you're satisfied,
+go ahead and merge this ref into your local `main` branch. Don't forget to thank
+yourself for the contribution by commenting on the topic!
+
+
+To wrap it up, you may be wondering how _it_ stored everything in your
+repository, and perhaps clean it up. Run
+
+ git for-each-ref refs/it
+
+to poke around the references _it_ uses to maintain its state. Note, however,
+that this structure is not part of any public API, and may change without
+further notice!
+
+The actual xref:spec.adoc#_bundles[patch bundles] can be found in
+`.git/it/bundles`. Note that a patch bundle is self-contained -- you can send
+them over email, store them in IPFS, or whatever is convenient to move them from
+one place to another.
+
+
+== Remote drop
+
+We said that you could receive patches over whatever channel, and apply them to
+your local drop. A more tangible way is to serve the drop over HTTP, allowing
+anyone to submit patches to it. While it's possible to do this from your working
+repository, it is preferable to create a dedicated repo for the drop:
+
+ it drop init --git-dir /the/drop.git --description "my public drop"
+ it merge-point record --git-dir /the/drop.git --source-dir .
+ cd /the/drop.git
+ RUST_LOG=debug it serve
+
+In a second terminal, cd into your working repo and add the drop as a regular
+git remote:
+
+ git remote add dropit /the/drop.git
+ git remote update dropit
+
+You can now submit to it by replacing `record` with `submit` for the respective
+commands, and specifying `--drop dropit/patches` to use the remote drop as the
+reference.
+
+Currently, an extra command `it drop bundles sync` is needed to receive the
+patch bundles after updating the remote. This is not particularly smart yet,
+especially given that we do support inspecting individual topics (as
+opposed to the entire drop history) by `it topic unbundle`. We'll get there.
+
+
+== Loose ends
+
+If you've used email to send around patches, or even the excellent
+https://git.kernel.org/pub/scm/utils/b4/b4.git[b4] tool, this may all seem
+vaguely familiar to you: instead of `mbox` archives we have binary git bundles,
+what gives?
+
+That's fair, we haven't really detailed how _it_ permits much richer
+interactions and datatypes, for lack of a UI. For brevity, we also haven't shown
+that patch bundles can be stored on IPFS, the "commit bit" can be extended to
+co-maintainers, or how more complex topologies can be created by drop
+aggregation (and without resorting to HTTP POST).
+
+We invite you to play around with the available commands, read the
+xref:spec.adoc[spec], and perhaps consider to contribute where you see _it_ is
+currently lacking :)
diff --git a/Documentation/monospace.css b/Documentation/monospace.css
new file mode 100644
index 0000000..056ed3e
--- /dev/null
+++ b/Documentation/monospace.css
@@ -0,0 +1,72 @@
+/* SPDX-License-Identifier: MIT
+ *
+ * Based on https://github.com/darshandsoni/asciidoctor-skins/blob/c98a8ab9b27571e5b63d75912a3c753cc72ed8e4/css/monospace.css
+ *
+ */
+
+@import url(https://cdn.jsdelivr.net/gh/asciidoctor/asciidoctor@2.0/data/stylesheets/asciidoctor-default.css);
+
+:root {
+--maincolor:#FFFFFF;
+--primarycolor:#000000;
+--secondarycolor:#000000;
+--tertiarycolor: #000000;
+--sidebarbackground:#CCC;
+--linkcolor:#000000;
+--linkcoloralternate:#f44336;
+--white:#FFFFFF;
+--black:#000000;
+--font: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;
+}
+
+html,body {
+ font-size: 80%;
+}
+body {
+ font-family: var(--font);
+}
+h1 {
+ color: var(--primarycolor) !important;
+ font-family: var(--font);
+}
+h2,h3,h4,h5,h6 {
+ color: var(--secondarycolor) !important;
+ font-family: var(--font);
+}
+pre,code {
+ font-family: var(--font);
+ white-space: pre;
+}
+
+#header,#content,#footnotes,#footer {
+ max-width: 72em;
+}
+#footer {
+ display: none;
+}
+#footnotes .footnote {
+ font-size: 1em;
+}
+.title {
+ color: var(--tertiarycolor) !important;
+ font-family: var(--font) !important;
+ font-style: normal !important;
+ font-weight: normal !important;
+}
+.content>pre {
+ font-size: 1rem;
+}
+#toctitle {
+ font-family: var(--font);
+ font-size: 2em;
+ color: var(--primarycolor);
+}
+.sectlevel1 {font-family: var(--font)!important; font-size: 1.0625rem;}
+.sectlevel2 {font-family: var(--font)!important;}
+.sectlevel3 {font-family: var(--font)!important;}
+.sectlevel4 {font-family: var(--font)!important;}
+
+.lst,dl {
+ padding-left: 5%;
+ padding-right: 10%;
+}
diff --git a/Documentation/spec.adoc b/Documentation/spec.adoc
new file mode 100644
index 0000000..0816a11
--- /dev/null
+++ b/Documentation/spec.adoc
@@ -0,0 +1,1373 @@
+// Copyright © 2022-2023 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: CC-BY-SA-4.0
+
+= it: zero-g git
+Kim Altintop <kim@eagain.io>
+:revdate: 2022
+:revnumber: 0.1.0
+:revremark: draft
+:attribute-missing: warn
+:listing-caption: Figure
+:reproducible:
+:sectanchors:
+:sectnums:
+:stylesheet: monospace.css
+:toc: preamble
+:toclevels: 3
+:xrefstyle: short
+
+_it_ aims to augment git with primitives to build integrated, cryptographically
+verifiable collaboration workflows around source code. It maintains the
+distributed property of git, not requiring a central server. _it_ is transport
+agnostic, and permits data dissemination in client-server, federated, as well as
+peer-to-peer network topologies.
+
+
+// Footnotes. Ironically at the head, due to asciidoc's evaluation order
+:fn-content-hash: pass:q,a[footnote:hash[ \
+Hashing with both the SHA-1 and SHA-256 algorithms allows internally-linked \
+data to roam between git repositories with different object formats. We hope \
+that when and if git introduces support for a new hash algorithm post SHA-256, \
+it will also have interoperability implemented. Otherwise, the burden will \
+fall on _it_ implementations. \
+]]
+:fn-peel: footnote:peel[ \
+"Peeling" is git jargon for dereferencing the natural target of a git object \
+until an object of the desired type is found. \
+]
+:fn-commit-signing: pass:q,a[footnote:commit-signing[ \
+_it_ does not prescribe whether commits or tags pertaining source code \
+histories must be cryptographically signed. Due to the non-commutativity of \
+git commits (their identity changes when reordered), it is highly dependent on \
+the development model whether author signatures are preserved in published \
+histories. Thus, we leave it to users to decide if signatures should be \
+applied at the git level, or other forms of attestation (e.g. via topic \
+entries) are employed. \
+]]
+:fn-resolve-id: footnote:resolveid[ \
+Normally, identities must be resolvable within the same tree as the drop \
+metadata. However, resolution may be substituted if e.g. the client believes \
+to have more up-to-date identity data elsewhere. \
+]
+//
+
+
+== Introduction
+
+=== Motivation
+
+The checks and balances of Free and Open Source Software (FOSS) is the ability
+for anyone to contribute to or diverge from ("`fork`") a line of development
+freely and cheaply. As FOSS is defined by the community developing it, this
+extends to all artefacts of communication and collaboration, not just the source
+code itself. In other words, an open development model is a transparent process.
+
+It is easy to see that this model necessitates _data sovereignty_: control over
+the data implies controlling participation.
+
+Traditionally, this property has been approximated by using internet email for
+collaboration. While its simplicity as a medium has its merits, email is clearly
+declining in popularity for our purpose. We attribute this to mainly two
+weaknesses: intended primarily as a free-form medium, email is lacking the
+_programmability_ of the web, impeding innovation in both tooling and services.
+Secondly, the protocol is inherently prone to abuse by permitting unsolicited
+messages, and the response measures implemented over the years have amplified
+monopolization: today, it takes significant effort and expertise to maintain a
+mail exchanger independent of large providers (let alone one which hosts a
+mailing list fanning out messages to a potentially large number of subscribers).
+
+It is not obvious, however, what an alternative could look like on a protocol
+level. Among the tradeoffs to consider is the tension between openness,
+addressability and availability -- and it highly depends on the situation which
+one has higher priority. It thus seems unlikely that it can be resolved once and
+for all. Instead, we recognise it as desirable to provide the user with choices
+of transport methods. Or, put differently, that "`the network is optional`", as
+Kleppmann et al. have called for in their essay on <<local-first,"Local-first
+software">>.
+
+Git is prototypical of the _local-first_ idea, providing data sovereignty -- for
+as long as we do not consider bidirectional collaboration: git commits do not
+commute, and so concurrent modifications do not converge, but must be explicitly
+linearised. This is not satisfying if we want to eliminate both intermediaries
+and online rendezvous. It is tempting to design a source code management and
+collaboration system from the ground up with commutativity in mind, yet git is
+so ubiquitous that we feel that we cannot forgo to present a solution which
+preserves the ability to use its existing toolchain and ecosystem. It turns out
+that, while it would be difficult to retrofit git into a proper, idealised
+_local-first_ application, it is perfectly suitable for _hosting_ such an
+application which models the collaboration process itself.
+
+
+=== Overview
+
+_it_ is essentially a collection of datatypes.
+
+We start by establishing identities (<<Identities>>), which for our purposes
+only need to certify ownership of public keys. By using an extensible,
+human-readable metadata format, we leave it to the user to bind the identity to
+external identifiers or extend it with "`profile`" information in order to
+convey a _persona_. As the metadata can be conveniently managed using git, it
+can be published easily.
+
+_it_ inherits the paradigm of most distributed version control systems, where
+changes are exchanged as small increments ("`patches`", <<Patches>>), but
+generalises the concept to include both source code changes and associated data
+such as commentary. An _it_ patch is thus similar to an email message, but
+mandates the associated data to be structured (as opposed to free-form).
+Ordering with respect to related patches is determined via git's commit graph,
+optionally allowing for sophisticated shared state objects to be constructed if
+a <<CRDT>>-based payload is used.
+
+Patches are recorded onto a log structure ("`drop`", <<Drops>>), for which we
+define a representation as a git commit history. The patch contents are,
+however, not stored directly in this structure, but redistributed verbatim. This
+is done so as to reduce data dissemination to mostly (static) file transmission,
+which opens up more choices for alternative transport protocols and minimises
+resource consumption imposed by dynamic repacking.
+
+The drop is responsible for ensuring that the dependencies (or: prerequisites)
+of a patch are satisfied before recording it, enforcing that the partial
+ordering of related patches can be recovered. Apart from that, a drop does not
+provide any ordering guarantees, which means that independent drops may converge
+even though their (commit) hashes differ.
+
+Finally, a drop is secured by a trust delegation scheme which authorises
+operations modifying its state. It also serves as a PKI, allowing verification
+of all signed objects it refers to.
+
+Networking is exemplified by a simple HTTP API (<<HTTP API>>), hinting at
+alternative protocols where appropriate. We envisage patch submission to give
+rise to gateway services, which may be elaborated on in future revisions of this
+document.
+
+
+// TODO: Related work?
+
+== Conventions and Terminology
+
+The key words "`MUST`", "`MUST NOT`", "`REQUIRED`", "`SHALL`", "`SHALL NOT`",
+"`SHOULD`", "`SHOULD NOT`", "`RECOMMENDED`", "`NOT RECOMMENDED`", "`MAY`", and
+"`OPTIONAL`" in this document are to be interpreted as described in <<RFC2119>>
+and <<RFC8174>> when, and only when, they appear in all capitals, as shown here.
+
+Familiarity with git concepts and datastructures is assumed, and terminology
+used without further explanation. Refer to the <<gitglossary>> instead.
+
+== Formats
+
+=== Signed Values
+
+Signed data items in _it_ are encoded as a subset of JSON which disallows
+floating point numbers, and requires string values and object keys to be UTF-8
+encoded. Signatures are obtained over the SHA-512 hash of the
+<<Canonical-JSON,canonical form>> of the JSON object (hashing is used to
+minimise the payload size, which may be sent to an agent process for signing).
+
+JSON values SHOULD be stored in pretty-printed form, with object keys sorted
+lexicographically.
+
+Empty optional fields SHOULD NOT be omitted from the output, but be set to
+`null` if the value is a custom type represented by a JSON string, or the
+neutral element of the JSON type otherwise.
+
+Unless otherwise noted, JSON arrays SHALL be treated as sets.
+
+Where JSON data is signed inline, it is wrapped in an object:
+
+[source,subs="+macros"]
+----
+{
+ "signed": <<OBJECT>>,
+ "signatures": {
+ <<KEYID>>: <<SIGNATURE>>,
+ ...
+ }
+}
+----
+
+[[OBJECT]]OBJECT::
+ A JSON object. Its canonical form is obtained as per <<Canonical-JSON>>.
+
+[[KEYID]]KEYID::
+ The identifier of the key signing the OBJECT, which is the SHA-256 hash of
+ the canonical form of the key, in hexadecimal.
+
+[[SIGNATURE]]SIGNATURE::
+ The hex-encoded signature of the SHA-512 hash of the canonical form of
+ OBJECT.
+
+=== Common Types
+
+[[BLOB_HASH]]BLOB_HASH::
+ Hash of the payload `p`, as if created by <<git-hash-object>>. That is, for
+ a hash algorithm `H`:
++
+[source]
+----
+H('blob ' || LEN(p) || NUL || p)
+----
+
+
+[[CONTENT_HASH]]CONTENT_HASH::
+ Dictionary of both the SHA-1 and SHA-256 <<BLOB_HASH>> of the referenced
+ object{fn-content-hash}:
++
+[source,subs="+macros"]
+----
+{
+ "sha1": <<BLOB_HASH>>,
+ "sha2": <<BLOB_HASH>>
+}
+----
+
+
+[[DATETIME]]DATETIME::
+ Date-time string in <<RFC3339>> format, e.g. "`2022-08-23T14:48:00Z`".
+
+[[OBJECT_ID]]OBJECT_ID::
+ Hexadecimal git object id.
+
+[[SPEC_VERSION]]SPEC_VERSION::
+ Version of this specification in "`dotted triple`" format, currently
+ {revnumber}. The semantics loosely follows the <<semver,"Semantic
+ Versioning">> convention, but gives no significance to leading zeroes.
+
+[[URL]]URL::
+ A URL as per the <<WHATWG-URL, WHATWG specification>>.
+
+[[VARCHAR]]VARCHAR(N)::
+ A UTF-8 encoded string of at most length `N` (in bytes).
+
+
+== Identities
+
+Like most decentralised systems, _it_ relies on public key cryptography to
+ensure authenticity of data. In order to manage and distribute public keys, _it_
+defines a <<id-metadata,simple, JSON-based format>> which can conveniently be
+stored in git.
+
+The subject of an _it_ identity is not inherently a human, it could just as well
+be a machine user such as a CI- or merge bot, or a group of users extending
+ultimate trust to each other. Consequently, it should not be assumed that
+ownership of the keys constituting the identity lies with a single actor in the
+system. It is, however, illegal to reuse keys for multiple identities within the
+same context.
+
+The context of an identity is generally a <<Drops,drop>>. Thus, a subject may
+create as many identities as they see fit (provided keys are not reused).
+Conversely, the `*custom*` attribute of an <<id-json,id.json>> document permits
+to associate an _it_ identity with external methods certifying the subject's
+_persona_, such as custodial identity providers or <<DID>> controllers (for
+example by embedding a DID document in the `*custom*` section).
+
+In general, _it_ does not specify how trust is initially established in an
+identity.
+
+Identities in _it_ are self-certifying, in that introduction or revocation of
+keys are signed by a threshold of the specified keys themselves. A threshold
+greater than one reduces the probability of identity compromise, even if a
+subset of its keys is compromised. For usability reasons, owners of personal
+identities may want to set the `threshold` to `2` and carry a certification key
+on a portable device.
+
+For practical reasons, it is RECOMMENDED for implementations to use the widely
+deployed <<OpenSSH>> suite for signing purposes, including for git commits.
+Verification of SSH-signed git commits (available since git version 2.34) MUST
+be supported. Via the <<ssh-agent>> protocol, alternative tooling is not
+precluded. All key algorithms and signature schemes supported by OpenSSH MUST be
+supported by _it_ implementations. To make it easy for users to visually match
+output from OpenSSH with <<id-json,id.json>> documents, <<KEY,keys>> are encoded
+in the format used by OpenSSH.
+
+Additional key algorithms, signature schemes or public key encodings may be
+introduced in the future.
+
+[#id-metadata]
+=== Metadata
+
+Identity information is stored in a JSON file, conventionally named `id.json`.
+The file's contents can be amended using a threshold signature scheme, and
+revisions are hash-linked to their predecessors.
+
+The `*signed*` portion of the `id.json` file is defined as follows:
+
+[source#id-json,subs="+macros"]
+----
+{
+ "_type": "eagain.io/it/identity",
+ "spec_version": <<SPEC_VERSION>>,
+ "prev": <<CONTENT_HASH>> | null,
+ "keys": [
+ <<KEY>>,
+ ...
+ ],
+ "threshold": <<THRESHOLD>>,
+ "mirrors": [
+ <<URL>>,
+ ...
+ ],
+ "expires": <<DATETIME>> | null,
+ "custom": <<CUSTOM>>
+}
+----
+
+[[KEY]]KEY::
+ Public key in SSH encoding, specified in <<RFC4253>>, <<RFC5656>> and
+ <<RFC8709>>. The comment or label part after the base64-encoded key SHOULD
+ be omitted in the document.
++
+Example:
++
+----
+ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDtt6XEdNVInhiKkX+ccN++Bk8kccdP6SeBPg0Aq8XFo
+----
+
+[[THRESHOLD]]THRESHOLD::
+ An integer number of keys whose signatures are required in order to consider
+ the identity metadata to be properly signed. Must be between 1 and the
+ number of `*keys*` in the metadata file.
+
+[#id-verification]
+=== Verification
+
+Verification of an identity history proceeds as follows:
+
+[.lst]
+ . Load the latest known <<id-json,id.json>> metadata
+
+ . If the `*expires*` attribute is not `null`, check that the specified
+ <<DATETIME>> does not lie in the past. Otherwise, abort and report an
+ error.
+
+ . Verify that at least `*threshold*` of `*keys*` have provided valid
+ signatures
+
+ . If `*prev*` is not `null`, load the corresponding previous revision of the
+ metadata
+
+ . Verify that at least `*threshold*` of `*keys*` of the _previous_ revision
+ have provided valid signatures over the _current_ revision
+
+ . Repeat steps 4. and 5. until `*prev*` is `null`
+
+ . [[IDENTITY_ID]]Compute the SHA-256 hash over the canonical form of the
+ initial revision. This is the *_identity id_*.
+
+ . If a particular identity id was expected, check that it matches the
+ computed one
+
+
+== Patches
+
+A source code patch is traditionally a differential between source code files.
+In practice, however, such diffs are seldom exchanged without additional
+context, usually prose describing and motivating the change.
+
+During the process of accepting a patch into the mainline history of a project,
+collaborators may leave comments on the original submission, reference points
+may be annotated ("`tagged`"), and revised versions of the patch may be
+submitted. The degree to which this process is formalised varies between
+projects, as does the preference for capturing it in formal datastructures such
+as ticketing systems. A common property of all these different contributions to
+a code base is that they can be seen as state transitions, where the git commit
+chain helpfully provides a way to establish a partial ordering.
+
+_it_ seeks to unify all kinds of contributions into a single exchange format, a
+<<Bundles,bundle>>, which is already native to git. The semantics of a bundle,
+apart from causal ordering, is defined by its contents, which makes the format
+amenable for future extensions.
+
+In that sense, _it_ aspirationally uses the term "`patch`" in the generalised
+way described by theoretical work such as <<Darcs>>, <<CaPT>>, and <<HoPT>>.
+When describing the more mundane processing procedures, the term "`patch
+bundle`" is also used, alluding to the container format.
+
+=== Bundles
+
+A patch bundle is a <<gitformat-bundle,git bundle>> of either version supported
+by git (v2 or v3). If v3 is used, only the `object-format` capability is
+recognised, specifying an object `filter` is illegal.
+
+For compatibility with git, prerequisite object ids MUST refer to commit
+objects, even though the format specification permits any object type.
+
+The pack data of the bundle MUST NOT contain objects unreachable from the tips
+specified in the bundle header.
+
+[NOTE]
+====
+Enforcing this rule on the receiving end of a patch bundle may not be practical
+in some circumstances. Unreachable objects will automatically be purged if and
+when <<snapshots,snapshots>> are taken (which imply repacking), but it is worth
+noting that there might be security implications of redistributing patch bundles
+which have not been verified to adhere to this rule, as it is possible to
+"`hide`" arbitrary objects in the bundle.
+====
+
+The bundle references may contain zero or more branches, tags or notes. A
+<<Topics,topic>> ref MUST be present. If <<Identities,identities>> need to be
+added or updated, zero or more `ids` refs may be present whose target either
+resolves directly to an updated <<id-json,id.json>>, or is peelable{fn-peel} to
+a tree containing the updated document in a blob named `id.json` at the root.
+
+Where more than one occurrence is permissible, the receiver MAY limit the total
+number of occurrences (see also <<drop-validation>>).
+
+More formally, the permissible references are (in ABNF notation):
+
+[source#bundle-refs,abnf,subs="+macros"]
+----
+refname = topic / *identity / *branch / *tag / *note
+
+topic = "refs/it/topics/" <<TOPIC_ID>>
+identity = "refs/it/ids/" <<IDENTITY_ID>>
+branch = "refs/heads/" name
+tag = "refs/tags/" name
+note = "refs/notes/" name
+----
+
+[[TOPIC_ID]]TOPIC_ID::
+ SHA-256 hash, in hexadecimal. The preimage is opaque to _it_, but should be
+ chosen by the initiator of a <<Topics,topic>> such that the probability of
+ collisions with independently initiated topics is very low (for example the
+ contents of the initial message combined with a random nonce).
+
+
+The pack data section of a bundle MAY be encrypted using either <<age>> or GPG.
+
+=== Topics
+
+A topic is conceptually similar to a mailing list thread or structured data such
+as a "`Pull Request`", in that it groups together related information. The
+<<TOPIC_ID,stable identifier>> of a topic is a SHA-256 hash, the preimage of
+which is opaque to _it_.
+
+A patch bundle MUST contain a topic commit history (`refs/it/topics/...`)
+containing objects which represent _interactions_ such as free-form comments,
+code review annotations, attestations ("`signoffs`") or results from CI services.
+The set of all histories referring to the same topic identifier forms a directed
+acyclic graph (DAG), usually a tree, yielding a partial order of topic entries.
+
+If topic entries form a <<CRDT>>, sophisticated "`mutable`" state objects can be
+constructed, resembling concepts commonly managed in a centralised fashion such
+as "`Issues`", "`Task trackers`" or automated merge queues. However, not all
+workflows require this level of sophistication (namely the ability to change
+state collaboratively), and traversing a DAG of semi-structured, easily
+parseable data in topological order is sufficient. Examples of this include
+mailing-list style conversations or archives of external communication systems.
+
+Hence, _it_ mandates that topic histories can have one of two types: message
+based or CRDT based.
+
+[.lst]
+ - [[message-topic]]*Message based* topics consist of a single JSON object
+ per commit, found in a file named `*m*` at the root of the commit's tree.
+ A message based topic is represented by its commit graph.
+
+ - [[crdt-topic]]*CRDT based* topics consist of a single
+ <<automerge-change,change object>> per commit, found in a file named `*c*`
+ at the root of the commit's tree. CRDT based topics are represented by a
+ single object, to which changes are applied in the topological order of
+ the commit graph.
+
+[NOTE]
+====
+The <<Automerge>> CRDT is chosen for its generality. Future versions of this
+document may allow for other CRDTs to be used.
+
+The exact encoding of Automerge changes for use with _it_ is still under
+consideration. Since binary operation payloads are likely to be undesirable for
+the intended use, it may be preferable to define a textual encoding (such as
+JSON), which would make the stored data easier to inspect without specialised
+tooling.
+====
+
+Changing the type of a topic is illegal, and should result in the offending
+patch being rejected, or being omitted during topic traversal.
+
+In both paradigms, authenticity of authorship is inferred from the cryptographic
+signature of the individual commits. Dependencies, respectively reply-to
+relationships, are expressed as commit parents.
+
+Note that no type or schema information is imposed. It is up to the client to
+interpret the data payload, and potentially omit unknown entries from the
+output.
+
+[#patch-equivalence]
+=== Equivalence
+
+
+Depending on context, two patch bundles are considered equivalent if:
+
+[.lst]
+- The set of bundle reference targets is equal
++
+This means that the bundles logically carry the same information, which is
+preserved even if repacked (e.g. when <<snapshots,snapshots>> are used). This
+equivalence is captured in the <<BUNDLE_HEADS>> value, which is the value a
+patch submitter signs and which determines whether a patch has been received
+before by a <<Drops,drop>>.
+
+- The union of the reference targets and prerequisite objects is equal
++
+When applied to an existing object database, the packfiles require the same
+objects to be present, and result in the same (reachable) state afterwards, and
+so are for practical purposes "`the same`". +
++
+However, packfile generation is not formally guaranteed to be deterministic
+across git implementations or -versions, namely the order of entries in the
+file. For long-term storage, patch bundles are thus referred to by their
+<<BUNDLE_HASH>>.
+
+- Or, the exact file contents are equal
++
+When downloading bundles from untrusted sources, or from content-addressable
+storage, the checksum of the exact contents should be verified. This information
+is preserved as the <<BUNDLE_CHECKSUM>>.
+
+
+== Drops
+
+A _drop_ (as in _-box_ or _deadletter-_) is a hash-linked log which timestamps
+the reception of <<Patches,patches>>. In git terms, it is a history of
+(single-parent) commits, where integrity is ensured through git itself. To add
+authenticity, drops carry additional <<drop-metadata,metadata>> which is secured
+using a scheme based on <<TUF,The Update Framework Specification (TUF)>>.
+
+A drop also carries all <<Identities,identities>> needed to verify cryptographic
+signatures on metadata, <<Patches,patches>>, and optionally git
+commits{fn-commit-signing}, thus forming a PKI. Identities are themselves
+updated through patches.
+
+Importantly, the drop history does _not_ carry the patch payload itself. Patch
+bundles are kept and redistributed as received, and so can make heavy use of
+content distribution networks. At the same time, the drop history itself remains
+fairly small even if not delta-encoded. Together, this makes it possible to
+operate even public drops on relatively constrained hardware.
+
+A drop is a strictly _local-first_ concept -- the drop history may never leave a
+single machine. In order to be able to accept patch proposals, however, a drop
+may make itself _externally addressable_, for example by exposing an HTTP API
+(see <<HTTP API>>).
+
+It is important to note that drop histories, even if they logically describe the
+same project, are not in principle required to converge. In git terms, this
+means that two drop histories may refer to the same set of patch bundles, but
+differ in the ordering of the commits (or other parameters which change the
+commit identity). Conversely, the respective sets of patch bundles may also be
+distinct, to the extent permitted by the connectivity requirement (see
+<<record-patch>>).
+
+An exception to this rule are *mirrors*, whose network addresses are published
+as part of the drop <<mirrors-json,metadata>>: the addresses listed therein are
+interchangeable, i.e. obtaining the drop history from any of them MUST result in
+the exact same state.
+
+[#aggregation]
+Instead of or in addition to exposing a public means of patch submission, drops
+may *aggregate* patches from other drops. That is, they may follow other drops
+just like a normal git remote, and apply patch records to their own history. By
+specifying <<alternates-json,alternates>> in the metadata, a drop promises to
+aggregate submissions from those locations. Aggregation is, however, not limited
+to published alternates: for example, a contributor may maintain their own
+private drop recording only the patches created by that contributor. Another
+drop for the same project may be made aware of a mirror URL for that private
+drop, and update itself from there periodically.
+
+[#drop-metadata]
+=== Metadata
+
+The authenticity of drops is ensured by a trust delegation scheme derived from
+<<TUF>>. There, a role-based threshold signature scheme is used to prove
+authenticity of updates to certain parts of an abstract "`repository`",
+including the metadata containing the trust delegations itself.
+
+For our purposes, some of the properties of a "`repository`" are upheld by git
+itself, while other roles are specific to _it_. There are four roles defined for
+_it_ drops:
+
+[.lst]
+ . Root role
+ . Snapshot role
+ . Mirrors role
+ . Branch roles
+
+Like in TUF, the mirrors role is optional. Also like TUF, we note that it is
+possible to instantiate a drop with a single <<Identities,identity>> (and even
+with a single key) -- which is not considered to be secure, but may be
+convenient in some scenarios.
+
+Root role::
+
+The root role delegates trust to specific <<Identities,identities>> trusted for
+all other roles, by means of being eligible to sign updates to the
+<<drop-json,drop.json>> metadata file.
++
+Delegating to identities instead of directly to keys permits rotating the
+respective keys independently, thus weakening the requirement for air-gapped
+storage of all root keys.
+
+Snapshot role::
+
+The snapshot role permits signing commits onto the drop history.
++
+This applies mainly to new <<record-json,records>>, but note that it may also
+include updates to the metadata files, yet does not render those updates valid
+as their signatures are verified independently.
++
+The snapshot role is typically granted to machine users on public drop servers.
++
+Snapshot signatures are regular git commit signatures. Pending a practical
+method to obtain multiple signatures on a git commit, `*threshold*` values other
+than `1` are not currently supported.
+
+Mirrors role::
+
+The mirrors role permits signing the <<mirrors-json,mirrors.json>> and
+<<alternates-json,alternates.json>> metadata files.
++
+This role is optional, as not all drop operators may find it practical or useful
+to publish signed mirrors/alternates lists.
+
+[[branch-roles]]Branch roles::
+
+Branch roles are keyed by concrete reference names, which the listed
+<<Identities,identities>> are trusted to update (see <<Mergepoints>>).
+
+
+The metadata files establishing the scheme are described in the following
+sections.
+
+[#drop-json]
+==== `drop.json`
+
+The `drop.json` metadata file is signed by the root role and indicates which
+<<Identities,identities>> are authorised for all roles, including the root role
+itself.
+
+The `*signed*` portion of the `drop.json` metadata file is defined as follows:
+
+[source,subs="+macros"]
+----
+{
+ "_type": "eagain.io/it/drop",
+ "spec_version": <<SPEC_VERSION>>,
+ "description": <<DESCRIPTION>>,
+ "prev": <<CONTENT_HASH>> | null,
+ "roles": {
+ "root": <<ROLE>>,
+ "snapshot": <<ROLE>>,
+ "mirrors": <<ROLE>>,
+ "branches": {
+ <<REFNAME>>: <<ANNOTATED_ROLE>>,
+ ...
+ }
+ },
+ "custom": <<CUSTOM>>
+}
+----
+
+[[ANNOTATED_ROLE]]ANNOTATED_ROLE::
+ Like a <<ROLE>>, but with an additional field `*description*` of type
+ <<DESCRIPTION>>.
++
+[source#annotated-role,subs="+macros"]
+----
+{
+ "ids": [
+ <<IDENTITY_ID>>,
+ ...
+ ],
+ "threshold": <<THRESHOLD>>,
+ "description": <<DESCRIPTION>>
+}
+----
+
+[[CUSTOM]]CUSTOM::
+ An arbitrary JSON object carrying user-defined data. To avoid conflicts, it
+ is RECOMMENDED to key custom objects by a URL-like identifier. For example:
++
+[source#example-custom]
+----
+{
+ "custom": {
+ "eagain.io/it/emojicoin": {
+ "insert-here": "lol1u2vgx76adff"
+ }
+ }
+}
+----
+
+[[DESCRIPTION]]DESCRIPTION::
+ A UTF-8 string with a maximum length of 128 bytes, i.e. a
+ <<VARCHAR,VARCHAR(128)>>.
+
+[[REFNAME]]REFNAME::
+ A full git refname (i.e. starting with "`refs/`"), well-formed as per
+ <<git-check-ref-format>>.
+
+[[ROLE]]ROLE::
+ Dictionary of a set of <<IDENTITY_ID,identity ids>> assigned to that role,
+ paired with a <<THRESHOLD,threshold>>. I.e.:
++
+[source#role,subs="+macros"]
+----
+{
+ "ids": [
+ <<IDENTITY_ID>>,
+ ...
+ ],
+ "threshold": <<THRESHOLD>>
+}
+----
++
+Example:
++
+[source#example-role,json]
+----
+{
+ "ids": [
+ "671e27d4cce92f747106c7da90bcc2be7072909afa304d008eb8ecbfdebfbfe2"
+ ],
+ "threshold": 1
+}
+----
+
+[#mirrors-json]
+==== `mirrors.json`
+
+The `mirrors.json` file is signed by the mirrors role. It describes known
+network addresses of read-only copies of the drop, believed to be kept in-sync
+with the drop within a reasonable time window by its operators.
+
+The `*signed*` portion of the `mirrors.json` file is defined as follows:
+
+[source,subs="+macros"]
+----
+{
+ "_type": "eagain.io/it/mirrors",
+ "spec_version": <<SPEC_VERSION>>,
+ "mirrors": [
+ <<MIRROR>>,
+ ...
+ ],
+ "expires": <<DATETIME>> | null
+}
+----
+
+[[MIRROR]]MIRROR::
+ A dictionary describing a mirror.
++
+[source#mirror,subs="+macros"]
+----
+{
+ "url": <<URL>>,
+ "kind": <<MIRROR_KIND>>,
+ "custom": <<CUSTOM>>
+}
+----
+
+[[MIRROR_KIND]]MIRROR_KIND::
+ Hint at what retrieval method is offered by the mirror. Unknown values MUST
+ be accepted during parsing and signature verification. Defined values are:
+
+[.lst]
+ - *bundled*: the mirror is expected to serve patch bundles at the well-known
+ <<http-fetch-bundle,HTTP>> endpoint relative to `*url*`, if `*url*`
+ denotes a HTTP URL
+ - *packed*: the mirror is a plain git server, but the client may reify
+ bundles by requesting the appropriate objects over the regular git network
+ protocol
+ - *sparse*: the mirror does not host bundle data at all, only the drop
+ history. This can be useful in constrained environments such as
+ peer-to-peer storage if (and only if) the <<record-json,record.json>>
+ entries specify stable bundle URIs.
+
+
+[#alternates-json]
+==== `alternates.json`
+
+The `alternates.json` file is signed by the mirrors role. It describes known
+network addresses of writeable (e.g. via <<HTTP API,HTTP>>) drops where
+<<Patches,patches>> pertaining to the same project may be submitted. The method
+of
+submission is described by the alternate's URL. A drop publishing an
+`alternates.json` file implicitly promises to <<aggregation,aggregate>> patches
+from the alternates listed, although it is free to do so only selectively.
+
+The `*signed*` portion of the `alternates.json` file is defined as follows:
+
+[source,subs="+macros"]
+----
+{
+ "_type": "eagain.io/it/alternates",
+ "spec_version": <<SPEC_VERSION>>,
+ "alternates": [
+ <<URL>>,
+ ...
+ ],
+ "custom": <<CUSTOM>>,
+ "expires": <<DATETIME>> | null
+}
+----
+
+[#drop-verification]
+=== Verification
+
+To verify a drop, the <<drop-json,drop.json>> metadata file must be verified
+first:
+
+[.lst]
+ . From the latest known commit of the drop history, load the
+ <<drop-json,drop.json>> file
+
+ . For each <<IDENTITY_ID,identity id>> in the `*root*` role of the file,
+ resolve the corresponding <<Identities,identity>> and
+ <<id-verification,verify>> it{fn-resolve-id}
+
+ . Verify that no key is being referenced by more than one identity
+
+ . Verify that the <<drop-json,drop.json>> file is signed by a threshold of
+ identities as specified in the `*threshold*` attribute of the `*root*`
+ role. Signatures by multiple keys from the same identity are allowed, but
+ don't count toward the threshold.
+
+ . If `*prev*` is not `null`, load the corresponding previous revision of the
+ metadata
+
+ . Verify that the threshold specified in the _previous_ revision is met on
+ the _current_ revision, loading and verifying additional identities as
+ needed
+
+ . Repeat steps 5. and 6. until `*prev*` is `null`
+
+Having obtained a verified <<drop-json,drop.json>> metadata file, it can now be
+verified that the head commit of the drop history is signed by a key belonging
+to an identity which is assigned the `*snapshot*` role.
+
+If a <<mirrors-json,mirrors.json>> and/or <<alternates-json,alternates.json>> is
+present in the head commit's tree, it should be verified as follows:
+
+[.lst]
+ . Load the metadata file
+
+ . If the `*expires*` attribute is not `null`, check that the specified
+ <<DATETIME>> does not lie in the past
+
+ . For each <<IDENTITY_ID,identity id>> in the `*mirrors*` role of the
+ <<drop-json,drop.json>> file, resolve the corresponding
+ <<Identities,identity>> and <<id-verification,verify>> it{fn-resolve-id}
+
+ . Verify that the metadata file is signed by a threshold of identities as
+ specified in the `*threshold*` attribute of the `*mirrors*` role.
+ Signatures by multiple keys from the same identity are allowed, but don't
+ count toward the threshold.
+
+Verification of mirror- and alternates-lists MAY be deferred until they are
+actually used. Failure to verify <<mirrors-json,mirrors.json>> or
+<<alternates-json,alternates.json>> does not render the drop metadata invalid.
+
+[#history-repr]
+=== History representation
+
+A drop history is stored as a git commit history. Initially, it contains only
+the metadata, organised in a tree with the following layout:
+
+.Drop metadata tree
+[source#drop-tree,subs="+macros"]
+----
+.
+|-- <<drop-json,drop.json>>
+|-- <<mirrors-json,mirrors.json>>
+|-- <<alternates-json,alternates.json>>
+`-- ids
+ |-- <<IDENTITY_ID,identity-id>>
+ | `-- <<id-json,id.json>>
+ `-- ...
+----
+
+[NOTE]
+====
+In this document, tree entries are ordered for legibility, which is not
+necessarily how they are ordered by git.
+====
+
+In <<drop-tree>>, the `mirrors.json` and `alternates.json` files are
+optional. The `ids` hierarchy contains at least all <<Identities,identities>>
+needed to verify the metadata files, where the `id.json` file represents the
+most recent revision of the identity. It is up to the implementation how to make
+previous revisions available, although most are expected to opt for a "`folded`"
+representation where previous revisions are stored as files in a subdirectory.
+
+A commit which updates metadata files may carry a free-form commit message. Data
+created by a previous patch commit SHOULD be removed from the tree.
+
+To <<record-patch,record a patch>>, the <<record-json,record.json>> is written
+to the tree adjacent to the other metadata files. If the patch contains identity
+updates, the `ids` subtree is updated accordingly.
+
+The patch <<Topics,topic>> is written as a <<git-interpret-trailers,trailer>>
+keyed "`Re:`", as shown in <<example-topic-commit>>. This allows collecting
+patches for a particular topic from the drop history without having to access
+objects deeper than the commit.
+
+
+.Simplified topic commit
+[#example-topic-commit]
+----
+commit ccd1fd5736bed6fb6342e34c9d8cbc2b9db7f326
+Author: Kim Altintop <kim@eagain.io>
+Date: Mon Dec 12 10:47:32 2022 +0100
+
+ Re: 1fdc53e27b01b440839ff1b6c14ef81c3d63d0f2b39aae8fb4abd0b565ea0b10
+----
+
+Lastly, the <<BUNDLE_HEADS>> (cf. <<patch-equivalence>>) are written to a file
+`heads` adjacent to the `record.json` file in the tree. Provided appropriate
+atomicity measures, this provides a reasonably efficient way to determine if a
+patch has been received before by simply probing the object database for
+existence of the corresponding <<BLOB_HASH>>.
+
+==== Location-independent storage
+
+Since the drop history only stores metadata, it should be suitable for
+location-independent storage inheriting some of git's data model, e.g. <<IPFS>>,
+<<Hypercore>>, or <<SSB>>. Those systems come with their own limitations,
+perhaps the most severe one in our context being the lack of a reliable and
+efficient way to propagate contributions from _unknown_ identities back to the
+root drop. Thus, exact mappings are deferred to a future revision of this
+document.
+
+We note, however, that distributing <<gitformat-bundle,git bundle>> snapshots of
+the drop history itself over protocols which support some form of name
+resolution (such as <<IPNS>>) may present an attractive bandwidth-sharing
+mechanism.
+
+
+[#record-patch]
+=== Recording patches
+
+Once a patch has passed <<drop-validation,validation>>, its reception is
+recorded in the drop history as a file containing metadata about the patch. The
+file's schema may be extended over time, where the currently defined properties
+are:
+
+.`record.json`
+[source#record-json,subs="+macros"]
+----
+{
+ "bundle": {
+ "len": <<BUNDLE_SIZE>>,
+ "hash": <<BUNDLE_HASH>>,
+ "checksum": <<BUNDLE_CHECKSUM>>,
+ "prerequisites": [
+ <<OBJECT_ID>>,
+ ...
+ ],
+ "references": {
+ <<REFNAME>>: <<OBJECT_ID>>,
+ ...
+ },
+ "encryption": "age" | "gpg",
+ "uris": [
+ <<URL>>,
+ ...
+ ]
+ },
+ "signature": {
+ "signer": <<CONTENT_HASH>>,
+ "signature": <<SIGNATURE>>,
+ }
+}
+----
+
+[[BUNDLE_SIZE]]BUNDLE_SIZE::
+ Size in bytes of the bundle file as received.
+
+[[BUNDLE_HASH]]BUNDLE_HASH::
+ SHA-256 hash over the sorted set of object ids (in bytes) referenced by the
+ bundles, i.e. both the prerequisites and reference heads.
+
+[[BUNDLE_CHECKSUM]]BUNDLE_CHECKSUM::
+ SHA-256 hash over the bundle file as received.
+
+[[BUNDLE_SIGNATURE]]BUNDLE_SIGNATURE::
+ Signature over the <<BUNDLE_HEADS>>, in hexadecimal.
+
+[[BUNDLE_HEADS]]BUNDLE_HEADS::
+ SHA-256 hash over the sorted set of object ids (in bytes) of the reference
+ heads of the bundle (i.e. without the prerequisites).
+
+The `*signature*` field captures the signature made by the submitter of the
+patch. Multiple signatures may be supported in a future revision of this
+document.
+
+The `*uris*` field enumerates alternate network addresses from which the bundle
+file may be downloaded. Since the recorded information is immutable, this is
+mainly intended for content-based addresses, such as IPFS CIDs.
+
+Additionally, the drop will want to record the hashed reference heads in an
+efficiently retrievable form, such that it can be quickly determined if a patch
+has been received before (see <<patch-equivalence>>, <<history-repr>>).
+Similarly for the patch <<Topics,topic>>.
+
+
+[#drop-validation]
+==== Validation
+
+Accepting a patch for inclusion in the drop history is subject to validation
+rules, some of which depend on preferences or policies. A public drop server
+will want to apply stricter rules before accepting a patch than a user who is
+applying a patch to a local (unpublished) drop history.
+
+The *mandatory* validations are:
+
+[.lst]
+ . The bundle file MUST be available locally before creating a log entry
+ . The bundle MUST be connected, i.e. its prerequisite objects must be
+ present in bundles received prior to the one under consideration
+ . The bundle MUST NOT have been received before (cf. <<patch-equivalence>>)
+ . The bundle MUST conform to the conventions specified in <<Patches>>
+ . The bundle MUST be signed and the signer's (i.e. submitter's)
+ <<Identities,identity>> resolvable, either from the drop state or the
+ bundle contents (or both)
+ . If the bundle contains identity updates, they MUST pass
+ <<id-verification,verification>> and MUST NOT diverge from their
+ previously recorded history (if any)
+
+[NOTE]
+====
+Validation 5. entails that a patch submission message must carry the
+<<CONTENT_HASH>> of the submitter's identity head revision.
+====
+
+Additional RECOMMENDED validations include:
+
+[.lst]
+ - restricting the size in bytes of the patch bundle
+ - restricting the number of references a bundle can convey
+ - restricting the number of commits, or total number of objects a bundle can
+ contain
+ - rejecting patches whose <<Topics,topic>> is not properly signed by the
+ submitter, does not cleanly apply to a merged history of previously
+ received patches on the same topic, or contains otherwise invalid data
+
+Beyond that, a drop may also decide to reject a patch if it is encrypted, or if
+its contents do not pass content analysis proper (e.g. Bayesian filtering).
+
+
+[#snapshots]
+=== Snapshots
+
+Over time, a drop will accumulate many small patch bundles. Repacking them into
+larger bundles is likely to reclaim storage space by means of offering more
+opportunities for delta compression. It can also be beneficial for data
+synchronisation (especially non-incremental) to avoid too many network
+roundtrips.
+
+In principle, a drop could employ a dynamic repacking scheme, and either serve
+larger than requested data when individual bundles are requested, or offer a way
+to dynamically discover snapshotted alternatives via the bundle-uri negotiation
+mechanism (see <<http-fetch-bundle>>). This would, however, preclude drops which
+delegate bundle storage entirely (such as packed or sparse
+<<MIRROR_KIND,mirrors>>) from benefiting from this optimisation. Therefore, we
+define a convention for publishing snapshots as patches on the drop itself.
+
+A snapshot is a <<Patches,patch>> posted to the well-known topic
+`SHA256("snapshots")`, i.e.:
+
+[source]
+----
+2b36a6e663158ffd942c174de74dbe163bfdb1b18f6d0ffc647e00647abca9bb
+----
+
+A snapshot bundle may either capture the entire history of the drop, or depend
+on an earlier snapshot. The bundle references capture all references of the
+patch bundles received prior to the snapshot, up until the previous snapshot if
+the snapshot is incremental. In order to be unique within the snapshot bundle,
+the patch bundle references are rewritten as follows:
+
+[.lst]
+ . Strip the `refs/` prefix
+ . Prepend `refs/it/bundles/<<BUNDLE_HEADS>>/`
+
+For example:
+
+[source#example-snapshot-refs]
+----
+refs/it/bundles/107e80b2287bc763d7a64bee9bc4401e12778c55925265255d4f2a38296262b8/heads/main 77ce512aa813988bdca54fa2ba5754f3a46c71f3
+refs/it/bundles/107e80b2287bc763d7a64bee9bc4401e12778c55925265255d4f2a38296262b8/it/topics/c44c20434bfdaa0384b67d48d6c3bb36d755b87576027671f606c404b09d9774 65cdd5234e310efc1cb0afbc7de0a2786e6dd582
+----
+
+The payload of the <<Topics,topic>> entry associated with a snapshot is not
+defined normatively. It is RECOMMENDED to use a <<message-topic,message based
+topic>>, where a payload schema could be:
+
+[source#snapshot-topic-payload,subs="+macros"]
+----
+{
+ "_type": "eagain.io/it/notes/checkpoint",
+ "kind": "snapshot",
+ "refs": {
+ <<REFNAME>>: <<OBJECT_ID>>,
+ ...
+ }
+}
+----
+
+Taking a snapshot implies privileged access to the drop repository, and a
+snapshot can only be submitted by the snapshot role.
+
+After publishing a snapshot, a drop MAY prune patch bundles recorded prior to
+the snapshot, possibly after a grace period (for example, by only pruning
+bundles older than the N-1st snapshot). When synchronising with a drop, clients
+which encounter a snapshot record should thus prefer fetching only snapshots
+from this point on in the drop history.
+
+
+[#mergepoints]
+=== Mergepoints
+
+It is often useful for a drop to convey cryptographically verifiable reference
+points for contributors to base source code changes on, i.e. long-running
+branches.
+
+While the process of agreeing on what changes are to be finalised into such
+branches can vary widely between projects, and could even involve the evaluation
+of <<crdt-topic,CRDT state>>, the final statement can be reduced to restricting
+the set of allowed signers of a patch bundle (which updates a certain set of
+branches). This is what the <<branch-roles,branch roles>> in the
+<<drop-json,drop.json>> metadata file are for: they make certain
+<<Identities,identities>> eligible for submitting _mergepoints_ affecting named
+long-running branches.
+
+A mergepoint is a <<Patches,patch>> posted to the well-known topic
+`SHA256("merges")`, i.e.:
+
+[source]
+----
+c44c20434bfdaa0384b67d48d6c3bb36d755b87576027671f606c404b09d9774
+----
+
+A mergepoint bundle may contain one or more references matching exactly the
+names specified in the drop's <<branch-roles,branch roles>>, and MUST only be
+accepted if the submitter(s)' identities are allowed as per the role definition.
+
+As with <<snapshots,snapshots>>, the topic payload is not defined normatively.
+It is RECOMMENDED to use a <<message-topic,message based topic>>, where a
+payload
+schema could be:
+
+[source#mergepoint-topic-payload,subs="+macros"]
+----
+{
+ "_type": "eagain.io/it/notes/checkpoint",
+ "kind": "merge",
+  "refs": {
+ <<REFNAME>>: <<OBJECT_ID>>,
+ ...
+ }
+}
+----
+
+Upon encountering a mergepoint properly signed by the applicable branch roles, a
+client may update the targets of a local representation of the mergepoint
+references _iff_ the local targets are in the ancestry path of the mergepoint
+targets.
+
+
+=== HTTP API
+
+<<Drops,Drops>> MAY expose an HTTP API for accepting and serving patch bundles.
+Drops listed as alternates in the drop <<alternates-json,metadata>> MUST conform
+to this API (endpoint paths are interpreted as relative to the alternate URL).
+The defined endpoints of the API are as follows:
+
+[#http-fetch-bundle]
+==== Fetching patch bundles
+
+---
+
+[source,subs="+macros"]
+----
+GET /bundles/<<BUNDLE_HASH,bundle-hash>>[.bundle|.uris]
+----
+
+---
+
+Without a file extension suffix, this endpoint conforms to the git
+<<bundle-uri>> specification: the server may either respond by sending the
+bundle file identified by <<BUNDLE_HASH,bundle-hash>>, or a bundle list.
+
+When responding with a bundle list:
+
+[.lst]
+ - `mode` MUST be `any`
+ - `<id>` segments MUST be treated as opaque by the client
+ - entries specifying a `filter` MUST be ignored by the client
+
+In addition to regular `uri` values (relative, `http://`, `https://`), `ipfs://`
+URLs are accepted. If encountered, a client MAY rewrite them to
+<<IPFS-GATEWAY,gateway URLs>> to fetch the bundle from.
+
+By specifying the `.bundle` suffix, a client instructs the server to either
+respond with the bundle file, or a 404 status, but never with a bundle list.
+Correspondingly, by specifying `.uris`, the server MUST respond with a bundle
+list, or a 404 status, but never with a bundle file.
+
+.Example bundle list
+[source]
+----
+[bundle]
+ version = 1
+ mode = any
+ heuristic = creationToken
+
+[bundle "8aea1a1c20b09ed9ad4737adc6319203d65a0026ac86873f84f7961bd42f132c"]
+ uri = /bundles/6c4d3d4e4db8e37c698f891e94780c63e1b94f87c67925cd30163915c7d7923e.bundle
+
+[bundle "816dc1231cb1b82a91144ebb9e325c3655f4b4da30f806a84fa86fdb06ca9c04"]
+ uri = https://it.example.com/bundles/6c4d3d4e4db8e37c698f891e94780c63e1b94f87c67925cd30163915c7d7923e.bundle
+ creationToken = 1670838467
+
+[bundle "f4ecc80c9339ecdbc2a8f4c0d19e990f8ee9631e6b7f3e044b86c35fe69505d3"]
+ uri = ipfs://QmVTw4vVFWkPBN6ZT7To4BHoNBfaBNjVJ17wK15tci6bn1
+ creationToken = 1670839391
+----
+
+
+[#http-submit-patch]
+==== Submitting patches
+
+---
+
+[source,subs="+macros"]
+----
+POST /patches
+<<HEADER_SIGNATURE>>
+----
+
+---
+[[HEADER_SIGNATURE]]HEADER_SIGNATURE::
+ A <<BUNDLE_SIGNATURE>> and corresponding identity <<CONTENT_HASH>>, encoded
+ suitable for use as a HTTP header value:
++
+[source,subs="+macros"]
+----
+X-it-signature: s1={<<BLOB_HASH>>}; s2={<<BLOB_HASH>>}; sd={<<BUNDLE_SIGNATURE>>}
+----
+
+The body of this request is a bundle file. The bundle signature is transmitted
+as a HTTP header, allowing for the bundle file to be streamed directly from
+disk.
+
+Once the drop server has received the request body, it attempts to
+<<record-patch,record the patch>>, and responds with the corresponding
+<<record-json,record.json>> document, or an error.
+
+Optionally, the server MAY accept a request of the form:
+
+---
+
+[source#request-pull,subs="+macros"]
+----
+POST /patches/request-pull
+Content-Type: application/x-www-form-urlencoded
+<<HEADER_SIGNATURE>>
+
+url=<<URL>>
+----
+
+---
+
+If accepted, the server attempts to fetch the bundle file from the URL given in
+the form field before continuing as if the bundle was submitted directly in the
+request body. Otherwise, the server responds with an error code in the 4xx range
+to indicate that this method of submission is not supported.
+
+
+== Future work
+
+We found that git bundles are a simple yet effective container format. They are,
+however, not extensible: git, being the reference implementation, rejects
+bundles whose header does not exactly conform to the specified format. While
+compatibility with upstream git was a design goal for the current iteration of
+_it_, we may want to evolve the format independently in the future, e.g. by
+embedding cryptographic signatures directly in the file.
+
+We have deliberately not mandated strict schema checking for topic payloads
+respectively CRDT objects, although we acknowledge that interoperability will
+eventually demand for some method to be devised. Since the design space is quite
+large -- ranging from static schema definitions to runtime evaluation of a
+dynamic interpreter -- this would have been well beyond the scope of the
+current specification.
+
+---
+
+[discrete]
+== Acknowledgements
+
+The author would like to thank Alex Good for a perpetual supply of ideas worth
+considering.
+
+---
+
+[discrete]
+== Copyright notice
+
+Copyright © 2022-2023 Kim Altintop. This work is made available under the
+<<CC-BY-SA-4,Creative Commons Attribution-ShareAlike 4.0 International License>>. To the
+extent portions of it are incorporated into source code, such portions in the
+source code are licensed under either the <<Apache-2,Apache License 2.0>> or the
+<<MIT,MIT license>> at your option.
+
+
+[bibliography]
+== References
+
+// IETF
+* [[[RFC2119]]]: https://datatracker.ietf.org/doc/html/rfc2119
+* [[[RFC3339]]]: https://datatracker.ietf.org/doc/html/rfc3339#section-5.6
+* [[[RFC4253]]]: https://datatracker.ietf.org/doc/html/rfc4253
+* [[[RFC5656]]]: https://datatracker.ietf.org/doc/html/rfc5656
+* [[[RFC8174]]]: https://datatracker.ietf.org/doc/html/rfc8174
+* [[[RFC8709]]]: https://datatracker.ietf.org/doc/html/rfc8709
+* [[[ssh-agent]]]: https://datatracker.ietf.org/doc/html/draft-miller-ssh-agent
+
+// Other specs
+* [[[automerge-change]]]: https://alexjg.github.io/automerge-storage-docs/#change-reference
+* [[[Canonical-JSON]]]: http://wiki.laptop.org/go/Canonical_JSON
+* [[[DID]]]: https://www.w3.org/TR/did-core
+* [[[semver]]]: https://semver.org
+* [[[TUF]]]: https://theupdateframework.github.io/specification/latest
+* [[[WHATWG-URL]]]: https://url.spec.whatwg.org
+
+// Licenses
+* [[[Apache-2]]]: https://www.apache.org/licenses/LICENSE-2.0
+* [[[CC-BY-SA-4]]]: https://creativecommons.org/licenses/by-sa/4.0
+* [[[MIT]]]: https://spdx.org/licenses/MIT.html
+
+// git
+* [[[bundle-uri]]]: https://git-scm.com/docs/bundle-uri
+* [[[git-check-ref-format]]]: https://git-scm.com/docs/git-check-ref-format
+* [[[git-format-patch]]]: https://git-scm.com/docs/git-format-patch
+* [[[git-hash-object]]]: https://git-scm.com/docs/git-hash-object
+* [[[git-interpret-trailers]]]: https://git-scm.com/docs/git-interpret-trailers
+* [[[git]]]: https://git-scm.com
+* [[[gitformat-bundle]]]: https://git-scm.com/docs/gitformat-bundle
+* [[[gitglossary]]]: https://git-scm.com/docs/gitglossary
+
+// Patch Theory
+* [[[Darcs]]]: https://en.wikibooks.org/wiki/Understanding_Darcs/Patch_theory
+* [[[CaPT]]]: https://arxiv.org/abs/1311.3903
+* [[[HoPT]]]: https://www.cambridge.org/core/journals/journal-of-functional-programming/article/homotopical-patch-theory/42AD8BB8A91688BCAC16FD4D6A2C3FE7
+
+// Misc
+* [[[age]]]: https://age-encryption.org/v1
+* [[[Automerge]]]: https://automerge.org
+* [[[CRDT]]]: https://en.wikipedia.org/wiki/Conflict-free_replicated_data_type
+* [[[Hypercore]]]: https://hypercore-protocol.org
+* [[[IPFS-GATEWAY]]]: https://docs.ipfs.tech/concepts/ipfs-gateway
+* [[[IPFS]]]: https://ipfs.tech
+* [[[IPNS]]]: https://docs.ipfs.tech/concepts/ipns
+* [[[local-first]]]: https://www.inkandswitch.com/local-first/
+* [[[OpenSSH]]]: https://www.openssh.com
+* [[[SSB]]]: https://scuttlebutt.nz
diff --git a/LICENSES/Apache-2.0 b/LICENSES/Apache-2.0
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/LICENSES/Apache-2.0
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/LICENSES/CC-BY-SA-4.0 b/LICENSES/CC-BY-SA-4.0
new file mode 100644
index 0000000..7d4f96c
--- /dev/null
+++ b/LICENSES/CC-BY-SA-4.0
@@ -0,0 +1,427 @@
+Attribution-ShareAlike 4.0 International
+
+=======================================================================
+
+Creative Commons Corporation ("Creative Commons") is not a law firm and
+does not provide legal services or legal advice. Distribution of
+Creative Commons public licenses does not create a lawyer-client or
+other relationship. Creative Commons makes its licenses and related
+information available on an "as-is" basis. Creative Commons gives no
+warranties regarding its licenses, any material licensed under their
+terms and conditions, or any related information. Creative Commons
+disclaims all liability for damages resulting from their use to the
+fullest extent possible.
+
+Using Creative Commons Public Licenses
+
+Creative Commons public licenses provide a standard set of terms and
+conditions that creators and other rights holders may use to share
+original works of authorship and other material subject to copyright
+and certain other rights specified in the public license below. The
+following considerations are for informational purposes only, are not
+exhaustive, and do not form part of our licenses.
+
+ Considerations for licensors: Our public licenses are
+ intended for use by those authorized to give the public
+ permission to use material in ways otherwise restricted by
+ copyright and certain other rights. Our licenses are
+ irrevocable. Licensors should read and understand the terms
+ and conditions of the license they choose before applying it.
+ Licensors should also secure all rights necessary before
+ applying our licenses so that the public can reuse the
+ material as expected. Licensors should clearly mark any
+ material not subject to the license. This includes other CC-
+ licensed material, or material used under an exception or
+ limitation to copyright. More considerations for licensors:
+ wiki.creativecommons.org/Considerations_for_licensors
+
+ Considerations for the public: By using one of our public
+ licenses, a licensor grants the public permission to use the
+ licensed material under specified terms and conditions. If
+ the licensor's permission is not necessary for any reason--for
+ example, because of any applicable exception or limitation to
+ copyright--then that use is not regulated by the license. Our
+ licenses grant only permissions under copyright and certain
+ other rights that a licensor has authority to grant. Use of
+ the licensed material may still be restricted for other
+ reasons, including because others have copyright or other
+ rights in the material. A licensor may make special requests,
+ such as asking that all changes be marked or described.
+ Although not required by our licenses, you are encouraged to
+ respect those requests where reasonable. More considerations
+ for the public:
+ wiki.creativecommons.org/Considerations_for_licensees
+
+=======================================================================
+
+Creative Commons Attribution-ShareAlike 4.0 International Public
+License
+
+By exercising the Licensed Rights (defined below), You accept and agree
+to be bound by the terms and conditions of this Creative Commons
+Attribution-ShareAlike 4.0 International Public License ("Public
+License"). To the extent this Public License may be interpreted as a
+contract, You are granted the Licensed Rights in consideration of Your
+acceptance of these terms and conditions, and the Licensor grants You
+such rights in consideration of benefits the Licensor receives from
+making the Licensed Material available under these terms and
+conditions.
+
+
+Section 1 -- Definitions.
+
+ a. Adapted Material means material subject to Copyright and Similar
+ Rights that is derived from or based upon the Licensed Material
+ and in which the Licensed Material is translated, altered,
+ arranged, transformed, or otherwise modified in a manner requiring
+ permission under the Copyright and Similar Rights held by the
+ Licensor. For purposes of this Public License, where the Licensed
+ Material is a musical work, performance, or sound recording,
+ Adapted Material is always produced where the Licensed Material is
+ synched in timed relation with a moving image.
+
+ b. Adapter's License means the license You apply to Your Copyright
+ and Similar Rights in Your contributions to Adapted Material in
+ accordance with the terms and conditions of this Public License.
+
+ c. BY-SA Compatible License means a license listed at
+ creativecommons.org/compatiblelicenses, approved by Creative
+ Commons as essentially the equivalent of this Public License.
+
+ d. Copyright and Similar Rights means copyright and/or similar rights
+ closely related to copyright including, without limitation,
+ performance, broadcast, sound recording, and Sui Generis Database
+ Rights, without regard to how the rights are labeled or
+ categorized. For purposes of this Public License, the rights
+ specified in Section 2(b)(1)-(2) are not Copyright and Similar
+ Rights.
+
+ e. Effective Technological Measures means those measures that, in the
+ absence of proper authority, may not be circumvented under laws
+ fulfilling obligations under Article 11 of the WIPO Copyright
+ Treaty adopted on December 20, 1996, and/or similar international
+ agreements.
+
+ f. Exceptions and Limitations means fair use, fair dealing, and/or
+ any other exception or limitation to Copyright and Similar Rights
+ that applies to Your use of the Licensed Material.
+
+ g. License Elements means the license attributes listed in the name
+ of a Creative Commons Public License. The License Elements of this
+ Public License are Attribution and ShareAlike.
+
+ h. Licensed Material means the artistic or literary work, database,
+ or other material to which the Licensor applied this Public
+ License.
+
+ i. Licensed Rights means the rights granted to You subject to the
+ terms and conditions of this Public License, which are limited to
+ all Copyright and Similar Rights that apply to Your use of the
+ Licensed Material and that the Licensor has authority to license.
+
+ j. Licensor means the individual(s) or entity(ies) granting rights
+ under this Public License.
+
+ k. Share means to provide material to the public by any means or
+ process that requires permission under the Licensed Rights, such
+ as reproduction, public display, public performance, distribution,
+ dissemination, communication, or importation, and to make material
+ available to the public including in ways that members of the
+ public may access the material from a place and at a time
+ individually chosen by them.
+
+ l. Sui Generis Database Rights means rights other than copyright
+ resulting from Directive 96/9/EC of the European Parliament and of
+ the Council of 11 March 1996 on the legal protection of databases,
+ as amended and/or succeeded, as well as other essentially
+ equivalent rights anywhere in the world.
+
+ m. You means the individual or entity exercising the Licensed Rights
+ under this Public License. Your has a corresponding meaning.
+
+
+Section 2 -- Scope.
+
+ a. License grant.
+
+ 1. Subject to the terms and conditions of this Public License,
+ the Licensor hereby grants You a worldwide, royalty-free,
+ non-sublicensable, non-exclusive, irrevocable license to
+ exercise the Licensed Rights in the Licensed Material to:
+
+ a. reproduce and Share the Licensed Material, in whole or
+ in part; and
+
+ b. produce, reproduce, and Share Adapted Material.
+
+ 2. Exceptions and Limitations. For the avoidance of doubt, where
+ Exceptions and Limitations apply to Your use, this Public
+ License does not apply, and You do not need to comply with
+ its terms and conditions.
+
+ 3. Term. The term of this Public License is specified in Section
+ 6(a).
+
+ 4. Media and formats; technical modifications allowed. The
+ Licensor authorizes You to exercise the Licensed Rights in
+ all media and formats whether now known or hereafter created,
+ and to make technical modifications necessary to do so. The
+ Licensor waives and/or agrees not to assert any right or
+ authority to forbid You from making technical modifications
+ necessary to exercise the Licensed Rights, including
+ technical modifications necessary to circumvent Effective
+ Technological Measures. For purposes of this Public License,
+ simply making modifications authorized by this Section 2(a)
+ (4) never produces Adapted Material.
+
+ 5. Downstream recipients.
+
+ a. Offer from the Licensor -- Licensed Material. Every
+ recipient of the Licensed Material automatically
+ receives an offer from the Licensor to exercise the
+ Licensed Rights under the terms and conditions of this
+ Public License.
+
+ b. Additional offer from the Licensor -- Adapted Material.
+ Every recipient of Adapted Material from You
+ automatically receives an offer from the Licensor to
+ exercise the Licensed Rights in the Adapted Material
+ under the conditions of the Adapter's License You apply.
+
+ c. No downstream restrictions. You may not offer or impose
+ any additional or different terms or conditions on, or
+ apply any Effective Technological Measures to, the
+ Licensed Material if doing so restricts exercise of the
+ Licensed Rights by any recipient of the Licensed
+ Material.
+
+ 6. No endorsement. Nothing in this Public License constitutes or
+ may be construed as permission to assert or imply that You
+ are, or that Your use of the Licensed Material is, connected
+ with, or sponsored, endorsed, or granted official status by,
+ the Licensor or others designated to receive attribution as
+ provided in Section 3(a)(1)(A)(i).
+
+ b. Other rights.
+
+ 1. Moral rights, such as the right of integrity, are not
+ licensed under this Public License, nor are publicity,
+ privacy, and/or other similar personality rights; however, to
+ the extent possible, the Licensor waives and/or agrees not to
+ assert any such rights held by the Licensor to the limited
+ extent necessary to allow You to exercise the Licensed
+ Rights, but not otherwise.
+
+ 2. Patent and trademark rights are not licensed under this
+ Public License.
+
+ 3. To the extent possible, the Licensor waives any right to
+ collect royalties from You for the exercise of the Licensed
+ Rights, whether directly or through a collecting society
+ under any voluntary or waivable statutory or compulsory
+ licensing scheme. In all other cases the Licensor expressly
+ reserves any right to collect such royalties.
+
+
+Section 3 -- License Conditions.
+
+Your exercise of the Licensed Rights is expressly made subject to the
+following conditions.
+
+ a. Attribution.
+
+ 1. If You Share the Licensed Material (including in modified
+ form), You must:
+
+ a. retain the following if it is supplied by the Licensor
+ with the Licensed Material:
+
+ i. identification of the creator(s) of the Licensed
+ Material and any others designated to receive
+ attribution, in any reasonable manner requested by
+ the Licensor (including by pseudonym if
+ designated);
+
+ ii. a copyright notice;
+
+ iii. a notice that refers to this Public License;
+
+ iv. a notice that refers to the disclaimer of
+ warranties;
+
+ v. a URI or hyperlink to the Licensed Material to the
+ extent reasonably practicable;
+
+ b. indicate if You modified the Licensed Material and
+ retain an indication of any previous modifications; and
+
+ c. indicate the Licensed Material is licensed under this
+ Public License, and include the text of, or the URI or
+ hyperlink to, this Public License.
+
+ 2. You may satisfy the conditions in Section 3(a)(1) in any
+ reasonable manner based on the medium, means, and context in
+ which You Share the Licensed Material. For example, it may be
+ reasonable to satisfy the conditions by providing a URI or
+ hyperlink to a resource that includes the required
+ information.
+
+ 3. If requested by the Licensor, You must remove any of the
+ information required by Section 3(a)(1)(A) to the extent
+ reasonably practicable.
+
+ b. ShareAlike.
+
+ In addition to the conditions in Section 3(a), if You Share
+ Adapted Material You produce, the following conditions also apply.
+
+ 1. The Adapter's License You apply must be a Creative Commons
+ license with the same License Elements, this version or
+ later, or a BY-SA Compatible License.
+
+ 2. You must include the text of, or the URI or hyperlink to, the
+ Adapter's License You apply. You may satisfy this condition
+ in any reasonable manner based on the medium, means, and
+ context in which You Share Adapted Material.
+
+ 3. You may not offer or impose any additional or different terms
+ or conditions on, or apply any Effective Technological
+ Measures to, Adapted Material that restrict exercise of the
+ rights granted under the Adapter's License You apply.
+
+
+Section 4 -- Sui Generis Database Rights.
+
+Where the Licensed Rights include Sui Generis Database Rights that
+apply to Your use of the Licensed Material:
+
+ a. for the avoidance of doubt, Section 2(a)(1) grants You the right
+ to extract, reuse, reproduce, and Share all or a substantial
+ portion of the contents of the database;
+
+ b. if You include all or a substantial portion of the database
+ contents in a database in which You have Sui Generis Database
+ Rights, then the database in which You have Sui Generis Database
+ Rights (but not its individual contents) is Adapted Material,
+ including for purposes of Section 3(b); and
+
+ c. You must comply with the conditions in Section 3(a) if You Share
+ all or a substantial portion of the contents of the database.
+
+For the avoidance of doubt, this Section 4 supplements and does not
+replace Your obligations under this Public License where the Licensed
+Rights include other Copyright and Similar Rights.
+
+
+Section 5 -- Disclaimer of Warranties and Limitation of Liability.
+
+ a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE
+ EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS
+ AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF
+ ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS,
+ IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION,
+ WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR
+ PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS,
+ ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT
+ KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT
+ ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU.
+
+ b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE
+ TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION,
+ NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT,
+ INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES,
+ COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR
+ USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN
+ ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR
+ DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR
+ IN PART, THIS LIMITATION MAY NOT APPLY TO YOU.
+
+ c. The disclaimer of warranties and limitation of liability provided
+ above shall be interpreted in a manner that, to the extent
+ possible, most closely approximates an absolute disclaimer and
+ waiver of all liability.
+
+
+Section 6 -- Term and Termination.
+
+ a. This Public License applies for the term of the Copyright and
+ Similar Rights licensed here. However, if You fail to comply with
+ this Public License, then Your rights under this Public License
+ terminate automatically.
+
+ b. Where Your right to use the Licensed Material has terminated under
+ Section 6(a), it reinstates:
+
+ 1. automatically as of the date the violation is cured, provided
+ it is cured within 30 days of Your discovery of the
+ violation; or
+
+ 2. upon express reinstatement by the Licensor.
+
+ For the avoidance of doubt, this Section 6(b) does not affect any
+ right the Licensor may have to seek remedies for Your violations
+ of this Public License.
+
+ c. For the avoidance of doubt, the Licensor may also offer the
+ Licensed Material under separate terms or conditions or stop
+ distributing the Licensed Material at any time; however, doing so
+ will not terminate this Public License.
+
+ d. Sections 1, 5, 6, 7, and 8 survive termination of this Public
+ License.
+
+
+Section 7 -- Other Terms and Conditions.
+
+ a. The Licensor shall not be bound by any additional or different
+ terms or conditions communicated by You unless expressly agreed.
+
+ b. Any arrangements, understandings, or agreements regarding the
+ Licensed Material not stated herein are separate from and
+ independent of the terms and conditions of this Public License.
+
+
+Section 8 -- Interpretation.
+
+ a. For the avoidance of doubt, this Public License does not, and
+ shall not be interpreted to, reduce, limit, restrict, or impose
+ conditions on any use of the Licensed Material that could lawfully
+ be made without permission under this Public License.
+
+ b. To the extent possible, if any provision of this Public License is
+ deemed unenforceable, it shall be automatically reformed to the
+ minimum extent necessary to make it enforceable. If the provision
+ cannot be reformed, it shall be severed from this Public License
+ without affecting the enforceability of the remaining terms and
+ conditions.
+
+ c. No term or condition of this Public License will be waived and no
+ failure to comply consented to unless expressly agreed to by the
+ Licensor.
+
+ d. Nothing in this Public License constitutes or may be interpreted
+ as a limitation upon, or waiver of, any privileges and immunities
+ that apply to the Licensor or You, including from the legal
+ processes of any jurisdiction or authority.
+
+
+=======================================================================
+
+Creative Commons is not a party to its public
+licenses. Notwithstanding, Creative Commons may elect to apply one of
+its public licenses to material it publishes and in those instances
+will be considered the “Licensor.” The text of the Creative Commons
+public licenses is dedicated to the public domain under the CC0 Public
+Domain Dedication. Except for the limited purpose of indicating that
+material is shared under a Creative Commons public license or as
+otherwise permitted by the Creative Commons policies published at
+creativecommons.org/policies, Creative Commons does not authorize the
+use of the trademark "Creative Commons" or any other trademark or logo
+of Creative Commons without its prior written consent including,
+without limitation, in connection with any unauthorized modifications
+to any of its public licenses or any other arrangements,
+understandings, or agreements concerning use of licensed material. For
+the avoidance of doubt, this paragraph does not form part of the
+public licenses.
+
+Creative Commons may be contacted at creativecommons.org.
diff --git a/LICENSES/GPL-2.0 b/LICENSES/GPL-2.0
new file mode 100644
index 0000000..d159169
--- /dev/null
+++ b/LICENSES/GPL-2.0
@@ -0,0 +1,339 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users. This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it. (Some other Free Software Foundation software is covered by
+the GNU Lesser General Public License instead.) You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+ To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have. You must make sure that they, too, receive or can get the
+source code. And you must show them these terms so they know their
+rights.
+
+ We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+ Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software. If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+ Finally, any free program is threatened constantly by software
+patents. We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary. To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ GNU GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License. The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language. (Hereinafter, translation is included without limitation in
+the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+ 1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+ 2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) You must cause the modified files to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ b) You must cause any work that you distribute or publish, that in
+ whole or in part contains or is derived from the Program or any
+ part thereof, to be licensed as a whole at no charge to all third
+ parties under the terms of this License.
+
+ c) If the modified program normally reads commands interactively
+ when run, you must cause it, when started running for such
+ interactive use in the most ordinary way, to print or display an
+ announcement including an appropriate copyright notice and a
+ notice that there is no warranty (or else, saying that you provide
+ a warranty) and that users may redistribute the program under
+ these conditions, and telling the user how to view a copy of this
+ License. (Exception: if the Program itself is interactive but
+ does not normally print such an announcement, your work based on
+ the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+ a) Accompany it with the complete corresponding machine-readable
+ source code, which must be distributed under the terms of Sections
+ 1 and 2 above on a medium customarily used for software interchange; or,
+
+ b) Accompany it with a written offer, valid for at least three
+ years, to give any third party, for a charge no more than your
+ cost of physically performing source distribution, a complete
+ machine-readable copy of the corresponding source code, to be
+ distributed under the terms of Sections 1 and 2 above on a medium
+ customarily used for software interchange; or,
+
+ c) Accompany it with the information you received as to the offer
+ to distribute corresponding source code. (This alternative is
+ allowed only for noncommercial distribution and only if you
+ received the program in object code or executable form with such
+ an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it. For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable. However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License. Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+ 5. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Program or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+ 6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+ 7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all. For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded. In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+ 9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation. If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+ 10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission. For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this. Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+ NO WARRANTY
+
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along
+ with this program; if not, write to the Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+ Gnomovision version 69, Copyright (C) year name of author
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+ `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+ <signature of Ty Coon>, 1 April 1989
+ Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs. If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.
diff --git a/LICENSES/MIT b/LICENSES/MIT
new file mode 100644
index 0000000..a348e30
--- /dev/null
+++ b/LICENSES/MIT
@@ -0,0 +1,20 @@
+MIT License
+
+Copyright (c) <year> <copyright holders>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/LICENSES/OpenSSL b/LICENSES/OpenSSL
new file mode 100644
index 0000000..f86c523
--- /dev/null
+++ b/LICENSES/OpenSSL
@@ -0,0 +1,123 @@
+ LICENSE ISSUES
+ ==============
+
+ The OpenSSL toolkit stays under a double license, i.e. both the conditions of
+ the OpenSSL License and the original SSLeay license apply to the toolkit.
+ See below for the actual license texts.
+
+ OpenSSL License
+ ---------------
+
+/* ====================================================================
+ * Copyright (c) 1998-2019 The OpenSSL Project. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ *
+ * 3. All advertising materials mentioning features or use of this
+ * software must display the following acknowledgment:
+ * "This product includes software developed by the OpenSSL Project
+ * for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+ *
+ * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+ * endorse or promote products derived from this software without
+ * prior written permission. For written permission, please contact
+ * openssl-core@openssl.org.
+ *
+ * 5. Products derived from this software may not be called "OpenSSL"
+ * nor may "OpenSSL" appear in their names without prior written
+ * permission of the OpenSSL Project.
+ *
+ * 6. Redistributions of any form whatsoever must retain the following
+ * acknowledgment:
+ * "This product includes software developed by the OpenSSL Project
+ * for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+ * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
+ * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ * OF THE POSSIBILITY OF SUCH DAMAGE.
+ * ====================================================================
+ *
+ * This product includes cryptographic software written by Eric Young
+ * (eay@cryptsoft.com). This product includes software written by Tim
+ * Hudson (tjh@cryptsoft.com).
+ *
+ */
+
+ Original SSLeay License
+ -----------------------
+
+/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+ * All rights reserved.
+ *
+ * This package is an SSL implementation written
+ * by Eric Young (eay@cryptsoft.com).
+ * The implementation was written so as to conform with Netscapes SSL.
+ *
+ * This library is free for commercial and non-commercial use as long as
+ * the following conditions are aheared to. The following conditions
+ * apply to all code found in this distribution, be it the RC4, RSA,
+ * lhash, DES, etc., code; not just the SSL code. The SSL documentation
+ * included with this distribution is covered by the same copyright terms
+ * except that the holder is Tim Hudson (tjh@cryptsoft.com).
+ *
+ * Copyright remains Eric Young's, and as such any Copyright notices in
+ * the code are not to be removed.
+ * If this package is used in a product, Eric Young should be given attribution
+ * as the author of the parts of the library used.
+ * This can be in the form of a textual message at program startup or
+ * in documentation (online or textual) provided with the package.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. All advertising materials mentioning features or use of this software
+ * must display the following acknowledgement:
+ * "This product includes cryptographic software written by
+ * Eric Young (eay@cryptsoft.com)"
+ * The word 'cryptographic' can be left out if the rouines from the library
+ * being used are not cryptographic related :-).
+ * 4. If you include any Windows specific code (or a derivative thereof) from
+ * the apps directory (application code) you must include an acknowledgement:
+ * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * The licence and distribution terms for any publically available version or
+ * derivative of this code cannot be changed. i.e. this code cannot simply be
+ * copied and put under another distribution licence
+ * [including the GNU Public Licence.]
+ */
diff --git a/LICENSES/exceptions/openvpn-openssl-exception b/LICENSES/exceptions/openvpn-openssl-exception
new file mode 100644
index 0000000..c63327f
--- /dev/null
+++ b/LICENSES/exceptions/openvpn-openssl-exception
@@ -0,0 +1,10 @@
+Special exception for linking `it` with OpenSSL:
+
+In addition, as a special exception, the copyright holders give permission to
+link the code of this program with the OpenSSL Library (or with modified
+versions of OpenSSL that use the same license as OpenSSL), and distribute linked
+combinations including the two. You must obey the GNU General Public License in
+all respects for all of the code used other than OpenSSL. If you modify this
+file, you may extend this exception to your version of the file, but you are not
+obligated to do so. If you do not wish to do so, delete this exception statement
+from your version.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..2b902f6
--- /dev/null
+++ b/README.md
@@ -0,0 +1,36 @@
+# it: zero-g git
+
+_it_ aims to augment git with primitives to build integrated, cryptographically
+verifiable collaboration workflows around source code. It maintains the
+distributed property of git, not requiring a central server. _it_ is transport
+agnostic, and permits data dissemination in client-server, federated, as well as
+peer-to-peer network topologies.
+
+
+## Status
+
+The [spec](./Documentation/spec.adoc) is thought of as being stable, in the
+sense that any amendments will consider backwards compatibility.
+
+The source code in this repository has been developed exploratively, in order to
+validate and refine the ideas laid out in above document. As such, it is
+incomplete, may occasionally malfunction, and does not yet provide the fine bone
+porcelain rendering it usable in anger. It's a prototype, if you wish.
+
+_it_ is actively developed on a volunteer basis.
+
+
+## Usage
+
+The _it_ implementation is written in Rust and can be installed from source
+using [cargo](https://doc.rust-lang.org/cargo/):
+
+ cargo install --git https://git.eagain.io/it
+
+To get an overview, see the [getting started](./Documentation/getting-started.adoc)
+document.
+
+
+## License
+
+GPL-2.0, see [COPYING](./COPYING)
diff --git a/TODO b/TODO
new file mode 100644
index 0000000..0c98462
--- /dev/null
+++ b/TODO
@@ -0,0 +1,55 @@
+- UI
+ - Fetch drop bundles
+ - git (from "dumb" mirrors)
+
+ - Apply patches from foreign drops
+ - Allow building drop state from foreign drop
+
+ - Improve $EDITOR use
+ - don't launch unless isatty
+ - for metadata json, use ft=javascript and add help text
+ - re-launch when edit error
+
+ - List auto-branches
+ - More topic traversals (DFS, BFS, ..)
+
+ - Handle $PAGER, like git does?
+ - Diff/apply patch?
+ - Teardown?
+
+- Internals
+ - Cache topic subjects
+ - Open repo eagerly at outermost command layer
+
+ So we can return actual iterators from commands. Do we need a repo always,
+ though?
+
+ - Index bundle hash -> header meta
+
+ For dynamic repacking
+
+ - Upstream bindings to git_indexer_*
+
+ So that we can resolve delta bases without committing to the odb
+ immediately. This would allow to deduplicate packfile storage, and
+ partition the odb as follows:
+
+ - store the bundle pack and associated index in a directory separate from
+ the repo objects (possibly scoped by drop)
+ - add this directory as an alternate to the repo, preventing compaction
+ initiated by git
+ - strip the pack data from the bundle and replace with a pointer to the
+ pack
+ - snapshots now become compactions of this alternate objects store,
+ controlled by `it`
+ - when a snapshot is taken, dangling bundle (-headers) may either be
+ pruned, resulting in a 404 from the HTTP server when the bundle is
+ requested, or the pointer be replaced by a marker indicating that the
+ bundle was made obsolete by the corresponding snapshot bundle. The HTTP
+ server may then send a redirect to the snapshot instead.
+
+- Protocol
+ - Implement automerge topics
+
+- HTTP
+ - Accept checkpoints if authorised
diff --git a/src/bin/it.rs b/src/bin/it.rs
new file mode 100644
index 0000000..e6d5d4c
--- /dev/null
+++ b/src/bin/it.rs
@@ -0,0 +1,181 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ io,
+ path::PathBuf,
+};
+
+use clap::ValueHint;
+use clap_complete::Shell;
+
+static OUTPUT: it::Output = it::Output;
+
+fn main() -> it::Result<()> {
+ use clap::Parser as _;
+
+ log::set_logger(&OUTPUT)?;
+ log::set_max_level(
+ std::env::var("RUST_LOG")
+ .ok()
+ .and_then(|v| v.parse().ok())
+ .unwrap_or(log::LevelFilter::Info),
+ );
+
+ let cli = It::parse();
+ match cli.cmd {
+ Cmd::Cmd(cmd) => cmd
+ .run()
+ .and_then(|o| render(o, cli.compact))
+ .or_else(|e| e.downcast::<it::cmd::Aborted>().map(|_aborted| ())),
+ Cmd::Hidden(cmd) => match cmd {
+ Hidden::Man { out } => hidden::mangen(&out),
+ Hidden::Completions { shell, out } => hidden::completions(shell, out.as_deref()),
+ },
+ }
+}
+
+/// it: zero-g git
+#[derive(Debug, clap::Parser)]
+#[clap(author, version, about, propagate_version = true, max_term_width = 100)]
+struct It {
+ /// Path to the git repository containing the drop state
+ #[clap(
+ long,
+ value_parser,
+ value_name = "DIR",
+ env = "GIT_DIR",
+ default_value_os_t = std::env::current_dir().unwrap(),
+ value_hint = ValueHint::DirPath,
+ global = true,
+ )]
+ git_dir: PathBuf,
+ /// Do not pretty-print the output
+ #[clap(long, value_parser, default_value_t = false, global = true)]
+ compact: bool,
+ #[clap(subcommand)]
+ cmd: Cmd,
+}
+
+fn render(output: it::cmd::Output, compact: bool) -> it::Result<()> {
+ use it::cmd::Output::*;
+
+ let go = |v| {
+ let out = io::stdout();
+ if compact {
+ serde_json::to_writer(out, &v)
+ } else {
+ serde_json::to_writer_pretty(out, &v)
+ }
+ };
+
+ match output {
+ Val(v) => go(v)?,
+ Iter(i) => {
+ for v in i {
+ let v = v?;
+ go(v)?;
+ println!();
+ }
+ },
+ }
+
+ Ok(())
+}
+
+#[derive(Debug, clap::Subcommand)]
+#[allow(clippy::large_enum_variant)]
+enum Cmd {
+ #[clap(flatten)]
+ Cmd(it::Cmd),
+ #[clap(flatten)]
+ Hidden(Hidden),
+}
+
+#[derive(Debug, clap::Subcommand)]
+#[clap(hide = true)]
+enum Hidden {
+ /// Generate man pages
+ #[clap(hide = true)]
+ Man {
+ /// Output to this directory
+ #[clap(
+ value_parser,
+ default_value = "man",
+ value_name = "DIR",
+ value_hint = ValueHint::DirPath,
+ )]
+ out: PathBuf,
+ },
+ /// Generate shell completions
+ #[clap(hide = true)]
+ Completions {
+ /// The shell to generate completions for
+ #[clap(value_parser)]
+ shell: Shell,
+ /// Output file (stdout if not set)
+ #[clap(value_parser, value_name = "FILE", value_hint = ValueHint::FilePath)]
+ out: Option<PathBuf>,
+ },
+}
+
+mod hidden {
+ use std::{
+ fs::File,
+ io,
+ path::Path,
+ };
+
+ use clap::CommandFactory as _;
+ use clap_complete::Shell;
+ use clap_mangen::Man;
+
+ pub fn mangen(out: &Path) -> it::Result<()> {
+ std::fs::create_dir_all(out)?;
+ let it = super::It::command();
+ for cmd in it.get_subcommands() {
+ if cmd.get_name() == "dev" {
+ continue;
+ }
+ for sub in cmd.get_subcommands() {
+ let name = format!("{}-{}-{}", it.get_name(), cmd.get_name(), sub.get_name());
+ let filename = out.join(&name).with_extension("1");
+
+ let the_cmd = sub.clone().name(&name);
+ let man = Man::new(the_cmd)
+ .title(name.to_uppercase())
+ .section("1")
+ .manual("It Manual");
+
+ eprintln!("Generating {}...", filename.display());
+ man.render(
+ &mut File::options()
+ .write(true)
+ .create(true)
+ .truncate(true)
+ .open(&filename)?,
+ )?;
+ }
+ }
+
+ Ok(())
+ }
+
+ pub fn completions(shell: Shell, out: Option<&Path>) -> it::Result<()> {
+ match out {
+ Some(path) => {
+ let mut out = File::options()
+ .write(true)
+ .create(true)
+ .truncate(true)
+ .open(path)?;
+ clap_complete::generate(shell, &mut super::It::command(), "it", &mut out);
+ },
+ None => {
+ clap_complete::generate(shell, &mut super::It::command(), "it", &mut io::stdout());
+ },
+ }
+
+ Ok(())
+ }
+}
diff --git a/src/bundle.rs b/src/bundle.rs
new file mode 100644
index 0000000..25eafd0
--- /dev/null
+++ b/src/bundle.rs
@@ -0,0 +1,114 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::io;
+
+use log::info;
+use sha2::{
+ Digest,
+ Sha256,
+};
+use url::Url;
+
+use crate::io::{
+ HashWriter,
+ LenWriter,
+};
+
+pub mod error;
+
+mod fetch;
+pub use fetch::{
+ Fetched,
+ Fetcher,
+};
+
+mod header;
+pub use header::{
+ Hash,
+ Header,
+ ObjectFormat,
+ ObjectId,
+ Version,
+};
+
+pub mod list;
+pub use list::{
+ List,
+ Location,
+ Uri,
+};
+
+pub const FILE_EXTENSION: &str = "bundle";
+pub const DOT_FILE_EXTENSION: &str = ".bundle";
+
/// Descriptive information about a bundle: its size in bytes, its
/// content [`Hash`], the SHA-256 checksum over the bundle file, and any
/// known download locations.
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct Info {
    pub len: u64,
    pub hash: Hash,
    #[serde(with = "hex::serde")]
    pub checksum: [u8; 32],
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub uris: Vec<Url>,
}

/// What a bundle about to be fetched is expected to look like, used to
/// validate the download. The checksum is optional, as it may not be
/// known upfront.
#[derive(Clone, Copy)]
pub struct Expect<'a> {
    pub len: u64,
    pub hash: &'a Hash,
    pub checksum: Option<&'a [u8]>,
}

impl<'a> From<&'a Info> for Expect<'a> {
    fn from(
        Info {
            len,
            hash,
            checksum,
            ..
        }: &'a Info,
    ) -> Self {
        Self {
            len: *len,
            hash,
            checksum: Some(checksum),
        }
    }
}
+
/// Write a git bundle described by `header` to `out`, packing the
/// objects reachable from the header's references but not from its
/// prerequisites out of `repo`.
///
/// Returns an [`Info`] recording the total bytes written, the bundle
/// [`Hash`], and the SHA-256 checksum over the written bytes. The
/// `uris` of the returned [`Info`] are empty, as locations are not
/// known at creation time.
pub fn create<W>(mut out: W, repo: &git2::Repository, header: &Header) -> crate::Result<Info>
where
    W: io::Write,
{
    // Checksum and count every byte written, transparently to the rest
    // of this function.
    let mut hasher = HashWriter::new(Sha256::new(), &mut out);
    let mut writer = LenWriter::new(&mut hasher);
    let mut pack = {
        let mut pack = repo.packbuilder()?;
        let mut walk = repo.revwalk()?;
        for pre in &header.prerequisites {
            walk.hide(pre.try_into()?)?;
        }
        for inc in header.references.values() {
            walk.push(inc.try_into()?)?;
        }
        pack.insert_walk(&mut walk)?;
        pack
    };
    // Bundle layout: header first, then the packfile.
    header.to_writer(&mut writer)?;

    info!("Packing objects...");
    pack.foreach(|chunk| io::Write::write_all(&mut writer, chunk).is_ok())?;

    let len = writer.bytes_written();
    let hash = header.hash();
    let checksum = hasher.hash().into();

    info!("Created patch bundle {hash}");

    Ok(Info {
        len,
        hash,
        checksum,
        uris: vec![],
    })
}
diff --git a/src/bundle/error.rs b/src/bundle/error.rs
new file mode 100644
index 0000000..41529c2
--- /dev/null
+++ b/src/bundle/error.rs
@@ -0,0 +1,31 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use thiserror::Error;
+
+use super::{
+ ObjectFormat,
+ ObjectId,
+};
+use crate::git::refs;
+
/// Errors which can occur when parsing a bundle header.
#[derive(Debug, Error)]
pub enum Header {
    // Structural violation of the bundle format.
    #[error("invalid header: {0}")]
    Format(&'static str),

    // A line that is neither a capability, prerequisite, nor reference.
    #[error("unrecognised header {0}")]
    UnrecognisedHeader(String),

    // Oid length does not match the declared object-format.
    #[error("object id {oid} not valid for object-format {fmt}")]
    ObjectFormat { fmt: ObjectFormat, oid: ObjectId },

    #[error("invalid reference name")]
    Refname(#[from] refs::error::RefFormat),

    #[error("invalid hex oid")]
    Oid(#[from] hex::FromHexError),

    #[error(transparent)]
    Io(#[from] std::io::Error),
}
diff --git a/src/bundle/fetch.rs b/src/bundle/fetch.rs
new file mode 100644
index 0000000..4e58000
--- /dev/null
+++ b/src/bundle/fetch.rs
@@ -0,0 +1,130 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ fs,
+ io::{
+ self,
+ Read,
+ Seek,
+ SeekFrom,
+ Write,
+ },
+ path::{
+ Path,
+ PathBuf,
+ },
+};
+
+use anyhow::ensure;
+use either::Either::{
+ self,
+ Left,
+ Right,
+};
+use sha2::{
+ Digest,
+ Sha256,
+};
+use tempfile::NamedTempFile;
+use url::Url;
+
+use super::{
+ header,
+ Expect,
+ Header,
+};
+use crate::{
+ bundle,
+ fs::LockedFile,
+ git,
+ io::HashWriter,
+};
+
+const MAX_BUNDLE_URIS_BYTES: u64 = 50_000;
+
/// A successfully downloaded and checksummed bundle.
pub struct Fetched {
    // Where the bundle was stored on disk.
    path: PathBuf,
    // Metadata gathered while downloading.
    info: bundle::Info,
}

impl Fetched {
    /// Decompose into the on-disk path and the bundle [`bundle::Info`].
    pub fn into_inner(self) -> (PathBuf, bundle::Info) {
        (self.path, self.info)
    }
}

/// Downloads bundles, or bundle lists, over HTTP.
pub struct Fetcher {
    agent: ureq::Agent,
}

impl Default for Fetcher {
    fn default() -> Self {
        Self {
            agent: ureq::agent(),
        }
    }
}
+
impl Fetcher {
    /// Fetch the resource at `url`, which may be either a git bundle or
    /// a bundle list in git config format.
    ///
    /// A bundle is recognised by its signature in the first bytes of
    /// the response; it is written to `out_dir` (named after the
    /// expected bundle hash) and validated against `expect`. Any other
    /// payload is treated as a bundle list, of which at most
    /// [`MAX_BUNDLE_URIS_BYTES`] bytes are read.
    pub fn fetch(
        &self,
        url: &Url,
        out_dir: &Path,
        expect: Expect,
    ) -> crate::Result<Either<bundle::List, Fetched>> {
        let resp = self.agent.request_url("GET", url).call()?;
        let mut body = resp.into_reader();

        // Both bundle signatures are exactly 16 bytes long including
        // the trailing newline, so 16 bytes suffice to discriminate.
        let mut buf = [0; 16];
        body.read_exact(&mut buf)?;
        let is_bundle = buf.starts_with(header::SIGNATURE_V2.as_bytes())
            || buf.starts_with(header::SIGNATURE_V3.as_bytes());
        if is_bundle {
            ensure!(
                matches!(buf.last(), Some(b'\n')),
                "malformed bundle header: trailing data"
            )
        }

        if is_bundle {
            let mut path = out_dir.join(expect.hash.to_string());
            path.set_extension(bundle::FILE_EXTENSION);

            let mut lck = {
                fs::create_dir_all(out_dir)?;
                LockedFile::atomic(&path, true, LockedFile::DEFAULT_PERMISSIONS)?
            };

            // Checksum everything written, including the sniffed bytes.
            let mut out = HashWriter::new(Sha256::new(), &mut lck);
            out.write_all(&buf)?;

            // Never read more than the expected length off the network.
            let len = buf.len() as u64 + io::copy(&mut body.take(expect.len), &mut out)?;
            let checksum = out.hash().into();
            if let Some(chk) = expect.checksum {
                ensure!(chk == checksum, "checksum mismatch");
            }
            // Re-parse the header from disk to obtain the actual hash.
            lck.seek(SeekFrom::Start(0))?;
            let header = Header::from_reader(&mut lck)?;
            let hash = header.hash();

            lck.persist()?;

            let info = bundle::Info {
                len,
                hash,
                checksum,
                uris: vec![url.clone()],
            };
            Ok(Right(Fetched { path, info }))
        } else {
            // Spool to a temp file: git2 reads config only from disk.
            let mut tmp = NamedTempFile::new()?;
            tmp.write_all(&buf)?;
            io::copy(&mut body.take(MAX_BUNDLE_URIS_BYTES), &mut tmp)?;
            let cfg = git::config::Snapshot::try_from(git2::Config::open(tmp.path())?)?;
            let list = bundle::List::from_config(cfg)?;

            Ok(Left(list))
        }
    }
}
diff --git a/src/bundle/header.rs b/src/bundle/header.rs
new file mode 100644
index 0000000..6f3dfe3
--- /dev/null
+++ b/src/bundle/header.rs
@@ -0,0 +1,365 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use core::fmt;
+use std::{
+ collections::{
+ BTreeMap,
+ BTreeSet,
+ },
+ io,
+ ops::Deref,
+ str::FromStr,
+};
+
+use hex::{
+ FromHex,
+ FromHexError,
+};
+use refs::Refname;
+use sha2::{
+ Digest,
+ Sha256,
+};
+
+use super::error;
+use crate::{
+ git::refs,
+ io::Lines,
+};
+
+pub const SIGNATURE_V2: &str = "# v2 git bundle";
+pub const SIGNATURE_V3: &str = "# v3 git bundle";
+
/// The bundle format version.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Version {
    V2,
    V3,
}

impl Default for Version {
    fn default() -> Self {
        Self::V2
    }
}

/// The hash algorithm used for object ids in a bundle.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ObjectFormat {
    Sha1,
    Sha256,
}

impl Default for ObjectFormat {
    fn default() -> Self {
        Self::Sha1
    }
}

impl fmt::Display for ObjectFormat {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            Self::Sha1 => "sha1",
            Self::Sha256 => "sha256",
        })
    }
}
+
/// An object id in either the sha1 (20 bytes) or sha256 (32 bytes)
/// object format.
#[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd, serde::Serialize, serde::Deserialize)]
#[serde(untagged)]
pub enum ObjectId {
    Sha1(#[serde(with = "hex::serde")] [u8; 20]),
    Sha2(#[serde(with = "hex::serde")] [u8; 32]),
}

impl ObjectId {
    /// The raw hash bytes.
    pub fn as_bytes(&self) -> &[u8] {
        self.as_ref()
    }
}

impl AsRef<[u8]> for ObjectId {
    fn as_ref(&self) -> &[u8] {
        match self {
            Self::Sha1(b) => &b[..],
            Self::Sha2(b) => &b[..],
        }
    }
}

// Display renders lowercase hex; Debug additionally names the variant.
impl fmt::Display for ObjectId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&hex::encode(self))
    }
}

impl fmt::Debug for ObjectId {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Self::Sha1(x) => f.debug_tuple("Sha1").field(&hex::encode(x)).finish(),
            Self::Sha2(x) => f.debug_tuple("Sha2").field(&hex::encode(x)).finish(),
        }
    }
}

impl FromHex for ObjectId {
    type Error = hex::FromHexError;

    // The variant is inferred from the hex string length.
    #[inline]
    fn from_hex<T: AsRef<[u8]>>(hex: T) -> Result<Self, Self::Error> {
        match hex.as_ref().len() {
            40 => Ok(Self::Sha1(<[u8; 20]>::from_hex(hex)?)),
            64 => Ok(Self::Sha2(<[u8; 32]>::from_hex(hex)?)),
            _ => Err(hex::FromHexError::InvalidStringLength),
        }
    }
}

impl From<&git2::Oid> for ObjectId {
    fn from(oid: &git2::Oid) -> Self {
        let bs = oid.as_bytes();
        match bs.len() {
            20 => Self::Sha1(bs.try_into().unwrap()),
            32 => Self::Sha2(bs.try_into().unwrap()),
            // git2 only produces 20- or 32-byte oids.
            x => unreachable!("oid with strange hash size: {}", x),
        }
    }
}

impl TryFrom<&ObjectId> for git2::Oid {
    type Error = git2::Error;

    fn try_from(oid: &ObjectId) -> Result<Self, Self::Error> {
        match oid {
            ObjectId::Sha1(hash) => Self::from_bytes(hash),
            // libgit2 has no stable sha256 support at this time.
            ObjectId::Sha2(_) => Err(git2::Error::new(
                git2::ErrorCode::Invalid,
                git2::ErrorClass::Sha1,
                "sha2 oids not yet supported",
            )),
        }
    }
}
+
/// The parsed header of a git bundle, describing its prerequisites and
/// the references it carries.
#[derive(Debug, Default, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Header {
    pub version: Version,
    pub object_format: ObjectFormat,
    // Objects the receiving repository must already have.
    pub prerequisites: BTreeSet<ObjectId>,
    // Refs carried by the bundle, with their target objects.
    pub references: BTreeMap<Refname, ObjectId>,
}
+
+impl Header {
+ /// Parse a [`Header`] from an IO stream.
+ ///
+ /// The stream will be buffered internally, and its position set to the
+ /// start of the packfile section.
+ pub fn from_reader<R>(mut io: R) -> Result<Self, error::Header>
+ where
+ R: io::Read + io::Seek,
+ {
+ use hex::FromHex as _;
+
+ let mut lines = Lines::new(io::BufReader::new(&mut io)).until_blank();
+
+ let mut version: Option<Version> = None;
+ let mut object_format: Option<ObjectFormat> = None;
+ let mut prerequisites = BTreeSet::new();
+ let mut references = BTreeMap::new();
+
+ match lines
+ .next()
+ .ok_or(error::Header::Format("empty input"))??
+ .as_str()
+ {
+ SIGNATURE_V2 => {
+ version = Some(Version::V2);
+ object_format = Some(ObjectFormat::Sha1);
+ Ok(())
+ },
+
+ SIGNATURE_V3 => {
+ version = Some(Version::V2);
+ Ok(())
+ },
+
+ _ => Err(error::Header::Format("invalid signature")),
+ }?;
+
+ if let Some(Version::V3) = version {
+ for capability in lines.by_ref() {
+ let capability = capability?;
+
+ if !capability.starts_with('@') {
+ return Err(error::Header::Format("expected capabilities"));
+ }
+
+ if capability.starts_with("@filter") {
+ return Err(error::Header::Format("object filters are not supported"));
+ }
+
+ match capability.strip_prefix("@object-format=") {
+ Some("sha1") => {
+ object_format = Some(ObjectFormat::Sha1);
+ },
+
+ Some("sha256") => {
+ object_format = Some(ObjectFormat::Sha256);
+ },
+
+ _ => return Err(error::Header::Format("unrecognised capability")),
+ }
+
+ if object_format.is_some() {
+ break;
+ }
+ }
+ }
+
+ let version = version.unwrap();
+ let object_format = object_format.ok_or(error::Header::Format("missing object-format"))?;
+
+ for tip in lines.by_ref() {
+ let mut tip = tip?;
+ let oid_off = usize::from(tip.starts_with('-'));
+ let oid_hexsz = match object_format {
+ ObjectFormat::Sha1 => 40,
+ ObjectFormat::Sha256 => 64,
+ };
+
+ let oid = ObjectId::from_hex(&tip[oid_off..oid_hexsz + oid_off])?;
+ if matches!(
+ (&object_format, &oid),
+ (ObjectFormat::Sha1, ObjectId::Sha2(_)) | (ObjectFormat::Sha256, ObjectId::Sha1(_))
+ ) {
+ return Err(error::Header::ObjectFormat {
+ fmt: object_format,
+ oid,
+ });
+ }
+ if !matches!(tip.chars().nth(oid_off + oid_hexsz), None | Some(' ')) {
+ return Err(error::Header::UnrecognisedHeader(tip));
+ }
+
+ if oid_off > 0 {
+ prerequisites.insert(oid);
+ } else {
+ let refname = tip.split_off(oid_off + oid_hexsz + 1);
+ if !refname.starts_with("refs/") {
+ return Err(error::Header::Format("shorthand refname"));
+ }
+ if references.insert(refname.parse()?, oid).is_some() {
+ return Err(error::Header::Format("duplicate refname"));
+ }
+ }
+ }
+
+ if references.is_empty() {
+ return Err(error::Header::Format("empty references"));
+ }
+
+ let pos = io::Seek::stream_position(&mut lines)?;
+ drop(lines);
+ io.seek(io::SeekFrom::Start(pos))?;
+
+ Ok(Header {
+ version,
+ object_format,
+ prerequisites,
+ references,
+ })
+ }
+
    /// Serialise the header in git bundle format, terminated by the
    /// blank line that separates it from the packfile section.
    pub fn to_writer<W>(&self, mut io: W) -> io::Result<()>
    where
        W: io::Write,
    {
        match self.version {
            Version::V2 => writeln!(&mut io, "{}", SIGNATURE_V2)?,
            Version::V3 => {
                writeln!(&mut io, "{}", SIGNATURE_V3)?;
                match self.object_format {
                    ObjectFormat::Sha1 => writeln!(&mut io, "@object-format=sha1")?,
                    ObjectFormat::Sha256 => writeln!(&mut io, "@object-format=sha256")?,
                }
            },
        }
        for pre in &self.prerequisites {
            writeln!(&mut io, "-{}", pre)?;
        }
        for (name, oid) in &self.references {
            writeln!(&mut io, "{} {}", oid, name)?;
        }

        writeln!(&mut io)
    }

    /// Record `oid` as a prerequisite. Returns `false` if it was
    /// already present.
    pub fn add_prerequisite<O>(&mut self, oid: O) -> bool
    where
        O: Into<ObjectId>,
    {
        self.prerequisites.insert(oid.into())
    }

    /// Record `name` as pointing to `oid`, returning the previous
    /// target if `name` was already present.
    pub fn add_reference<O>(&mut self, name: Refname, oid: O) -> Option<ObjectId>
    where
        O: Into<ObjectId>,
    {
        self.references.insert(name, oid.into())
    }

    /// Compute the bundle [`Hash`]: the SHA-256 over the deduplicated,
    /// ordered set of all object ids appearing in the header.
    pub fn hash(&self) -> Hash {
        let mut ids: BTreeSet<&ObjectId> = BTreeSet::new();
        ids.extend(self.prerequisites.iter());
        ids.extend(self.references.values());

        let mut sha = Sha256::new();
        for id in ids {
            sha.update(id);
        }
        Hash(sha.finalize().into())
    }
+}
+
/// Content hash identifying a bundle, cf. [`Header::hash`].
#[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd, serde::Serialize, serde::Deserialize)]
pub struct Hash(#[serde(with = "hex::serde")] [u8; 32]);

impl Hash {
    /// The raw hash bytes.
    pub fn as_bytes(&self) -> &[u8] {
        self.deref()
    }

    /// Whether `hex` parses as a valid [`Hash`].
    pub fn is_valid(hex: &str) -> bool {
        Self::from_str(hex).is_ok()
    }
}

impl Deref for Hash {
    type Target = [u8; 32];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

// Both Display and Debug render lowercase hex.
impl fmt::Display for Hash {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.write_str(&hex::encode(self.0))
    }
}

impl fmt::Debug for Hash {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&hex::encode(self.0))
    }
}

impl FromStr for Hash {
    type Err = FromHexError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        <[u8; 32]>::from_hex(s).map(Self)
    }
}
diff --git a/src/bundle/list.rs b/src/bundle/list.rs
new file mode 100644
index 0000000..21753fa
--- /dev/null
+++ b/src/bundle/list.rs
@@ -0,0 +1,335 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+//! Bundle Lists in git config format, as per [`bundle-uri`].
+//!
+//! [`bundle-uri`]: https://git.kernel.org/pub/scm/git/git.git/tree/Documentation/technical/bundle-uri.txt
+
+use std::{
+ borrow::Cow,
+ cmp::Ordering,
+ collections::HashMap,
+ fmt,
+ io,
+ str::FromStr,
+ time::{
+ SystemTime,
+ UNIX_EPOCH,
+ },
+};
+
+use anyhow::anyhow;
+use once_cell::sync::Lazy;
+use sha2::{
+ Digest,
+ Sha256,
+};
+use url::Url;
+
+use crate::git::{
+ self,
+ if_not_found_none,
+};
+
+pub const FILE_EXTENSION: &str = "uris";
+pub const DOT_FILE_EXTENSION: &str = ".uris";
+
/// Bundle list mode: whether a client should fetch `all` entries, or
/// `any` single one is sufficient.
#[derive(Clone, Copy, Debug)]
pub enum Mode {
    All,
    Any,
}

impl Mode {
    /// The canonical config-file spelling of this mode.
    pub fn as_str(&self) -> &str {
        if let Self::All = self {
            "all"
        } else {
            "any"
        }
    }
}

impl fmt::Display for Mode {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.as_str())
    }
}
+
impl FromStr for Mode {
    type Err = crate::Error;

    // Inverse of `Mode::as_str`; anything else is an error.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "all" => Ok(Self::All),
            "any" => Ok(Self::Any),
            x => Err(anyhow!("unknown bundle list mode: {x}")),
        }
    }
}
+
/// A bundle uri, either absolute or relative to some (unspecified)
/// base url. Resolve relative uris via [`Uri::abs`].
#[derive(Debug)]
pub enum Uri {
    Absolute(Url),
    Relative(String),
}

impl Uri {
    pub fn as_str(&self) -> &str {
        match self {
            Self::Absolute(url) => url.as_str(),
            Self::Relative(path) => path.as_str(),
        }
    }

    /// Resolve this uri against `base`, borrowing if already absolute.
    pub fn abs(&self, base: &Url) -> Result<Cow<Url>, url::ParseError> {
        match self {
            Self::Absolute(url) => Ok(Cow::Borrowed(url)),
            Self::Relative(path) => base.join(path).map(Cow::Owned),
        }
    }
}

impl From<Url> for Uri {
    fn from(url: Url) -> Self {
        Self::Absolute(url)
    }
}

impl FromStr for Uri {
    type Err = url::ParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Base used only to validate relative uris; it never becomes
        // part of the parsed result.
        static DUMMY_BASE: Lazy<Url> =
            Lazy::new(|| Url::parse("https://bundles.example.com").unwrap());

        Url::parse(s).map(Self::Absolute).or_else(|e| match e {
            url::ParseError::RelativeUrlWithoutBase => {
                let url = Url::options().base_url(Some(&DUMMY_BASE)).parse(s)?;

                // `Url::path()` always carries a leading '/'. Keep it
                // only if the input was absolute-path style, otherwise
                // strip the slash the dummy-base join introduced.
                let path = if s.starts_with('/') {
                    url.path()
                } else {
                    url.path().trim_start_matches('/')
                };

                Ok(Self::Relative(path.to_owned()))
            },
            other => Err(other),
        })
    }
}
+
/// A single entry in a bundle list.
#[derive(Debug)]
pub struct Location {
    // Config subsection name identifying this entry.
    pub id: String,
    pub uri: Uri,
    pub filter: Option<String>,
    pub creation_token: Option<u64>,
    pub location: Option<String>,
}

impl Location {
    /// A location for `uri` with all optional fields unset.
    pub fn new(id: String, uri: Uri) -> Self {
        Self {
            id,
            uri,
            filter: None,
            creation_token: None,
            location: None,
        }
    }

    /// Write this entry into `cfg` under the `bundle.<id>` section.
    pub fn to_config(&self, cfg: &mut git2::Config) -> crate::Result<()> {
        let section = format!("bundle.{}", self.id);

        cfg.set_str(&format!("{section}.uri"), self.uri.as_str())?;
        if let Some(filter) = self.filter.as_deref() {
            cfg.set_str(&format!("{section}.filter"), filter)?;
        }
        if let Some(token) = &self.creation_token {
            cfg.set_str(&format!("{section}.creationToken"), &token.to_string())?;
        }
        if let Some(loc) = self.location.as_deref() {
            cfg.set_str(&format!("{section}.location"), loc)?;
        }

        Ok(())
    }

    /// Render this entry in git config syntax.
    pub fn to_writer<W: io::Write>(&self, mut out: W) -> io::Result<()> {
        writeln!(&mut out, "[bundle \"{}\"]", self.id)?;
        writeln!(&mut out, "\turi = {}", self.uri.as_str())?;
        if let Some(filter) = self.filter.as_deref() {
            writeln!(&mut out, "\tfilter = {}", filter)?;
        }
        if let Some(token) = &self.creation_token {
            writeln!(&mut out, "\tcreationToken = {}", token)?;
        }
        if let Some(loc) = self.location.as_deref() {
            writeln!(&mut out, "\tlocation = {}", loc)?;
        }

        Ok(())
    }
}

impl From<Url> for Location {
    // The entry id is the SHA-256 of the url; the creation token is
    // the current unix time.
    fn from(url: Url) -> Self {
        let id = hex::encode(Sha256::digest(url.as_str()));
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("backwards system clock")
            .as_secs();
        Self {
            id,
            uri: url.into(),
            filter: None,
            creation_token: Some(now),
            location: None,
        }
    }
}
+
/// A bundle list, as described by the `bundle-uri` documentation.
#[derive(Debug)]
pub struct List {
    pub mode: Mode,
    pub heuristic: Option<String>,
    pub bundles: Vec<Location>,
}
+
+impl List {
+ pub fn any() -> Self {
+ Self {
+ mode: Mode::Any,
+ heuristic: Some("creationToken".into()),
+ bundles: Vec::new(),
+ }
+ }
+
    /// Parse a bundle list from a [`git2::Config`]
    ///
    /// The config is expected to contain the list config keys
    /// `bundle.mode` and optionally `bundle.heuristic`.
    /// `bundle.version` is currently ignored.
    ///
    /// A bundle [`Location`] is yielded if at least `bundle.<id>.uri`
    /// is set and a valid [`Uri`]. Relative uris are preserved as
    /// [`Uri::Relative`]; resolve them against a base [`Url`] via
    /// [`Uri::abs`].
    ///
    /// The [`Location`] list is sorted by creation token in descending
    /// order (entries without a token sort last). The sort is unstable.
    pub fn from_config(cfg: git::config::Snapshot) -> crate::Result<Self> {
        // nb. ignoring version
        let mode = cfg.get_str("bundle.mode")?.parse()?;
        let heuristic = if_not_found_none(cfg.get_string("bundle.heuristic"))?;

        // Partial entry, accumulated key-by-key from the config.
        #[derive(Default)]
        struct Info {
            uri: Option<Uri>,
            filter: Option<String>,
            creation_token: Option<u64>,
            location: Option<String>,
        }

        let mut bundles: HashMap<String, Info> = HashMap::new();
        // nb. git2's entries iterator is streaming, hence `while let`.
        let mut iter = cfg.entries(Some("bundle\\.[^.]+\\.[^.]+$"))?;
        while let Some(entry) = iter.next() {
            let entry = entry?;
            if let Some(("bundle", id, key)) = entry
                .name()
                .and_then(|name| name.split_once('.'))
                .and_then(|(a, b)| b.split_once('.').map(|(c, d)| (a, c, d)))
            {
                let value = entry
                    .value()
                    .ok_or_else(|| anyhow!("value for bundle.{id}.{key} not utf8"))?;
                let info = bundles.entry(id.to_owned()).or_default();
                match key {
                    "uri" => {
                        let uri = value.parse()?;
                        info.uri = Some(uri);
                    },

                    "filter" => {
                        info.filter = Some(value.to_owned());
                    },

                    // Accept both spellings: git normalises config keys
                    // to lowercase, but files may use camelCase.
                    "creationToken" | "creationtoken" => {
                        let token = value.parse()?;
                        info.creation_token = Some(token);
                    },

                    "location" => {
                        info.location = Some(value.to_owned());
                    },

                    _ => {},
                }
            }
        }
        // Keep only entries which have at least a uri.
        let mut bundles = bundles
            .into_iter()
            .filter_map(|(id, info)| {
                info.uri.map(|uri| Location {
                    id,
                    uri,
                    filter: info.filter,
                    creation_token: info.creation_token,
                    location: info.location,
                })
            })
            .collect::<Vec<_>>();
        bundles.sort_unstable_by(|a, b| match (&a.creation_token, &b.creation_token) {
            (Some(x), Some(y)) => y.cmp(x),
            (Some(_), None) => Ordering::Less,
            (None, Some(_)) => Ordering::Greater,
            (None, None) => Ordering::Equal,
        });

        Ok(Self {
            mode,
            heuristic,
            bundles,
        })
    }
+
    /// Write the list (including all entries) into `cfg`.
    pub fn to_config(&self, cfg: &mut git2::Config) -> crate::Result<()> {
        cfg.set_i32("bundle.version", 1)?;
        cfg.set_str("bundle.mode", self.mode.as_str())?;
        if let Some(heuristic) = self.heuristic.as_deref() {
            cfg.set_str("bundle.heuristic", heuristic)?;
        }
        self.bundles.iter().try_for_each(|loc| loc.to_config(cfg))?;

        Ok(())
    }
+
    /// Render the list (including all entries) in git config syntax.
    pub fn to_writer<W: io::Write>(&self, mut out: W) -> io::Result<()> {
        writeln!(&mut out, "[bundle]")?;
        writeln!(&mut out, "\tversion = 1")?;
        writeln!(&mut out, "\tmode = {}", self.mode)?;
        if let Some(heuristic) = self.heuristic.as_deref() {
            writeln!(&mut out, "\theuristic = {}", heuristic)?;
        }
        for loc in &self.bundles {
            writeln!(&mut out)?;
            loc.to_writer(&mut out)?;
        }

        Ok(())
    }
+
+ pub fn to_str(&self) -> String {
+ let mut buf = Vec::new();
+ self.to_writer(&mut buf).unwrap();
+ unsafe { String::from_utf8_unchecked(buf) }
+ }
+}
+
+impl Extend<Location> for List {
+ fn extend<T>(&mut self, iter: T)
+ where
+ T: IntoIterator<Item = Location>,
+ {
+ self.bundles.extend(iter)
+ }
+}
diff --git a/src/cfg.rs b/src/cfg.rs
new file mode 100644
index 0000000..b6a74da
--- /dev/null
+++ b/src/cfg.rs
@@ -0,0 +1,180 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
/// Filesystem locations used by `it`.
pub mod paths {
    use directories::ProjectDirs;
    use std::path::{
        Path,
        PathBuf,
    };

    /// Directory in which identity repositories are stored.
    pub fn ids() -> PathBuf {
        project_dirs().data_dir().join("ids")
    }

    /// Default path where to store bundles.
    ///
    /// This is a relative path, to be treated as relative to GIT_DIR.
    pub fn bundles() -> &'static Path {
        Path::new("it/bundles")
    }

    // Platform-specific application directories; panics if no home
    // directory can be determined.
    fn project_dirs() -> ProjectDirs {
        ProjectDirs::from("io", "eagain", "it").expect("no valid $HOME")
    }
}
+
+pub mod git {
+ use std::path::Path;
+
+ use anyhow::{
+ anyhow,
+ bail,
+ ensure,
+ };
+ use zeroize::Zeroizing;
+
+ use crate::{
+ git::{
+ self,
+ if_not_found_none,
+ Refname,
+ },
+ keys::{
+ Agent,
+ Signer,
+ },
+ metadata::IdentityId,
+ ssh::{
+ self,
+ agent,
+ },
+ };
+
+ /// Last resort to override the signing key, if neither [`USER_SIGNING_KEY`]
+ /// nor [`SSH_KEY_COMMAND`] will cut it.
+ pub const IT_SIGNING_KEY: &str = "it.signingKey";
+ /// The default `it` identity to use.
+ pub const IT_ID: &str = "it.id";
+ /// Command to dynamically set the signing key, see
+ /// [`gpg.ssh.defaultKeyCommand`]
+ ///
+ /// [`gpg.ssh.defaultKeyCommand`]: https://git-scm.com/docs/git-config#Documentation/git-config.txt-gpgsshdefaultKeyCommand
+ pub const SSH_KEY_COMMAND: &str = "gpg.ssh.defaultKeyCommand";
+ /// The key to sign git and it objects with, see [`user.signingKey`]
+ ///
+ /// [`user.signingKey`]: https://git-scm.com/docs/git-config#Documentation/git-config.txt-usersigningKey
+ pub const USER_SIGNING_KEY: &str = "user.signingKey";
+ /// The default branch name, see [`init.defaultBranch`]
+ ///
+ /// If not set, the default branch is "master".
+ ///
+ /// [`init.defaultBranch`]: https://git-scm.com/docs/git-config#Documentation/git-config.txt-initdefaultBranch
+ pub const DEFAULT_BRANCH: &str = "init.defaultBranch";
+
    /// A signing key resolved from git config: either a private key
    /// read from disk, or a public key whose secret half is expected
    /// to live in an SSH agent.
    #[allow(clippy::large_enum_variant)]
    pub enum Key {
        Secret(ssh::PrivateKey),
        Public(ssh::PublicKey),
    }

    impl Key {
        /// The public half of the key, regardless of variant.
        pub fn public(&self) -> &ssh::PublicKey {
            match self {
                Self::Secret(sk) => sk.public_key(),
                Self::Public(pk) => pk,
            }
        }
    }
+
    /// Determine the signing key: [`IT_SIGNING_KEY`] takes precedence,
    /// then [`USER_SIGNING_KEY`], then [`SSH_KEY_COMMAND`].
    pub fn signing_key(c: &git2::Config) -> crate::Result<Option<Key>> {
        match if_not_found_none(c.get_string(IT_SIGNING_KEY))? {
            Some(v) => ssh_signing_key_from_config_value(v).map(Some),
            None => ssh_signing_key(c)
                .transpose()
                .or_else(|| ssh_key_command(c).transpose())
                .transpose(),
        }
    }

    /// Construct a [`Signer`] from the configured signing key.
    ///
    /// A public key delegates signing to the SSH agent; an encrypted
    /// private key is decrypted by prompting for a passphrase via
    /// `askpass`, with up to three attempts.
    pub fn signer<F>(c: &git2::Config, askpass: F) -> crate::Result<Box<dyn Signer>>
    where
        F: Fn(&str) -> crate::Result<Zeroizing<Vec<u8>>>,
    {
        let key = signing_key(c)?.ok_or_else(|| anyhow!("no signing key in git config"))?;
        match key {
            Key::Public(pk) => {
                let client = agent::Client::from_env()?;
                Ok(Box::new(Agent::new(client, pk.into())))
            },
            Key::Secret(sk) => {
                if sk.is_encrypted() {
                    let prompt = format!(
                        "`it` wants to use the key {}. Please provide a passphrase to decrypt it",
                        sk.public_key().to_openssh()?
                    );
                    for _ in 0..3 {
                        let pass = askpass(&prompt)?;
                        if let Ok(key) = sk.decrypt(pass) {
                            return Ok(Box::new(key));
                        }
                    }
                    bail!("unable to decrypt secret key");
                } else {
                    Ok(Box::new(sk))
                }
            },
        }
    }
+
    /// The default `it` identity configured via [`IT_ID`], if any.
    pub fn identity(c: &git2::Config) -> crate::Result<Option<IdentityId>> {
        if_not_found_none(c.get_string(IT_ID))?
            .map(IdentityId::try_from)
            .transpose()
            .map_err(Into::into)
    }

    /// The signing key configured via [`USER_SIGNING_KEY`], if any.
    pub fn ssh_signing_key(cfg: &git2::Config) -> crate::Result<Option<Key>> {
        if_not_found_none(cfg.get_string(USER_SIGNING_KEY))?
            .map(ssh_signing_key_from_config_value)
            .transpose()
    }

    /// Interpret a config value as a signing key: either a literal
    /// public key carrying git's `key::` prefix, or a path to a
    /// private key file.
    pub(crate) fn ssh_signing_key_from_config_value<V: AsRef<str>>(v: V) -> crate::Result<Key> {
        match v.as_ref().strip_prefix("key::") {
            Some(lit) => {
                let key = ssh::PublicKey::from_openssh(lit)?;
                Ok(Key::Public(key))
            },
            None => {
                let path = Path::new(v.as_ref());
                ensure!(
                    path.exists(),
                    "{} is not a valid path to an SSH private key",
                    path.display()
                );
                let key = ssh::PrivateKey::read_openssh_file(path)?;
                Ok(Key::Secret(key))
            },
        }
    }

    /// Obtain a public key by running [`SSH_KEY_COMMAND`], if that is
    /// configured.
    pub fn ssh_key_command(cfg: &git2::Config) -> crate::Result<Option<Key>> {
        let out = git::config_command(cfg, SSH_KEY_COMMAND)?;
        let key = out
            .as_deref()
            .map(ssh::PublicKey::from_openssh)
            .transpose()?
            .map(Key::Public);

        Ok(key)
    }

    /// The branch name configured via [`DEFAULT_BRANCH`], or "master"
    /// if unset.
    pub fn default_branch(cfg: &git2::Config) -> crate::Result<Refname> {
        if_not_found_none(cfg.get_string(DEFAULT_BRANCH))?
            .unwrap_or_else(|| String::from("master"))
            .try_into()
            .map_err(Into::into)
    }
+}
+pub use git::signer;
diff --git a/src/cmd.rs b/src/cmd.rs
new file mode 100644
index 0000000..85669f9
--- /dev/null
+++ b/src/cmd.rs
@@ -0,0 +1,117 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use crate::metadata::git::{
+ find_parent,
+ FromGit,
+ GitAlternates,
+ GitDrop,
+ GitIdentity,
+ GitMirrors,
+};
+
+mod util;
+use util::args;
+
+pub mod drop;
+pub mod id;
+pub mod mergepoint;
+pub mod patch;
+pub mod topic;
+pub mod ui;
+
+pub use crate::{
+ Error,
+ Result,
+};
+
+/// Error indicating that the command was cancelled at the user's request, eg.
+/// by pressing ESC in an interactive prompt.
+///
+/// By means of [`anyhow::Error::downcast`], this allows for exiting the program
+/// with a zero exit status, even though the invocation returned an `Err`.
#[derive(Debug, thiserror::Error)]
#[error("command aborted")]
pub struct Aborted;

/// Shortcut to return early from a command with an [`Aborted`] error.
macro_rules! abort {
    () => {
        return Err(crate::Error::from(Aborted))
    };
}
// Re-export crate-internally so submodules can invoke `cmd::abort!()`.
pub(crate) use abort;
+
/// The result of running a command: either a single serialisable
/// value, or a stream of them.
pub enum Output {
    Val(Box<dyn erased_serde::Serialize>),
    Iter(Box<dyn Iterator<Item = Result<Box<dyn erased_serde::Serialize>>>>),
}

impl Output {
    /// Wrap a single value.
    pub fn val<T>(v: T) -> Self
    where
        T: serde::Serialize + 'static,
    {
        Self::Val(Box::new(v))
    }

    /// Wrap an iterator of values, type-erasing each item.
    pub fn iter<T, U>(v: T) -> Self
    where
        T: IntoIterator<Item = Result<U>> + 'static,
        U: serde::Serialize + 'static,
    {
        let iter = v
            .into_iter()
            .map(|x| x.map(|i| Box::new(i) as Box<dyn erased_serde::Serialize>));

        Self::Iter(Box::new(iter))
    }
}
+
// Convenience conversion into [`Output`]; blanket-implemented for all
// serialisable types, which become a single [`Output::Val`].
trait IntoOutput {
    fn into_output(self) -> Output;
}

impl<T> IntoOutput for T
where
    T: serde::Serialize + 'static,
{
    fn into_output(self) -> Output {
        Output::Val(Box::new(self))
    }
}
+
// Top-level command tree: one variant per noun-style command group.
// NB. `///` comments below are clap help text; keep them short.
#[derive(Debug, clap::Subcommand)]
pub enum Cmd {
    /// Drop management
    #[clap(subcommand)]
    Drop(drop::Cmd),

    /// Identity management
    #[clap(subcommand)]
    Id(id::Cmd),

    /// Patches
    #[clap(subcommand)]
    Patch(patch::Cmd),

    /// Merge points
    #[clap(subcommand)]
    MergePoint(mergepoint::Cmd),

    /// Topics
    #[clap(subcommand)]
    Topic(topic::Cmd),
}

impl Cmd {
    /// Dispatch to the selected command group.
    pub fn run(self) -> Result<Output> {
        match self {
            Self::Drop(cmd) => cmd.run(),
            Self::Id(cmd) => cmd.run(),
            Self::Patch(cmd) => cmd.run(),
            Self::MergePoint(cmd) => cmd.run(),
            Self::Topic(cmd) => cmd.run(),
        }
    }
}
diff --git a/src/cmd/drop.rs b/src/cmd/drop.rs
new file mode 100644
index 0000000..208dbd6
--- /dev/null
+++ b/src/cmd/drop.rs
@@ -0,0 +1,205 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ ops::Deref,
+ path::PathBuf,
+};
+
+use anyhow::{
+ ensure,
+ Context,
+};
+use clap::ValueHint;
+use either::Either::Left;
+
+use crate::{
+ cmd,
+ metadata::{
+ self,
+ git::{
+ FromGit,
+ META_FILE_ALTERNATES,
+ META_FILE_MIRRORS,
+ },
+ IdentityId,
+ Signed,
+ },
+ patches::REF_HEADS_PATCHES,
+};
+
+mod bundles;
+pub use bundles::{
+ sync,
+ Bundles,
+ Sync,
+};
+
+mod edit;
+pub use edit::{
+ edit,
+ Edit,
+};
+
+mod init;
+pub use init::{
+ init,
+ Init,
+};
+
+mod serve;
+pub use serve::{
+ serve,
+ Serve,
+};
+
+mod snapshot;
+pub use snapshot::{
+ snapshot,
+ Snapshot,
+};
+
+mod show;
+pub use show::{
+ show,
+ Show,
+};
+
+mod unbundle;
+pub use unbundle::{
+ unbundle,
+ Unbundle,
+};
+
// Subcommands of `it drop`. `///` comments are clap help text.
#[derive(Debug, clap::Subcommand)]
#[allow(clippy::large_enum_variant)]
pub enum Cmd {
    /// Initialise a drop
    Init(Init),
    /// Display the drop metadata
    Show(Show),
    /// Serve bundles and patch submission over HTTP
    Serve(Serve),
    /// Edit the drop metadata
    Edit(Edit),
    /// Manage patch bundles
    #[clap(subcommand)]
    Bundles(Bundles),
    /// Take a snapshot of the patches received so far
    Snapshot(Snapshot),
    /// Unbundle the entire drop history
    Unbundle(Unbundle),
}
+
impl Cmd {
    /// Run the subcommand, converting its result into serialisable
    /// [`cmd::Output`].
    pub fn run(self) -> cmd::Result<cmd::Output> {
        match self {
            Self::Init(args) => init(args).map(cmd::IntoOutput::into_output),
            Self::Show(args) => show(args).map(cmd::IntoOutput::into_output),
            Self::Serve(args) => serve(args).map(cmd::IntoOutput::into_output),
            Self::Edit(args) => edit(args).map(cmd::IntoOutput::into_output),
            Self::Bundles(cmd) => cmd.run(),
            Self::Snapshot(args) => snapshot(args).map(cmd::IntoOutput::into_output),
            Self::Unbundle(args) => unbundle(args).map(cmd::IntoOutput::into_output),
        }
    }
}
+
// Arguments shared by several `it drop` subcommands.
#[derive(Debug, clap::Args)]
struct Common {
    /// Path to the drop repository
    #[clap(from_global)]
    git_dir: PathBuf,
    /// A list of paths to search for identity repositories
    #[clap(
        long,
        value_parser,
        value_name = "PATH",
        env = "IT_ID_PATH",
        default_value_t,
        value_hint = ValueHint::DirPath,
    )]
    id_path: cmd::util::args::IdSearchPath,
}
+
/// Look up the identity `id` in the repositories on `id_path` and
/// verify its signed metadata, resolving ancestry via `repo`.
///
/// Fails if verification fails, or if the id recomputed from the
/// verified metadata does not match the requested `id`.
fn find_id(
    repo: &git2::Repository,
    id_path: &[git2::Repository],
    id: &IdentityId,
) -> cmd::Result<Signed<metadata::Identity>> {
    let signed = metadata::Identity::from_search_path(id_path, cmd::id::identity_ref(Left(id))?)?
        .meta
        .signed;

    let verified_id = signed
        .verify(cmd::find_parent(repo))
        .with_context(|| format!("invalid identity {id}"))?;
    ensure!(
        &verified_id == id,
        "ids do not match after verification: expected {id}, found {verified_id}",
    );

    Ok(signed)
}
+
/// The user-editable subset of drop metadata, as presented by
/// `it drop edit`.
#[derive(serde::Serialize, serde::Deserialize)]
struct Editable {
    description: metadata::drop::Description,
    roles: metadata::drop::Roles,
    custom: metadata::Custom,
}

impl From<metadata::Drop> for Editable {
    fn from(
        metadata::Drop {
            description,
            roles,
            custom,
            ..
        }: metadata::Drop,
    ) -> Self {
        Self {
            description,
            roles,
            custom,
        }
    }
}
+
impl TryFrom<Editable> for metadata::Drop {
    type Error = crate::Error;

    // Validates role invariants, then produces fresh drop metadata
    // (`prev: None`) at the current spec version.
    fn try_from(
        Editable {
            description,
            roles,
            custom,
        }: Editable,
    ) -> Result<Self, Self::Error> {
        ensure!(!roles.root.ids.is_empty(), "drop role cannot be empty");
        ensure!(
            !roles.snapshot.ids.is_empty(),
            "snapshot roles cannot be empty"
        );
        ensure!(
            !roles.branches.is_empty(),
            "at least one branch role is required"
        );
        for (name, ann) in &roles.branches {
            ensure!(
                !ann.role.ids.is_empty(),
                "branch role {name} cannot be empty"
            );
            // Branch roles must name actual branches, and must not
            // shadow the reserved patches ref.
            ensure!(name.starts_with("refs/heads/"), "not a branch {name}");
            ensure!(name.deref() != REF_HEADS_PATCHES, "reserved branch {name}");
        }

        Ok(Self {
            spec_version: crate::SPEC_VERSION,
            description,
            prev: None,
            roles,
            custom,
        })
    }
}
diff --git a/src/cmd/drop/bundles.rs b/src/cmd/drop/bundles.rs
new file mode 100644
index 0000000..7c3e726
--- /dev/null
+++ b/src/cmd/drop/bundles.rs
@@ -0,0 +1,32 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use crate::cmd;
+
+mod prune;
+pub use prune::{
+ prune,
+ Prune,
+};
+
+mod sync;
+pub use sync::{
+ sync,
+ Sync,
+};
+
+// Subcommands operating on the local bundle store of a drop.
+#[derive(Debug, clap::Subcommand)]
+#[allow(clippy::large_enum_variant)]
+pub enum Bundles {
+    Sync(Sync),
+    Prune(Prune),
+}
+
+impl Bundles {
+    /// Dispatch to the selected subcommand and erase its concrete output
+    /// type into [`cmd::Output`].
+    pub fn run(self) -> cmd::Result<cmd::Output> {
+        match self {
+            Self::Sync(args) => sync(args).map(|out| cmd::IntoOutput::into_output(out)),
+            Self::Prune(args) => prune(args).map(|out| cmd::IntoOutput::into_output(out)),
+        }
+    }
+}
diff --git a/src/cmd/drop/bundles/prune.rs b/src/cmd/drop/bundles/prune.rs
new file mode 100644
index 0000000..6bd984d
--- /dev/null
+++ b/src/cmd/drop/bundles/prune.rs
@@ -0,0 +1,113 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ collections::BTreeSet,
+ fs,
+ path::PathBuf,
+ str::FromStr,
+};
+
+use clap::ValueHint;
+
+use crate::{
+ bundle,
+ cfg,
+ cmd::{
+ self,
+ ui::{
+ info,
+ warn,
+ },
+ },
+ git,
+ patches::iter::dropped,
+};
+
+// TODO:
+//
+// - option to prune bundles made obsolete by snapshots
+
+// CLI arguments for pruning bundles no longer referenced by any tracked drop.
+#[derive(Debug, clap::Args)]
+pub struct Prune {
+    /// Path to the drop repository
+    #[clap(from_global)]
+    git_dir: PathBuf,
+    /// The directory containing the bundles to prune
+    ///
+    /// Unless this is an absolute path, it is treated as relative to $GIT_DIR.
+    #[clap(
+        long,
+        value_parser,
+        value_name = "DIR",
+        default_value_os_t = cfg::paths::bundles().to_owned(),
+        value_hint = ValueHint::DirPath,
+    )]
+    bundle_dir: PathBuf,
+    /// Name of a git ref holding the drop metadata history
+    ///
+    /// All locally tracked drops should be given, otherwise bundles might get
+    /// pruned which are still being referred to.
+    #[clap(long = "drop", value_parser, value_name = "REF")]
+    drop_refs: Vec<String>,
+    /// Pretend to unlink, but don't
+    #[clap(long, value_parser)]
+    dry_run: bool,
+    /// Also remove location files (.uris)
+    #[clap(long, value_parser)]
+    remove_locations: bool,
+}
+
+/// Delete bundle files from `bundle_dir` whose hash is not referenced by any
+/// of the given drop histories. Returns the hashes that were (or, with
+/// `--dry-run`, would have been) removed.
+pub fn prune(args: Prune) -> cmd::Result<Vec<bundle::Hash>> {
+    let repo = git::repo::open_bare(&args.git_dir)?;
+    let bundle_dir = if args.bundle_dir.is_relative() {
+        repo.path().join(args.bundle_dir)
+    } else {
+        args.bundle_dir
+    };
+
+    // Collect every bundle hash reachable from any of the given drop refs;
+    // anything in this set must be kept.
+    let mut seen = BTreeSet::new();
+    for short in &args.drop_refs {
+        let drop_ref = repo.resolve_reference_from_short_name(short)?;
+        let ref_name = drop_ref.name().expect("drop references to be valid utf8");
+        info!("Collecting bundle hashes from {ref_name} ...");
+        for record in dropped::records(&repo, ref_name) {
+            let record = record?;
+            seen.insert(*record.bundle_hash());
+        }
+    }
+
+    info!("Traversing bundle dir {} ...", bundle_dir.display());
+    let mut pruned = Vec::new();
+    for entry in fs::read_dir(&bundle_dir)? {
+        let entry = entry?;
+        let path = entry.path();
+        // Dispatch on file extension: .bundle files are pruned by hash,
+        // .uris location files only when --remove-locations is given, and
+        // anything else is left alone with a warning.
+        match path.extension() {
+            Some(ext) if ext == bundle::FILE_EXTENSION => {
+                // The file stem is expected to be the bundle's hash; files
+                // that don't parse as a hash are never deleted.
+                let name = path.file_stem();
+                match name
+                    .and_then(|n| n.to_str())
+                    .and_then(|s| bundle::Hash::from_str(s).ok())
+                {
+                    Some(hash) => {
+                        if !seen.contains(&hash) {
+                            if !args.dry_run {
+                                fs::remove_file(&path)?;
+                            }
+                            pruned.push(hash);
+                        }
+                    },
+                    None => warn!("Ignoring {}: file name not a bundle hash", path.display()),
+                }
+            },
+            Some(ext) if ext == bundle::list::FILE_EXTENSION => {
+                // NOTE: location files are removed unconditionally of `seen`
+                // when --remove-locations is set, even in --dry-run? No:
+                // dry_run is not consulted here — assumed intentional, but
+                // worth confirming.
+                if args.remove_locations {
+                    fs::remove_file(&path)?;
+                }
+            },
+            _ => warn!("Ignoring {}: missing .bundle", path.display()),
+        }
+    }
+
+    Ok(pruned)
+}
diff --git a/src/cmd/drop/bundles/sync.rs b/src/cmd/drop/bundles/sync.rs
new file mode 100644
index 0000000..21fd58b
--- /dev/null
+++ b/src/cmd/drop/bundles/sync.rs
@@ -0,0 +1,276 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ borrow::Cow,
+ mem,
+ num::NonZeroUsize,
+ path::PathBuf,
+ sync::{
+ Arc,
+ Mutex,
+ },
+ time::{
+ SystemTime,
+ UNIX_EPOCH,
+ },
+};
+
+use anyhow::anyhow;
+use clap::ValueHint;
+use either::Either::{
+ Left,
+ Right,
+};
+use threadpool::ThreadPool;
+use url::Url;
+
+use crate::{
+ bundle,
+ cfg,
+ cmd::{
+ self,
+ drop::Common,
+ ui::{
+ debug,
+ info,
+ warn,
+ },
+ },
+ git::{
+ self,
+ if_not_found_none,
+ },
+ patches::{
+ self,
+ iter::dropped,
+ record,
+ REF_IT_PATCHES,
+ },
+};
+
+/// Max number of as-yet untried locations from a remote's bundle list to
+/// remember locally (i.e. locations we haven't verified to be reachable).
+pub const MAX_UNTRIED_LOCATIONS: usize = 3;
+
+// CLI arguments for fetching the bundles of a drop history from a remote.
+// NOTE: the `///` comments double as clap help text and must not change
+// user-visible output.
+#[derive(Debug, clap::Args)]
+pub struct Sync {
+    #[clap(flatten)]
+    common: Common,
+    /// The directory where to write the bundle to
+    ///
+    /// Unless this is an absolute path, it is treated as relative to $GIT_DIR.
+    #[clap(
+        long,
+        value_parser,
+        value_name = "DIR",
+        default_value_os_t = cfg::paths::bundles().to_owned(),
+        value_hint = ValueHint::DirPath,
+    )]
+    bundle_dir: PathBuf,
+    /// Name of the git ref holding the drop metadata history
+    #[clap(long = "drop", value_parser, value_name = "REF")]
+    drop_ref: Option<String>,
+    /// Base URL to fetch from
+    #[clap(long, value_parser, value_name = "URL", value_hint = ValueHint::Url)]
+    url: Url,
+    /// Fetch via IPFS
+    #[clap(
+        long,
+        value_parser,
+        value_name = "URL",
+        value_hint = ValueHint::Url,
+        env = "IPFS_GATEWAY",
+        default_value_t = Url::parse("https://ipfs.io").unwrap(),
+    )]
+    ipfs_gateway: Url,
+    /// Fetch even if the bundle already exists locally
+    #[clap(long, value_parser)]
+    overwrite: bool,
+    /// Ignore snapshots if encountered
+    #[clap(long, value_parser)]
+    no_snapshots: bool,
+    /// Maximum number of concurrent downloads. Default is the number of
+    /// available cores.
+    #[clap(short, long, value_parser, default_value_t = def_jobs())]
+    jobs: NonZeroUsize,
+}
+
+/// Default for `--jobs`: the number of available cores, but at least one.
+fn def_jobs() -> NonZeroUsize {
+    let cores = num_cpus::get().max(1);
+    NonZeroUsize::new(cores).expect("cores is at least 1")
+}
+
+/// Walk the drop history and download every still-relevant bundle via a
+/// thread pool, returning info about the bundles actually fetched.
+pub fn sync(args: Sync) -> cmd::Result<Vec<bundle::Info>> {
+    let repo = git::repo::open_bare(&args.common.git_dir)?;
+    let bundle_dir = if args.bundle_dir.is_relative() {
+        repo.path().join(args.bundle_dir)
+    } else {
+        args.bundle_dir
+    };
+    // Default to the conventional patches ref if no --drop was given.
+    let drop_ref = match args.drop_ref {
+        Some(rev) => if_not_found_none(repo.resolve_reference_from_short_name(&rev))?
+            .ok_or_else(|| anyhow!("no ref matching {rev} found"))?
+            .name()
+            .ok_or_else(|| anyhow!("invalid drop"))?
+            .to_owned(),
+        None => REF_IT_PATCHES.to_owned(),
+    };
+    let base_url = args.url.join("bundles/")?;
+    // Shared between worker threads; holds the download config.
+    let fetcher = Arc::new(Fetcher {
+        fetcher: bundle::Fetcher::default(),
+        bundle_dir,
+        base_url: base_url.clone(),
+        ipfs_gateway: args.ipfs_gateway,
+    });
+
+    let pool = ThreadPool::new(args.jobs.get());
+
+    let fetched = Arc::new(Mutex::new(Vec::new()));
+    // Once a snapshot is seen (and snapshots aren't disabled), only further
+    // snapshots and mergepoints are of interest.
+    let mut chasing_snaphots = false;
+    for record in dropped::records(&repo, &drop_ref) {
+        let record = record?;
+        let hexdig = record.bundle_hash().to_string();
+
+        if record.is_snapshot() {
+            if args.no_snapshots {
+                info!("Skipping snapshot bundle {hexdig}");
+                continue;
+            } else {
+                chasing_snaphots = true;
+            }
+        } else if chasing_snaphots && !record.is_mergepoint() {
+            info!("Skipping non-snapshot bundle {hexdig}");
+            continue;
+        }
+
+        if !args.overwrite && record.bundle_path(&fetcher.bundle_dir).exists() {
+            info!("Skipping existing bundle {hexdig}");
+            continue;
+        }
+
+        let record::BundleInfo {
+            info: bundle::Info { len, hash, .. },
+            prerequisites,
+            ..
+        } = record.bundle_info();
+        let url = base_url.join(&hexdig)?;
+
+        // Hand the download to the pool; failures are logged, not fatal.
+        pool.execute({
+            let len = *len;
+            let hash = *hash;
+            let fetched = Arc::clone(&fetched);
+            let fetcher = Arc::clone(&fetcher);
+            move || match fetcher.try_fetch(url, len, &hash) {
+                Ok(hash) => fetched.lock().unwrap().push(hash),
+                Err(e) => warn!("Download failed: {e}"),
+            }
+        });
+
+        // A snapshot without prerequisites is self-contained, so nothing
+        // older can still be needed.
+        if record.is_snapshot() && prerequisites.is_empty() {
+            info!("Full snapshot encountered, stopping here");
+            break;
+        }
+    }
+
+    // Wait for all workers, then move the results out of the shared Vec.
+    pool.join();
+    let fetched = {
+        let mut guard = fetched.lock().unwrap();
+        mem::take(&mut *guard)
+    };
+
+    Ok(fetched)
+}
+
+/// Download configuration shared (via `Arc`) between the worker threads.
+struct Fetcher {
+    fetcher: bundle::Fetcher,
+    bundle_dir: PathBuf,
+    base_url: Url,
+    ipfs_gateway: Url,
+}
+
+impl Fetcher {
+    /// Fetch one bundle, verifying its length and hash against `expect`.
+    ///
+    /// The primary URL may answer with a bundle *list* instead of bundle
+    /// data; in that case each listed location is tried in order until one
+    /// succeeds, and a bounded number of untried leftover locations is
+    /// persisted alongside the bundle for later use.
+    fn try_fetch(&self, url: Url, len: u64, hash: &bundle::Hash) -> cmd::Result<bundle::Info> {
+        info!("Fetching {url} ...");
+
+        let expect = bundle::Expect {
+            len,
+            hash,
+            checksum: None,
+        };
+        let mut locations = Vec::new();
+        let (fetched, origin) = self
+            .fetcher
+            .fetch(&url, &self.bundle_dir, expect)
+            .and_then(|resp| match resp {
+                // Right: the URL served the bundle directly.
+                Right(fetched) => Ok((fetched, url)),
+                // Left: the URL served a list of alternate locations.
+                Left(lst) => {
+                    info!("{url}: response was a bundle list, trying alternate locations");
+
+                    let mut iter = lst.bundles.into_iter();
+                    let mut found = None;
+
+                    for bundle::Location { uri, .. } in &mut iter {
+                        if let Some(url) = self.url_from_uri(uri) {
+                            if let Ok(Right(info)) =
+                                self.fetcher.fetch(&url, &self.bundle_dir, expect)
+                            {
+                                found = Some((info, url));
+                                break;
+                            }
+                        }
+                    }
+
+                    // If there are bundle uris left, remember a few
+                    let now = SystemTime::now()
+                        .duration_since(UNIX_EPOCH)
+                        .expect("backwards system clock")
+                        .as_secs();
+                    locations.extend(
+                        iter
+                            // Don't let the remote inflate the priority of
+                            // unverified locations
+                            .filter(|loc| loc.creation_token.map(|t| t < now).unwrap_or(true))
+                            // Only known protocols, relative to base url
+                            .filter_map(|loc| {
+                                let url = loc.uri.abs(&self.base_url).ok()?;
+                                matches!(url.scheme(), "http" | "https" | "ipfs").then(|| {
+                                    bundle::Location {
+                                        uri: url.into_owned().into(),
+                                        ..loc
+                                    }
+                                })
+                            })
+                            .take(MAX_UNTRIED_LOCATIONS),
+                    );
+
+                    found.ok_or_else(|| anyhow!("{url}: no reachable location found"))
+                },
+            })?;
+
+        info!("Downloaded {hash} from {origin}");
+        let bundle = patches::Bundle::from_fetched(fetched)?;
+        // Persist the retained alternate locations next to the bundle
+        // (the `.uris` file).
+        bundle.write_bundle_list(locations)?;
+
+        Ok(bundle.into())
+    }
+
+    /// Resolve a (possibly relative) bundle URI to a fetchable http(s) URL.
+    ///
+    /// `ipfs://` URIs are rewritten to go through the configured gateway;
+    /// anything else is discarded with a debug message and yields `None`.
+    fn url_from_uri(&self, uri: bundle::Uri) -> Option<Url> {
+        uri.abs(&self.base_url)
+            .map_err(Into::into)
+            .and_then(|url: Cow<Url>| -> cmd::Result<Url> {
+                match url.scheme() {
+                    "http" | "https" => Ok(url.into_owned()),
+                    "ipfs" => {
+                        // For ipfs:// URIs the host part carries the CID.
+                        let cid = url
+                            .host_str()
+                            .ok_or_else(|| anyhow!("{url}: host part not an IPFS CID"))?;
+                        let url = self.ipfs_gateway.join(&format!("/ipfs/{cid}"))?;
+                        Ok(url)
+                    },
+                    _ => Err(anyhow!("{url}: unsupported protocol")),
+                }
+            })
+            .map_err(|e| debug!("discarding {}: {}", uri.as_str(), e))
+            .ok()
+    }
+}
diff --git a/src/cmd/drop/edit.rs b/src/cmd/drop/edit.rs
new file mode 100644
index 0000000..9103819
--- /dev/null
+++ b/src/cmd/drop/edit.rs
@@ -0,0 +1,368 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ iter,
+ path::PathBuf,
+};
+
+use anyhow::{
+ anyhow,
+ ensure,
+};
+
+use super::{
+ find_id,
+ Common,
+ Editable,
+};
+use crate::{
+ cfg,
+ cmd::{
+ self,
+ ui::{
+ self,
+ edit_commit_message,
+ edit_metadata,
+ info,
+ },
+ Aborted,
+ },
+ git::{
+ self,
+ refs,
+ Refname,
+ },
+ json,
+ keys::Signer,
+ metadata::{
+ self,
+ git::{
+ FromGit,
+ GitDrop,
+ META_FILE_ALTERNATES,
+ META_FILE_DROP,
+ META_FILE_MIRRORS,
+ },
+ IdentityId,
+ Metadata,
+ },
+ patches::{
+ self,
+ REF_HEADS_PATCHES,
+ REF_IT_PATCHES,
+ },
+};
+
+// CLI arguments for editing the drop metadata (or its mirrors/alternates
+// files, via the subcommand).
+#[derive(Debug, clap::Args)]
+pub struct Edit {
+    #[clap(flatten)]
+    common: Common,
+    /// Commit message for this edit
+    ///
+    /// Like git, $EDITOR will be invoked if not specified.
+    #[clap(short, long, value_parser)]
+    message: Option<String>,
+
+    #[clap(subcommand)]
+    cmd: Option<Cmd>,
+}
+
+// Which auxiliary file to edit; absent means editing the drop doc itself.
+#[derive(Debug, clap::Subcommand)]
+enum Cmd {
+    /// Edit the mirrors file
+    Mirrors,
+    /// Edit the alternates file
+    Alternates,
+}
+
+/// Result of a successful edit: the repo, the ref that was updated, and the
+/// commit it now points at.
+#[derive(serde::Serialize)]
+pub struct Output {
+    repo: PathBuf,
+    #[serde(rename = "ref")]
+    refname: Refname,
+    #[serde(with = "crate::git::serde::oid")]
+    commit: git2::Oid,
+}
+
+/// Entry point: set up repo, signer and identity, then dispatch to the
+/// requested edit flow.
+pub fn edit(args: Edit) -> cmd::Result<Output> {
+    let Common { git_dir, id_path } = args.common;
+
+    let repo = git::repo::open(git_dir)?;
+    // Bare (serving) drops keep their history under refs/heads so that
+    // `git clone` picks it up; worktree repos use the it-specific ref.
+    let drop_ref = if repo.is_bare() {
+        REF_HEADS_PATCHES
+    } else {
+        REF_IT_PATCHES
+    }
+    .parse()
+    .unwrap();
+
+    // Make the identity repos' objects reachable from this repo.
+    let id_path = id_path.open_git();
+    git::add_alternates(&repo, &id_path)?;
+    let cfg = repo.config()?.snapshot()?;
+    let signer = cfg::signer(&cfg, ui::askpass)?;
+    let signer_id = SignerIdentity::new(&signer, &repo, &cfg, &id_path)?;
+    let meta = metadata::Drop::from_tip(&repo, &drop_ref)?;
+
+    let s = EditState {
+        repo,
+        id_path,
+        signer,
+        signer_id,
+        drop_ref,
+        meta,
+    };
+
+    match args.cmd {
+        None => s.edit_drop(args.message),
+        Some(Cmd::Mirrors) => s.edit_mirrors(args.message),
+        Some(Cmd::Alternates) => s.edit_alternates(args.message),
+    }
+}
+
+/// Everything an edit flow needs: the repo, identity search path, signing
+/// machinery, the ref being edited and its current metadata tip.
+struct EditState<S> {
+    repo: git2::Repository,
+    id_path: Vec<git2::Repository>,
+    signer: S,
+    signer_id: SignerIdentity,
+    drop_ref: Refname,
+    meta: GitDrop,
+}
+
+// NOTE(review): the three methods below share the same shape (permission
+// check, $EDITOR round-trip, abort-if-unchanged, sign, build tree, signed
+// commit on the locked drop ref); a common helper would remove a lot of
+// duplication.
+impl<S: Signer + 'static> EditState<S> {
+    /// Edit the drop document itself; requires the root (drop) role.
+    fn edit_drop(mut self, message: Option<String>) -> cmd::Result<Output> {
+        let GitDrop {
+            hash: parent_hash,
+            signed: metadata::Signed { signed: parent, .. },
+        } = self.meta;
+
+        ensure!(
+            self.signer_id.can_edit_drop(&parent),
+            "signer identity not allowed to edit the drop metadata"
+        );
+
+        let mut meta: metadata::Drop = edit_metadata(Editable::from(parent.clone()))?.try_into()?;
+        // Compare canonical forms so formatting-only edits don't create an
+        // empty revision.
+        if meta.canonicalise()? == parent.canonicalise()? {
+            info!("Document unchanged");
+            cmd::abort!();
+        }
+        // Link the new revision to the one we started from.
+        meta.prev = Some(parent_hash);
+
+        let signed = Metadata::drop(&meta).sign(iter::once(&mut self.signer as &mut dyn Signer))?;
+
+        let mut tx = refs::Transaction::new(&self.repo)?;
+        let drop_ref = tx.lock_ref(self.drop_ref)?;
+
+        let parent = self
+            .repo
+            .find_reference(drop_ref.name())?
+            .peel_to_commit()?;
+        let parent_tree = parent.tree()?;
+        let mut root = self.repo.treebuilder(Some(&parent_tree))?;
+        patches::Record::remove_from(&mut root)?;
+
+        // Refresh the "ids" subtree with the identities referenced by the
+        // new role assignments.
+        let mut ids = self
+            .repo
+            .treebuilder(get_tree(&self.repo, &root, "ids")?.as_ref())?;
+        let identities = meta
+            .roles
+            .ids()
+            .into_iter()
+            .map(|id| find_id(&self.repo, &self.id_path, &id).map(|signed| (id, signed)))
+            .collect::<Result<Vec<_>, _>>()?;
+        for (iid, id) in identities {
+            let iid = iid.to_string();
+            let mut tb = self
+                .repo
+                .treebuilder(get_tree(&self.repo, &ids, &iid)?.as_ref())?;
+            metadata::identity::fold_to_tree(&self.repo, &mut tb, id)?;
+            ids.insert(&iid, tb.write()?, git2::FileMode::Tree.into())?;
+        }
+        root.insert("ids", ids.write()?, git2::FileMode::Tree.into())?;
+
+        root.insert(
+            META_FILE_DROP,
+            json::to_blob(&self.repo, &signed)?,
+            git2::FileMode::Blob.into(),
+        )?;
+        let tree = self.repo.find_tree(root.write()?)?;
+
+        let msg = message.map(Ok).unwrap_or_else(|| {
+            edit_commit_message(&self.repo, drop_ref.name(), &parent_tree, &tree)
+        })?;
+        let commit = git::commit_signed(&mut self.signer, &self.repo, msg, &tree, &[&parent])?;
+        drop_ref.set_target(commit, "it: metadata edit");
+
+        tx.commit()?;
+
+        Ok(Output {
+            repo: self.repo.path().to_owned(),
+            refname: drop_ref.into(),
+            commit,
+        })
+    }
+
+    /// Edit the mirrors file; requires the mirrors role.
+    pub fn edit_mirrors(mut self, message: Option<String>) -> cmd::Result<Output> {
+        ensure!(
+            self.signer_id.can_edit_mirrors(&self.meta.signed.signed),
+            "signer identity not allowed to edit mirrors"
+        );
+
+        // Start from the existing mirrors file, or an empty default if none
+        // exists yet.
+        let prev = metadata::Mirrors::from_tip(&self.repo, &self.drop_ref)
+            .map(|m| m.signed.signed)
+            .or_else(|e| {
+                if e.is::<metadata::git::error::FileNotFound>() {
+                    Ok(Default::default())
+                } else {
+                    Err(e)
+                }
+            })?;
+        let prev_canonical = prev.canonicalise()?;
+        let meta = edit_metadata(prev)?;
+        if meta.canonicalise()? == prev_canonical {
+            info!("Document unchanged");
+            cmd::abort!();
+        }
+
+        let signed =
+            Metadata::mirrors(meta).sign(iter::once(&mut self.signer as &mut dyn Signer))?;
+
+        let mut tx = refs::Transaction::new(&self.repo)?;
+        let drop_ref = tx.lock_ref(self.drop_ref)?;
+
+        let parent = self
+            .repo
+            .find_reference(drop_ref.name())?
+            .peel_to_commit()?;
+        let parent_tree = parent.tree()?;
+        let mut root = self.repo.treebuilder(Some(&parent_tree))?;
+        patches::Record::remove_from(&mut root)?;
+        root.insert(
+            META_FILE_MIRRORS,
+            json::to_blob(&self.repo, &signed)?,
+            git2::FileMode::Blob.into(),
+        )?;
+        let tree = self.repo.find_tree(root.write()?)?;
+
+        let msg = message.map(Ok).unwrap_or_else(|| {
+            edit_commit_message(&self.repo, drop_ref.name(), &parent_tree, &tree)
+        })?;
+        let commit = git::commit_signed(&mut self.signer, &self.repo, msg, &tree, &[&parent])?;
+        drop_ref.set_target(commit, "it: mirrors edit");
+
+        tx.commit()?;
+
+        Ok(Output {
+            repo: self.repo.path().to_owned(),
+            refname: drop_ref.into(),
+            commit,
+        })
+    }
+
+    /// Edit the alternates file.
+    ///
+    /// NOTE(review): gated on `can_edit_mirrors` — presumably the mirrors
+    /// role also governs alternates; confirm against the spec.
+    pub fn edit_alternates(mut self, message: Option<String>) -> cmd::Result<Output> {
+        ensure!(
+            self.signer_id.can_edit_mirrors(&self.meta.signed.signed),
+            "signer identity not allowed to edit alternates"
+        );
+
+        let prev = metadata::Alternates::from_tip(&self.repo, &self.drop_ref)
+            .map(|m| m.signed.signed)
+            .or_else(|e| {
+                if e.is::<metadata::git::error::FileNotFound>() {
+                    Ok(Default::default())
+                } else {
+                    Err(e)
+                }
+            })?;
+        let prev_canonical = prev.canonicalise()?;
+        let meta = edit_metadata(prev)?;
+        if meta.canonicalise()? == prev_canonical {
+            info!("Document unchanged");
+            cmd::abort!();
+        }
+
+        let signed =
+            Metadata::alternates(meta).sign(iter::once(&mut self.signer as &mut dyn Signer))?;
+
+        let mut tx = refs::Transaction::new(&self.repo)?;
+        let drop_ref = tx.lock_ref(self.drop_ref)?;
+
+        let parent = self
+            .repo
+            .find_reference(drop_ref.name())?
+            .peel_to_commit()?;
+        let parent_tree = parent.tree()?;
+        let mut root = self.repo.treebuilder(Some(&parent_tree))?;
+        patches::Record::remove_from(&mut root)?;
+        root.insert(
+            META_FILE_ALTERNATES,
+            json::to_blob(&self.repo, &signed)?,
+            git2::FileMode::Blob.into(),
+        )?;
+        let tree = self.repo.find_tree(root.write()?)?;
+
+        let msg = message.map(Ok).unwrap_or_else(|| {
+            edit_commit_message(&self.repo, drop_ref.name(), &parent_tree, &tree)
+        })?;
+        let commit = git::commit_signed(&mut self.signer, &self.repo, msg, &tree, &[&parent])?;
+        drop_ref.set_target(commit, "it: alternates edit");
+
+        tx.commit()?;
+
+        Ok(Output {
+            repo: self.repo.path().to_owned(),
+            refname: drop_ref.into(),
+            commit,
+        })
+    }
+}
+
+/// Look up `name` in `builder` and peel the entry to a tree.
+///
+/// Returns `Ok(None)` when no such entry exists; errors when the entry
+/// exists but is not a tree.
+fn get_tree<'a>(
+    repo: &'a git2::Repository,
+    builder: &git2::TreeBuilder,
+    name: &str,
+) -> cmd::Result<Option<git2::Tree<'a>>> {
+    match builder.get(name)? {
+        None => Ok(None),
+        Some(entry) => {
+            let tree = entry
+                .to_object(repo)?
+                .into_tree()
+                .map_err(|_| anyhow!("{name} is not a tree"))?;
+            Ok(Some(tree))
+        },
+    }
+}
+
+/// The identity the configured signing key belongs to, used for role checks.
+struct SignerIdentity {
+    id: IdentityId,
+}
+
+impl SignerIdentity {
+    /// Resolve the signer's identity from gitconfig and verify that the
+    /// signing key is actually listed in that identity's key set.
+    pub fn new<S: Signer>(
+        signer: &S,
+        repo: &git2::Repository,
+        cfg: &git2::Config,
+        id_path: &[git2::Repository],
+    ) -> cmd::Result<Self> {
+        let id =
+            cfg::git::identity(cfg)?.ok_or_else(|| anyhow!("signer identity not in gitconfig"))?;
+        let meta = find_id(repo, id_path, &id)?;
+        let keyid = metadata::KeyId::from(signer.ident());
+
+        ensure!(
+            meta.signed.keys.contains_key(&keyid),
+            "signing key {keyid} is not in identity {id}"
+        );
+
+        Ok(Self { id })
+    }
+
+    /// Whether this identity holds the root (drop) role in `parent`.
+    pub fn can_edit_drop(&self, parent: &metadata::Drop) -> bool {
+        parent.roles.root.ids.contains(&self.id)
+    }
+
+    /// Whether this identity holds the mirrors role in `parent`.
+    pub fn can_edit_mirrors(&self, parent: &metadata::Drop) -> bool {
+        parent.roles.mirrors.ids.contains(&self.id)
+    }
+}
diff --git a/src/cmd/drop/init.rs b/src/cmd/drop/init.rs
new file mode 100644
index 0000000..b843255
--- /dev/null
+++ b/src/cmd/drop/init.rs
@@ -0,0 +1,194 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ iter,
+ num::NonZeroUsize,
+ path::PathBuf,
+};
+
+use anyhow::{
+ anyhow,
+ ensure,
+};
+
+use super::{
+ find_id,
+ Common,
+ Editable,
+};
+use crate::{
+ cfg,
+ cmd::{
+ self,
+ args::Refname,
+ ui::{
+ self,
+ edit_metadata,
+ },
+ },
+ git::{
+ self,
+ if_not_found_none,
+ refs,
+ },
+ json,
+ metadata::{
+ self,
+ git::META_FILE_DROP,
+ Metadata,
+ },
+ patches::{
+ REF_HEADS_PATCHES,
+ REF_IT_PATCHES,
+ },
+};
+
+// CLI arguments for initialising a new drop.
+#[derive(Debug, clap::Args)]
+pub struct Init {
+    #[clap(flatten)]
+    common: Common,
+    /// A description for this drop instance, max. 128 characters
+    #[clap(long, value_parser, value_name = "STRING")]
+    description: metadata::drop::Description,
+    /// If the repository does not already exist, initialise it as non-bare
+    ///
+    /// A drop is usually initialised inside an already existing git repository,
+    /// or as a standalone drop repository. The latter is advisable for serving
+    /// over the network.
+    ///
+    /// When init is given a directory which does not already exist, it is
+    /// assumed that a standalone drop should be created, and thus the
+    /// repository is initialised as bare. This behaviour can be overridden
+    /// by --no-bare.
+    #[clap(long, value_parser)]
+    no_bare: bool,
+}
+
+/// Result of a successful init: the repo, the drop ref, and its first commit.
+#[derive(serde::Serialize)]
+pub struct Output {
+    repo: PathBuf,
+    #[serde(rename = "ref")]
+    refname: Refname,
+    #[serde(with = "crate::git::serde::oid")]
+    commit: git2::Oid,
+}
+
+/// Create a new drop: open or init the repository, let the user edit an
+/// initial drop document seeded with sensible defaults, sign it, and commit
+/// it as the root of the drop history.
+pub fn init(args: Init) -> cmd::Result<Output> {
+    let Common { git_dir, id_path } = args.common;
+    let drop_ref: Refname = REF_IT_PATCHES.parse().unwrap();
+
+    let repo = git::repo::open_or_init(
+        git_dir,
+        git::repo::InitOpts {
+            bare: !args.no_bare,
+            description: "`it` drop",
+            initial_head: &drop_ref,
+        },
+    )?;
+
+    let mut tx = refs::Transaction::new(&repo)?;
+    let drop_ref = tx.lock_ref(drop_ref)?;
+    // Refuse to clobber an existing drop history.
+    ensure!(
+        if_not_found_none(repo.refname_to_id(drop_ref.name()))?.is_none(),
+        "{} already exists",
+        drop_ref
+    );
+
+    // Make the identity repos' objects reachable from this repo.
+    let id_path = id_path.open_git();
+    git::add_alternates(&repo, &id_path)?;
+
+    let cfg = repo.config()?.snapshot()?;
+    let mut signer = cfg::signer(&cfg, ui::askpass)?;
+    // Resolve the signer's identity and check the signing key belongs to it.
+    let signer_id = {
+        let iid =
+            cfg::git::identity(&cfg)?.ok_or_else(|| anyhow!("signer identity not in gitconfig"))?;
+        let id = find_id(&repo, &id_path, &iid)?;
+        let keyid = metadata::KeyId::from(signer.ident());
+        ensure!(
+            id.signed.keys.contains_key(&keyid),
+            "signing key {keyid} is not in identity {iid}"
+        );
+
+        iid
+    };
+
+    // Seed the editor with a document granting every role to the signer,
+    // threshold 1, with one branch role for the configured default branch.
+    let default = {
+        let default_role = metadata::drop::Role {
+            ids: [signer_id].into(),
+            threshold: NonZeroUsize::new(1).unwrap(),
+        };
+        let default_branch = cfg::git::default_branch(&cfg)?;
+
+        metadata::Drop {
+            spec_version: crate::SPEC_VERSION,
+            description: args.description,
+            prev: None,
+            custom: Default::default(),
+            roles: metadata::drop::Roles {
+                root: default_role.clone(),
+                snapshot: default_role.clone(),
+                mirrors: default_role.clone(),
+                branches: [(
+                    default_branch,
+                    metadata::drop::Annotated {
+                        role: default_role,
+                        description: metadata::drop::Description::try_from(
+                            "the default branch".to_owned(),
+                        )
+                        .unwrap(),
+                    },
+                )]
+                .into(),
+            },
+        }
+    };
+    let meta: metadata::Drop = edit_metadata(Editable::from(default))?.try_into()?;
+    // The signer must keep the drop role, or they couldn't sign what they
+    // just wrote.
+    ensure!(
+        meta.roles.root.ids.contains(&signer_id),
+        "signing identity {signer_id} is lacking the drop role required to sign the metadata"
+    );
+    let signed = Metadata::drop(&meta).sign(iter::once(&mut signer))?;
+
+    // Build the initial tree: an "ids" subtree with all referenced
+    // identities, plus the signed drop document.
+    let mut root = repo.treebuilder(None)?;
+    let mut ids = repo.treebuilder(None)?;
+    let identities = meta
+        .roles
+        .ids()
+        .into_iter()
+        .map(|id| find_id(&repo, &id_path, &id).map(|signed| (id, signed)))
+        .collect::<Result<Vec<_>, _>>()?;
+    for (iid, id) in identities {
+        let iid = iid.to_string();
+        let mut tb = repo.treebuilder(None)?;
+        metadata::identity::fold_to_tree(&repo, &mut tb, id)?;
+        ids.insert(&iid, tb.write()?, git2::FileMode::Tree.into())?;
+    }
+    root.insert("ids", ids.write()?, git2::FileMode::Tree.into())?;
+    root.insert(
+        META_FILE_DROP,
+        json::to_blob(&repo, &signed)?,
+        git2::FileMode::Blob.into(),
+    )?;
+    let tree = repo.find_tree(root.write()?)?;
+    let msg = format!("Create drop '{}'", meta.description);
+    let commit = git::commit_signed(&mut signer, &repo, msg, &tree, &[])?;
+
+    if repo.is_bare() {
+        // Arrange refs to be `git-clone`-friendly
+        let heads_patches = tx.lock_ref(REF_HEADS_PATCHES.parse()?)?;
+        heads_patches.set_target(commit, "it: create");
+        drop_ref.set_symbolic_target(heads_patches.name().clone(), String::new());
+        repo.set_head(heads_patches.name())?;
+    } else {
+        drop_ref.set_target(commit, "it: create");
+    }
+
+    tx.commit()?;
+
+    Ok(Output {
+        repo: repo.path().to_owned(),
+        refname: drop_ref.into(),
+        commit,
+    })
+}
diff --git a/src/cmd/drop/serve.rs b/src/cmd/drop/serve.rs
new file mode 100644
index 0000000..7540d58
--- /dev/null
+++ b/src/cmd/drop/serve.rs
@@ -0,0 +1,140 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ fs::File,
+ io::Read,
+ path::PathBuf,
+ str::FromStr,
+};
+
+use clap::ValueHint;
+use url::Url;
+
+use super::Common;
+use crate::{
+ cfg,
+ cmd::{
+ self,
+ args::Refname,
+ },
+ http,
+ patches::{
+ REF_IT_BUNDLES,
+ REF_IT_PATCHES,
+ REF_IT_SEEN,
+ },
+};
+
+// CLI arguments for serving a drop over HTTP(S).
+#[derive(Debug, clap::Args)]
+pub struct Serve {
+    #[clap(flatten)]
+    common: Common,
+    /// The directory where to write the bundle to
+    ///
+    /// Unless this is an absolute path, it is treated as relative to $GIT_DIR.
+    #[clap(
+        long,
+        value_parser,
+        value_name = "DIR",
+        default_value_os_t = cfg::paths::bundles().to_owned(),
+        value_hint = ValueHint::DirPath,
+    )]
+    bundle_dir: PathBuf,
+    /// Ref prefix under which to store the refs contained in patch bundles
+    #[clap(
+        long,
+        value_parser,
+        value_name = "REF",
+        default_value_t = Refname::from_str(REF_IT_BUNDLES).unwrap()
+    )]
+    unbundle_prefix: Refname,
+    /// The refname anchoring the seen objects tree
+    #[clap(
+        long,
+        value_parser,
+        value_name = "REF",
+        default_value_t = Refname::from_str(REF_IT_SEEN).unwrap()
+    )]
+    seen_ref: Refname,
+    /// 'host:port' to listen on
+    #[clap(
+        long,
+        value_parser,
+        value_name = "HOST:PORT",
+        default_value = "127.0.0.1:8084"
+    )]
+    listen: String,
+    /// Number of threads to use for the server
+    ///
+    /// If not set, the number of available cores is used.
+    #[clap(long, value_parser, value_name = "INT")]
+    threads: Option<usize>,
+    /// PEM-encoded TLS certificate
+    ///
+    /// Requires 'tls-key'. If not set (the default), the server will not use
+    /// TLS.
+    #[clap(
+        long,
+        value_parser,
+        value_name = "FILE",
+        requires = "tls_key",
+        value_hint = ValueHint::FilePath
+    )]
+    tls_cert: Option<PathBuf>,
+    /// PEM-encoded TLS private key
+    ///
+    /// Requires 'tls-cert'. If not set (the default), the server will not use
+    /// TLS.
+    #[clap(
+        long,
+        value_parser,
+        value_name = "FILE",
+        requires = "tls_cert",
+        value_hint = ValueHint::FilePath
+    )]
+    tls_key: Option<PathBuf>,
+    /// IPFS API to publish received patch bundle to
+    #[clap(
+        long,
+        value_parser,
+        value_name = "URL",
+        value_hint = ValueHint::Url,
+    )]
+    ipfs_api: Option<Url>,
+}
+
+// The server runs until interrupted, so there is nothing to report.
+#[derive(serde::Serialize)]
+pub struct Output;
+
+/// Load the optional TLS material and hand off to the HTTP server; blocks
+/// for the lifetime of the server.
+pub fn serve(args: Serve) -> cmd::Result<Output> {
+    // TLS is enabled iff --tls-cert was given; clap guarantees --tls-key
+    // accompanies it.
+    let tls = args
+        .tls_cert
+        .map(|cert_path| -> cmd::Result<http::SslConfig> {
+            let mut certificate = Vec::new();
+            let mut private_key = Vec::new();
+            File::open(cert_path)?.read_to_end(&mut certificate)?;
+            File::open(args.tls_key.expect("presence of 'tls-key' ensured by clap"))?
+                .read_to_end(&mut private_key)?;
+
+            Ok(http::SslConfig {
+                certificate,
+                private_key,
+            })
+        })
+        .transpose()?;
+
+    http::serve(
+        args.listen,
+        http::Options {
+            git_dir: args.common.git_dir,
+            bundle_dir: args.bundle_dir,
+            unbundle_prefix: args.unbundle_prefix.into(),
+            drop_ref: REF_IT_PATCHES.into(),
+            seen_ref: args.seen_ref.into(),
+            threads: args.threads,
+            tls,
+            ipfs_api: args.ipfs_api,
+        },
+    )
+}
diff --git a/src/cmd/drop/show.rs b/src/cmd/drop/show.rs
new file mode 100644
index 0000000..e3fdcfc
--- /dev/null
+++ b/src/cmd/drop/show.rs
@@ -0,0 +1,208 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ collections::BTreeMap,
+ io,
+ path::PathBuf,
+};
+
+use anyhow::Context;
+
+use super::{
+ Common,
+ META_FILE_ALTERNATES,
+ META_FILE_MIRRORS,
+};
+use crate::{
+ cmd::{
+ self,
+ util::args::Refname,
+ FromGit as _,
+ GitAlternates,
+ GitDrop,
+ GitMirrors,
+ },
+ git,
+ metadata::{
+ self,
+ ContentHash,
+ IdentityId,
+ KeySet,
+ },
+ patches::REF_IT_PATCHES,
+};
+
+// CLI arguments for displaying (and verifying) the drop metadata.
+#[derive(Debug, clap::Args)]
+pub struct Show {
+    #[clap(flatten)]
+    common: Common,
+    /// Name of the git ref holding the drop metadata history
+    #[clap(
+        long = "drop",
+        value_parser,
+        value_name = "REF",
+        default_value_t = REF_IT_PATCHES.parse().unwrap(),
+    )]
+    drop_ref: Refname,
+}
+
+/// Everything found at the drop tip: the drop document plus the optional
+/// mirrors and alternates files, each with its verification status.
+#[derive(serde::Serialize)]
+pub struct Output {
+    repo: PathBuf,
+    refname: Refname,
+    drop: Data<metadata::Drop>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    mirrors: Option<Data<metadata::Mirrors>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    alternates: Option<Data<metadata::Alternates>>,
+}
+
+/// A metadata document together with its content hash and verification
+/// outcome.
+#[derive(serde::Serialize)]
+pub struct Data<T> {
+    hash: ContentHash,
+    status: Status,
+    json: T,
+}
+
+/// Whether a document's signatures verified; the failure is serialised via
+/// its `Display` form.
+#[derive(serde::Serialize)]
+#[serde(rename_all = "UPPERCASE")]
+pub enum Status {
+    Verified,
+    #[serde(with = "crate::serde::display")]
+    Invalid(metadata::error::Verification),
+}
+
+impl From<Result<(), metadata::error::Verification>> for Status {
+    /// Collapse a verification result into a serialisable status.
+    fn from(r: Result<(), metadata::error::Verification>) -> Self {
+        match r {
+            Ok(()) => Self::Verified,
+            Err(e) => Self::Invalid(e),
+        }
+    }
+}
+
+/// Read the drop document (and, if present, the mirrors and alternates
+/// files) from the tip of the drop ref and verify each against the
+/// identities embedded in the history's "ids" tree.
+pub fn show(args: Show) -> cmd::Result<Output> {
+    let Common { git_dir, .. } = args.common;
+    let drop_ref = args.drop_ref;
+
+    let repo = git::repo::open(git_dir)?;
+
+    let GitDrop {
+        hash,
+        signed: metadata::Signed {
+            signed: drop,
+            signatures,
+        },
+    } = metadata::Drop::from_tip(&repo, &drop_ref)?;
+
+    // The cache avoids re-verifying the same identity for each document.
+    let mut signer_cache = SignerCache::new(&repo, &drop_ref)?;
+    let status = drop
+        .verify(
+            &signatures,
+            cmd::find_parent(&repo),
+            find_signer(&mut signer_cache),
+        )
+        .into();
+
+    let mut mirrors = None;
+    let mut alternates = None;
+
+    // The mirrors/alternates files are optional blobs in the tip tree.
+    let tree = repo.find_reference(&drop_ref)?.peel_to_commit()?.tree()?;
+    if let Some(entry) = tree.get_name(META_FILE_MIRRORS) {
+        let blob = entry.to_object(&repo)?.peel_to_blob()?;
+        let GitMirrors { hash, signed } = metadata::Mirrors::from_blob(&blob)?;
+        let status = drop
+            .verify_mirrors(&signed, find_signer(&mut signer_cache))
+            .into();
+
+        mirrors = Some(Data {
+            hash,
+            status,
+            json: signed.signed,
+        });
+    }
+
+    if let Some(entry) = tree.get_name(META_FILE_ALTERNATES) {
+        let blob = entry.to_object(&repo)?.peel_to_blob()?;
+        let GitAlternates { hash, signed } = metadata::Alternates::from_blob(&blob)?;
+        let status = drop
+            .verify_alternates(&signed, find_signer(&mut signer_cache))
+            .into();
+
+        alternates = Some(Data {
+            hash,
+            status,
+            json: signed.signed,
+        });
+    }
+
+    Ok(Output {
+        repo: repo.path().to_owned(),
+        refname: drop_ref,
+        drop: Data {
+            hash,
+            status,
+            json: drop,
+        },
+        mirrors,
+        alternates,
+    })
+}
+
+/// Memoises verified key sets per identity, rooted at the history's "ids"
+/// tree, so each signer is resolved and verified at most once.
+struct SignerCache<'a> {
+    repo: &'a git2::Repository,
+    root: git2::Tree<'a>,
+    keys: BTreeMap<IdentityId, KeySet<'static>>,
+}
+
+impl<'a> SignerCache<'a> {
+    /// Locate the "ids" subtree at the tip of `refname`; errors with a
+    /// NotFound git error when the history has no such tree.
+    pub(self) fn new(repo: &'a git2::Repository, refname: &Refname) -> git::Result<Self> {
+        let root = {
+            let id = repo
+                .find_reference(refname)?
+                .peel_to_tree()?
+                .get_name("ids")
+                .ok_or_else(|| {
+                    git2::Error::new(
+                        git2::ErrorCode::NotFound,
+                        git2::ErrorClass::Tree,
+                        "'ids' tree not found",
+                    )
+                })?
+                .id();
+            repo.find_tree(id)?
+        };
+        let keys = BTreeMap::new();
+
+        Ok(Self { repo, root, keys })
+    }
+}
+
+/// Build the signer-lookup closure the verification API expects, backed by
+/// the cache. Errors are adapted to `io::Error` to fit the callback's
+/// signature.
+fn find_signer<'a>(
+    cache: &'a mut SignerCache,
+) -> impl FnMut(&IdentityId) -> io::Result<KeySet<'static>> + 'a {
+    // Inner fn rather than a closure so the borrows of the cache's fields
+    // can be split per call.
+    fn go(
+        repo: &git2::Repository,
+        root: &git2::Tree,
+        keys: &mut BTreeMap<IdentityId, KeySet<'static>>,
+        id: &IdentityId,
+    ) -> cmd::Result<KeySet<'static>> {
+        match keys.get(id) {
+            Some(keys) => Ok(keys.clone()),
+            None => {
+                // Cache miss: verify the identity found in the "ids" tree
+                // and remember its key set.
+                let (id, verified) = metadata::identity::find_in_tree(repo, root, id)
+                    .with_context(|| format!("identity {id} failed to verify"))?
+                    .into_parts();
+                keys.insert(id, verified.keys.clone());
+                Ok(verified.keys)
+            },
+        }
+    }
+
+    |id| go(cache.repo, &cache.root, &mut cache.keys, id).map_err(as_io)
+}
+
+fn as_io<E>(e: E) -> io::Error
+where
+ E: Into<Box<dyn std::error::Error + Send + Sync>>,
+{
+ io::Error::new(io::ErrorKind::Other, e)
+}
diff --git a/src/cmd/drop/snapshot.rs b/src/cmd/drop/snapshot.rs
new file mode 100644
index 0000000..b1348d3
--- /dev/null
+++ b/src/cmd/drop/snapshot.rs
@@ -0,0 +1,20 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use crate::{
+ cmd::{
+ self,
+ patch,
+ },
+ patches,
+};
+
// Arguments for `it drop snapshot`.
#[derive(Debug, clap::Args)]
pub struct Snapshot {
    // Options shared by all patch-creating commands.
    #[clap(flatten)]
    common: patch::Common,
}

/// Record a snapshot in the local drop history.
///
/// Thin wrapper around [`patch::create`] with [`patch::Kind::Snapshot`].
pub fn snapshot(Snapshot { common }: Snapshot) -> cmd::Result<patches::Record> {
    patch::create(patch::Kind::Snapshot { common })
}
diff --git a/src/cmd/drop/unbundle.rs b/src/cmd/drop/unbundle.rs
new file mode 100644
index 0000000..a9c9f77
--- /dev/null
+++ b/src/cmd/drop/unbundle.rs
@@ -0,0 +1,93 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ collections::BTreeMap,
+ path::PathBuf,
+};
+
+use anyhow::anyhow;
+use clap::ValueHint;
+
+use crate::{
+ cmd,
+ git::{
+ self,
+ if_not_found_none,
+ refs,
+ Refname,
+ },
+ patches::{
+ self,
+ iter::dropped,
+ Bundle,
+ REF_IT_BUNDLES,
+ REF_IT_PATCHES,
+ },
+ paths,
+};
+
+// TODO:
+//
+// - require drop metadata verification
+// - abort if existing ref would be set to a different target (or --force)
+// - honour snapshots
+//
+
+#[derive(Debug, clap::Args)]
+pub struct Unbundle {
+ #[clap(from_global)]
+ git_dir: PathBuf,
+ /// The directory where to write the bundle to
+ ///
+ /// Unless this is an absolute path, it is treated as relative to $GIT_DIR.
+ #[clap(
+ long,
+ value_parser,
+ value_name = "DIR",
+ default_value_os_t = paths::bundles().to_owned(),
+ value_hint = ValueHint::DirPath,
+ )]
+ bundle_dir: PathBuf,
+ /// The drop history to find the topic in
+ #[clap(value_parser)]
+ drop: Option<String>,
+}
+
/// Result of [`unbundle`], reported to the user.
#[derive(serde::Serialize)]
pub struct Output {
    // Refs that were created or updated, mapped to their new target oid.
    updated: BTreeMap<Refname, git::serde::oid::Oid>,
}
+
/// Unbundle every record of a drop history into the local repository.
///
/// For each record (as yielded by `dropped::records_rev`), the stored bundle
/// is located in the bundle dir, its packdata is indexed into the odb, and
/// the refs under `REF_IT_BUNDLES` are updated. All ref updates are applied
/// in a single transaction.
pub fn unbundle(args: Unbundle) -> cmd::Result<Output> {
    let repo = git::repo::open(&args.git_dir)?;
    // A relative bundle dir is anchored at $GIT_DIR.
    let bundle_dir = if args.bundle_dir.is_relative() {
        repo.path().join(args.bundle_dir)
    } else {
        args.bundle_dir
    };
    // Resolve the drop ref; default to the local drop history.
    let drop = match args.drop {
        Some(rev) => if_not_found_none(repo.resolve_reference_from_short_name(&rev))?
            .ok_or_else(|| anyhow!("no ref matching {rev} found"))?
            .name()
            .ok_or_else(|| anyhow!("invalid drop"))?
            .to_owned(),
        None => REF_IT_PATCHES.to_owned(),
    };

    let odb = repo.odb()?;
    let mut tx = refs::Transaction::new(&repo)?;
    let mut up = BTreeMap::new();
    for rec in dropped::records_rev(&repo, &drop) {
        let rec = rec?;
        let bundle = Bundle::from_stored(&bundle_dir, rec.bundle_info().as_expect())?;
        // Make the bundle's objects available before updating refs.
        bundle.packdata()?.index(&odb)?;
        let updated = patches::unbundle(&odb, &mut tx, REF_IT_BUNDLES, &rec)?;
        for (name, oid) in updated {
            up.insert(name, oid.into());
        }
    }
    // Commit all ref updates atomically.
    tx.commit()?;

    Ok(Output { updated: up })
}
diff --git a/src/cmd/id.rs b/src/cmd/id.rs
new file mode 100644
index 0000000..7504489
--- /dev/null
+++ b/src/cmd/id.rs
@@ -0,0 +1,188 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ collections::BTreeSet,
+ num::NonZeroUsize,
+ path::PathBuf,
+};
+
+use anyhow::{
+ anyhow,
+ ensure,
+};
+use clap::ValueHint;
+use either::{
+ Either,
+ Left,
+ Right,
+};
+use url::Url;
+
+use crate::{
+ cfg,
+ cmd::{
+ self,
+ args::Refname,
+ },
+ git,
+ metadata::{
+ self,
+ git::META_FILE_ID,
+ IdentityId,
+ },
+ paths,
+};
+
+mod edit;
+pub use edit::{
+ edit,
+ Edit,
+};
+
+mod init;
+pub use init::{
+ init,
+ Init,
+};
+
+mod show;
+pub use show::{
+ show,
+ Show,
+};
+
+mod sign;
+pub use sign::{
+ sign,
+ Sign,
+};
+
+#[derive(Debug, clap::Subcommand)]
+#[allow(clippy::large_enum_variant)]
+pub enum Cmd {
+ /// Initialise a fresh identity
+ Init(Init),
+ /// Display the identity docment
+ Show(Show),
+ /// Edit the identity document
+ Edit(Edit),
+ /// Sign a proposed identity document
+ Sign(Sign),
+}
+
+impl Cmd {
+ pub fn run(self) -> cmd::Result<cmd::Output> {
+ match self {
+ Self::Init(args) => init(args).map(cmd::IntoOutput::into_output),
+ Self::Show(args) => show(args).map(cmd::IntoOutput::into_output),
+ Self::Edit(args) => edit(args).map(cmd::IntoOutput::into_output),
+ Self::Sign(args) => sign(args).map(cmd::IntoOutput::into_output),
+ }
+ }
+}
+
// Options shared by all `it id` subcommands.
#[derive(Clone, Debug, clap::Args)]
pub struct Common {
    /// Path to the 'keyring' repository
    // nb. not using from_global here -- current_dir doesn't make sense here as
    // the default
    #[clap(
        long,
        value_parser,
        value_name = "DIR",
        env = "GIT_DIR",
        default_value_os_t = paths::ids(),
        value_hint = ValueHint::DirPath,
    )]
    git_dir: PathBuf,
    /// Identity to operate on
    ///
    /// If not set as an option nor in the environment, the value of `it.id` in
    /// the git config is tried.
    #[clap(short = 'I', long = "identity", value_name = "ID", env = "IT_ID")]
    id: Option<IdentityId>,
}
+
+impl Common {
+ pub fn resolve(&self) -> cmd::Result<(git2::Repository, Refname)> {
+ let repo = git::repo::open(&self.git_dir)?;
+ let refname = identity_ref(
+ match self.id {
+ Some(id) => Left(id),
+ None => Right(repo.config()?),
+ }
+ .as_ref(),
+ )?;
+
+ Ok((repo, refname))
+ }
+}
+
+pub fn identity_ref(id: Either<&IdentityId, &git2::Config>) -> cmd::Result<Refname> {
+ let id = id.either(
+ |iid| Ok(iid.to_string()),
+ |cfg| {
+ cfg::git::identity(cfg)?
+ .ok_or_else(|| anyhow!("'{}' not set", cfg::git::IT_ID))
+ .map(|iid| iid.to_string())
+ },
+ )?;
+ Ok(Refname::try_from(format!("refs/heads/it/ids/{id}"))?)
+}
+
+#[derive(serde::Serialize, serde::Deserialize)]
+struct Editable {
+ keys: metadata::KeySet<'static>,
+ threshold: NonZeroUsize,
+ mirrors: BTreeSet<Url>,
+ expires: Option<metadata::DateTime>,
+ custom: metadata::Custom,
+}
+
+impl From<metadata::Identity> for Editable {
+ fn from(
+ metadata::Identity {
+ keys,
+ threshold,
+ mirrors,
+ expires,
+ custom,
+ ..
+ }: metadata::Identity,
+ ) -> Self {
+ Self {
+ keys,
+ threshold,
+ mirrors,
+ expires,
+ custom,
+ }
+ }
+}
+
+impl TryFrom<Editable> for metadata::Identity {
+ type Error = crate::Error;
+
+ fn try_from(
+ Editable {
+ keys,
+ threshold,
+ mirrors,
+ expires,
+ custom,
+ }: Editable,
+ ) -> Result<Self, Self::Error> {
+ ensure!(!keys.is_empty(), "keys cannot be empty");
+
+ Ok(Self {
+ spec_version: crate::SPEC_VERSION,
+ prev: None,
+ keys,
+ threshold,
+ mirrors,
+ expires,
+ custom,
+ })
+ }
+}
diff --git a/src/cmd/id/edit.rs b/src/cmd/id/edit.rs
new file mode 100644
index 0000000..02687b8
--- /dev/null
+++ b/src/cmd/id/edit.rs
@@ -0,0 +1,209 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ fs::File,
+ iter,
+ path::Path,
+};
+
+use anyhow::{
+ anyhow,
+ bail,
+ ensure,
+ Context,
+};
+
+use super::{
+ Common,
+ Editable,
+ META_FILE_ID,
+};
+use crate::{
+ cfg,
+ cmd::{
+ self,
+ args::Refname,
+ ui::{
+ self,
+ edit_commit_message,
+ edit_metadata,
+ info,
+ warn,
+ },
+ Aborted,
+ FromGit as _,
+ GitIdentity,
+ },
+ git::{
+ self,
+ refs,
+ },
+ json,
+ metadata::{
+ self,
+ Metadata,
+ },
+};
+
// Arguments for `it id edit`.
//
// NOTE(review): `dry_run` is declared here but never consulted by `edit` —
// the ref transaction commits regardless; confirm whether the flag should
// gate the commit or be removed.
#[derive(Debug, clap::Args)]
#[allow(rustdoc::bare_urls)]
pub struct Edit {
    #[clap(flatten)]
    common: Common,
    /// Commit to this branch to propose the update
    ///
    /// If not given, the edit is performed in-place if the signature threshold
    /// is met using the supplied keys.
    #[clap(long, value_parser)]
    propose_as: Option<Refname>,
    /// Check out the committed changes
    ///
    /// Only has an effect if the repository is non-bare.
    #[clap(long, value_parser)]
    checkout: bool,
    /// Don't commit anything to disk
    #[clap(long, value_parser)]
    dry_run: bool,
    /// Commit message for this edit
    ///
    /// Like git, $EDITOR will be invoked if not specified.
    #[clap(short, long, value_parser)]
    message: Option<String>,
}
+
/// Result of [`edit`], reported to the user.
#[derive(serde::Serialize)]
pub struct Output {
    // Branch the edit was committed to (in-place ref or --propose-as).
    #[serde(rename = "ref")]
    refname: Refname,
    #[serde(with = "crate::git::serde::oid")]
    commit: git2::Oid,
}
+
/// Interactively edit the identity document the common options resolve to.
///
/// The edited document is signed with the configured signer and committed
/// either in place (if the signature threshold is met) or to the
/// `--propose-as` branch. The source ref is locked and re-checked against
/// the loaded blob hash to guard against concurrent modification.
///
/// NOTE(review): `args.dry_run` is not consulted here — confirm intended.
pub fn edit(args: Edit) -> cmd::Result<Output> {
    let (repo, refname) = args.common.resolve()?;

    let GitIdentity {
        hash: parent_hash,
        signed: metadata::Signed { signed: parent, .. },
    } = metadata::Identity::from_tip(&repo, &refname)?;

    // Let the user edit the document in $EDITOR; abort if it is
    // canonically unchanged.
    let mut id: metadata::Identity = edit_metadata(Editable::from(parent.clone()))?.try_into()?;
    if id.canonicalise()? == parent.canonicalise()? {
        info!("Document unchanged");
        cmd::abort!();
    }
    id.prev = Some(parent_hash.clone());

    let cfg = repo.config()?;
    let mut signer = cfg::signer(&cfg, ui::askpass)?;
    let keyid = metadata::KeyId::from(signer.ident());
    // The signing key must appear in the previous or the proposed revision
    // for its signature to count.
    ensure!(
        parent.keys.contains_key(&keyid) || id.keys.contains_key(&keyid),
        "signing key {keyid} is not eligible to sign the document"
    );
    let signed = Metadata::identity(&id).sign(iter::once(&mut signer))?;

    // Decide where to commit: in place when the threshold is met, otherwise
    // a proposal branch is required.
    let commit_to = match id.verify(&signed.signatures, cmd::find_parent(&repo)) {
        Ok(_) => args.propose_as.as_ref().unwrap_or(&refname),
        Err(metadata::error::Verification::SignatureThreshold) => match &args.propose_as {
            None => bail!("cannot update {refname} in place as signature threshold is not met"),
            Some(tgt) => {
                warn!("Signature threshold is not met");
                tgt
            },
        },
        Err(e) => bail!(e),
    };

    let mut tx = refs::Transaction::new(&repo)?;

    let _tip = tx.lock_ref(refname.clone())?;
    let tip = repo.find_reference(_tip.name())?;
    let parent_commit = tip.peel_to_commit()?;
    let parent_tree = parent_commit.tree()?;
    // check that parent is valid
    {
        let entry = parent_tree.get_name(META_FILE_ID).ok_or_else(|| {
            anyhow!("{refname} was modified concurrently, {META_FILE_ID} not found in tree")
        })?;
        ensure!(
            parent_hash == entry.to_object(&repo)?.peel_to_blob()?.id(),
            "{refname} was modified concurrently",
        );
    }
    let commit_to = tx.lock_ref(commit_to.clone())?;
    let on_head =
        !repo.is_bare() && git2::Branch::wrap(repo.find_reference(commit_to.name())?).is_head();

    // If the target branch is checked out, write via the work tree / index
    // so the checkout stays in sync; otherwise build the tree directly.
    let tree = if on_head {
        write_tree(&repo, &signed)
    } else {
        write_tree_bare(&repo, &signed, Some(&parent_tree))
    }?;
    let msg = args
        .message
        .map(Ok)
        .unwrap_or_else(|| edit_commit_message(&repo, commit_to.name(), &parent_tree, &tree))?;
    let commit = git::commit_signed(&mut signer, &repo, msg, &tree, &[&parent_commit])?;
    commit_to.set_target(commit, "it: edit identity");

    tx.commit()?;

    if args.checkout && repo.is_bare() {
        bail!("repository is bare, refusing checkout");
    }
    if args.checkout || on_head {
        repo.checkout_tree(
            tree.as_object(),
            Some(git2::build::CheckoutBuilder::new().safe()),
        )?;
        repo.set_head(commit_to.name())?;
        info!("Switched to branch '{commit_to}'");
    }

    Ok(Output {
        refname: commit_to.into(),
        commit,
    })
}
+
/// Write the signed metadata to the id file in the repository's work tree,
/// stage it, and return the tree written from the updated index.
///
/// Refuses to proceed if the work tree has any uncommitted changes, so the
/// staged edit cannot mix with unrelated modifications. The commit itself is
/// up to the caller.
pub(super) fn write_tree<'a>(
    repo: &'a git2::Repository,
    meta: &metadata::Signed<metadata::Metadata>,
) -> crate::Result<git2::Tree<'a>> {
    ensure!(
        repo.statuses(None)?.is_empty(),
        "uncommitted changes in working tree. Please commit or stash them before proceeding"
    );
    let id_json = repo
        .workdir()
        .expect("non-bare repo ought to have a workdir")
        .join(META_FILE_ID);
    // Truncate-and-rewrite the existing file in place.
    let out = File::options()
        .write(true)
        .truncate(true)
        .open(&id_json)
        .with_context(|| format!("error opening {} for writing", id_json.display()))?;
    serde_json::to_writer_pretty(&out, meta)
        .with_context(|| format!("serialising to {} failed", id_json.display()))?;

    let mut index = repo.index()?;
    index.add_path(Path::new(META_FILE_ID))?;
    let oid = index.write_tree()?;

    Ok(repo.find_tree(oid)?)
}
+
+pub(super) fn write_tree_bare<'a>(
+ repo: &'a git2::Repository,
+ meta: &metadata::Signed<metadata::Metadata>,
+ from: Option<&git2::Tree>,
+) -> crate::Result<git2::Tree<'a>> {
+ let blob = json::to_blob(repo, meta)?;
+ let mut bld = repo.treebuilder(from)?;
+ bld.insert(META_FILE_ID, blob, git2::FileMode::Blob.into())?;
+ let oid = bld.write()?;
+
+ Ok(repo.find_tree(oid)?)
+}
diff --git a/src/cmd/id/init.rs b/src/cmd/id/init.rs
new file mode 100644
index 0000000..a0ed119
--- /dev/null
+++ b/src/cmd/id/init.rs
@@ -0,0 +1,230 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use core::{
+ iter,
+ num::NonZeroUsize,
+};
+use std::path::PathBuf;
+
+use anyhow::ensure;
+use clap::ValueHint;
+use url::Url;
+
+use super::{
+ Editable,
+ META_FILE_ID,
+};
+use crate::{
+ cfg::{
+ self,
+ paths,
+ },
+ cmd::{
+ self,
+ args::Refname,
+ ui::{
+ self,
+ edit_metadata,
+ info,
+ },
+ },
+ git::{
+ self,
+ if_not_found_none,
+ refs,
+ },
+ json,
+ metadata::{
+ self,
+ DateTime,
+ Key,
+ KeySet,
+ },
+};
+
// Arguments for `it id init`.
#[derive(Debug, clap::Args)]
pub struct Init {
    /// Path to the 'keyring' repository
    #[clap(
        long,
        value_parser,
        value_name = "DIR",
        env = "GIT_DIR",
        default_value_os_t = paths::ids(),
        value_hint = ValueHint::DirPath,
    )]
    git_dir: PathBuf,
    /// If the repository does not already exist, initialise it as non-bare
    ///
    /// Having the identity files checked out into a work tree may make it
    /// easier to manipulate them with external tooling. Note, however, that
    /// only committed files are considered by `it`.
    #[clap(long, value_parser)]
    no_bare: bool,
    /// Set this identity as the default in the user git config
    #[clap(long, value_parser)]
    set_default: bool,
    /// Additional public key to add to the identity; may be given multiple
    /// times
    #[clap(short, long, value_parser)]
    public: Vec<Key<'static>>,
    /// Threshold of keys required to sign the next revision
    // nb. if not given, `init` defaults this to a majority of the keys
    #[clap(long, value_parser)]
    threshold: Option<NonZeroUsize>,
    /// Alternate location where the identity history is published to; may be
    /// given multiple times
    #[clap(
        long = "mirror",
        value_parser,
        value_name = "URL",
        value_hint = ValueHint::Url,
    )]
    mirrors: Vec<Url>,
    /// Optional date/time after which the current revision of the identity
    /// should no longer be considered valid
    #[clap(long, value_parser, value_name = "DATETIME")]
    expires: Option<DateTime>,
    /// Custom data
    ///
    /// The data must be parseable as canonical JSON, ie. not contain any
    /// floating point values.
    #[clap(
        long,
        value_parser,
        value_name = "FILE",
        value_hint = ValueHint::FilePath,
    )]
    custom: Option<PathBuf>,
    /// Stop for editing the metadata in $EDITOR
    #[clap(long, value_parser)]
    edit: bool,
    /// Don't commit anything to disk
    #[clap(long, value_parser)]
    dry_run: bool,
}
+
/// Result of [`init`], reported to the user.
#[derive(serde::Serialize)]
pub struct Output {
    // Present only when something was written (ie. not with --dry-run).
    #[serde(skip_serializing_if = "Option::is_none")]
    committed: Option<Committed>,
    // The signed identity document.
    data: metadata::Signed<metadata::Metadata<'static>>,
}

/// Where the new identity was committed to.
#[derive(serde::Serialize)]
pub struct Committed {
    repo: PathBuf,
    #[serde(rename = "ref")]
    refname: Refname,
    #[serde(with = "crate::git::serde::oid")]
    commit: git2::Oid,
}
+
+pub fn init(args: Init) -> cmd::Result<Output> {
+ let git_dir = args.git_dir;
+ info!("Initialising fresh identity at {}", git_dir.display());
+
+ let custom = args.custom.map(json::load).transpose()?.unwrap_or_default();
+ let cfg = git2::Config::open_default()?;
+ let mut signer = cfg::signer(&cfg, ui::askpass)?;
+ let threshold = match args.threshold {
+ None => NonZeroUsize::new(1)
+ .unwrap()
+ .saturating_add(args.public.len() / 2),
+ Some(t) => {
+ ensure!(
+ t.get() < args.public.len(),
+ "threshold must be smaller than the number of keys"
+ );
+ t
+ },
+ };
+
+ let signer_id = signer.ident().to_owned();
+ let keys = iter::once(signer_id.clone())
+ .map(metadata::Key::from)
+ .chain(args.public)
+ .collect::<KeySet>();
+
+ let meta = {
+ let id = metadata::Identity {
+ spec_version: crate::SPEC_VERSION,
+ prev: None,
+ keys,
+ threshold,
+ mirrors: args.mirrors.into_iter().collect(),
+ expires: args.expires,
+ custom,
+ };
+
+ if args.edit {
+ edit_metadata(Editable::from(id))?.try_into()?
+ } else {
+ id
+ }
+ };
+ let sigid = metadata::IdentityId::try_from(&meta).unwrap();
+ let signed = metadata::Metadata::identity(meta).sign(iter::once(&mut signer))?;
+
+ let out = if !args.dry_run {
+ let id_ref = Refname::try_from(format!("refs/heads/it/ids/{}", sigid)).unwrap();
+ let repo = git::repo::open_or_init(
+ git_dir,
+ git::repo::InitOpts {
+ bare: !args.no_bare,
+ description: "`it` keyring",
+ initial_head: &id_ref,
+ },
+ )?;
+
+ let mut tx = refs::Transaction::new(&repo)?;
+ let id_ref = tx.lock_ref(id_ref)?;
+ ensure!(
+ if_not_found_none(repo.refname_to_id(id_ref.name()))?.is_none(),
+ "{id_ref} already exists",
+ );
+
+ let blob = json::to_blob(&repo, &signed)?;
+ let tree = {
+ let mut bld = repo.treebuilder(None)?;
+ bld.insert(META_FILE_ID, blob, git2::FileMode::Blob.into())?;
+ let oid = bld.write()?;
+ repo.find_tree(oid)?
+ };
+ let msg = format!("Create identity {}", sigid);
+ let oid = git::commit_signed(&mut signer, &repo, msg, &tree, &[])?;
+ id_ref.set_target(oid, "it: create");
+
+ let mut cfg = repo.config()?;
+ cfg.set_str(
+ cfg::git::USER_SIGNING_KEY,
+ &format!("key::{}", signer_id.to_openssh()?),
+ )?;
+ let idstr = sigid.to_string();
+ cfg.set_str(cfg::git::IT_ID, &idstr)?;
+ if args.set_default {
+ cfg.open_global()?.set_str(cfg::git::IT_ID, &idstr)?;
+ }
+
+ tx.commit()?;
+ if !repo.is_bare() {
+ repo.checkout_head(None).ok();
+ }
+
+ Output {
+ committed: Some(Committed {
+ repo: repo.path().to_owned(),
+ refname: id_ref.into(),
+ commit: oid,
+ }),
+ data: signed,
+ }
+ } else {
+ Output {
+ committed: None,
+ data: signed,
+ }
+ };
+
+ Ok(out)
+}
diff --git a/src/cmd/id/show.rs b/src/cmd/id/show.rs
new file mode 100644
index 0000000..4a25455
--- /dev/null
+++ b/src/cmd/id/show.rs
@@ -0,0 +1,75 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::path::PathBuf;
+
+use super::Common;
+use crate::{
+ cmd::{
+ self,
+ args::Refname,
+ FromGit as _,
+ GitIdentity,
+ },
+ metadata::{
+ self,
+ ContentHash,
+ },
+};
+
// Arguments for `it id show`.
#[derive(Debug, clap::Args)]
pub struct Show {
    #[clap(flatten)]
    common: Common,
    /// Blob hash to show
    ///
    /// Instead of looking for an id.json in the tree --ref points to, load a
    /// particular id.json by hash. If given, --ref is ignored.
    #[clap(long = "hash", value_parser, value_name = "OID")]
    blob_hash: Option<git2::Oid>,
}
+
/// Result of [`show`], reported to the user.
#[derive(serde::Serialize)]
pub struct Output {
    repo: PathBuf,
    #[serde(rename = "ref")]
    refname: Refname,
    // Content hash of the displayed document.
    hash: ContentHash,
    // Outcome of signature verification.
    status: Status,
    data: metadata::Signed<metadata::Identity>,
}
+
+#[derive(serde::Serialize)]
+#[serde(rename_all = "UPPERCASE")]
+pub enum Status {
+ Verified {
+ id: metadata::IdentityId,
+ },
+ #[serde(with = "crate::serde::display")]
+ Invalid(metadata::error::Verification),
+}
+
+impl From<Result<metadata::IdentityId, metadata::error::Verification>> for Status {
+ fn from(r: Result<metadata::IdentityId, metadata::error::Verification>) -> Self {
+ r.map(|id| Self::Verified { id })
+ .unwrap_or_else(Self::Invalid)
+ }
+}
+
/// Display the identity document the common options resolve to, together
/// with its verification status.
pub fn show(args: Show) -> cmd::Result<Output> {
    let (repo, refname) = args.common.resolve()?;

    // Load the tip of the identity branch, or a specific blob if --hash was
    // given (the common options then only select the repository).
    let GitIdentity { hash, signed } = match args.blob_hash {
        None => metadata::Identity::from_tip(&repo, &refname)?,
        Some(oid) => metadata::Identity::from_blob(&repo.find_blob(oid)?)?,
    };
    let status = signed.verify(cmd::find_parent(&repo)).into();

    Ok(Output {
        repo: repo.path().to_owned(),
        refname,
        hash,
        status,
        data: signed,
    })
}
diff --git a/src/cmd/id/sign.rs b/src/cmd/id/sign.rs
new file mode 100644
index 0000000..b63ef94
--- /dev/null
+++ b/src/cmd/id/sign.rs
@@ -0,0 +1,221 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::collections::BTreeMap;
+
+use anyhow::{
+ anyhow,
+ bail,
+ ensure,
+ Context as _,
+};
+
+use super::{
+ edit,
+ Common,
+};
+use crate::{
+ cfg,
+ cmd::{
+ self,
+ args::Refname,
+ id::META_FILE_ID,
+ ui::{
+ self,
+ edit_commit_message,
+ info,
+ },
+ FromGit as _,
+ GitIdentity,
+ },
+ git::{
+ self,
+ if_not_found_none,
+ refs,
+ },
+ metadata,
+};
+
// Arguments for `it id sign`.
//
// NOTE(review): `dry_run` is declared here but never consulted by `sign` —
// the ref transaction commits regardless; confirm whether the flag should
// gate the commit or be removed.
#[derive(Debug, clap::Args)]
pub struct Sign {
    #[clap(flatten)]
    common: Common,
    /// Commit to this branch if the signature threshold is met
    #[clap(short = 'b', long, value_parser, value_name = "REF")]
    commit_to: Refname,
    /// Check out the committed changes
    ///
    /// Only has an effect if the repository is non-bare.
    #[clap(long, value_parser)]
    checkout: bool,
    /// Don't commit anything to disk
    #[clap(long, value_parser)]
    dry_run: bool,
    /// Commit message for this edit
    ///
    /// Like git, $EDITOR will be invoked if not specified.
    #[clap(short, long, value_parser)]
    message: Option<String>,
}

/// Result of [`sign`], reported to the user.
#[derive(serde::Serialize)]
pub struct Output {
    #[serde(rename = "ref")]
    refname: Refname,
    #[serde(with = "crate::git::serde::oid")]
    commit: git2::Oid,
}
+
/// Sign the proposed identity document with the configured key and commit
/// the signoff to `--commit-to`.
///
/// Three target states are distinguished:
///
/// * `--commit-to` equals the proposal ref: signing in place, only legal if
///   the proposal already meets the signature threshold;
/// * `--commit-to` exists: it must hold a verified parent revision whose
///   blob hash equals the proposal's `prev`;
/// * `--commit-to` is unborn: it is created from the proposal's parent
///   commit, which must verify likewise.
///
/// Genesis revisions (no `prev`) cannot be signed this way.
///
/// NOTE(review): `args.dry_run` is not consulted here — confirm intended.
pub fn sign(args: Sign) -> cmd::Result<Output> {
    let (repo, refname) = args.common.resolve()?;
    let mut tx = refs::Transaction::new(&repo)?;
    let _tip = tx.lock_ref(refname.clone())?;

    let GitIdentity {
        signed:
            metadata::Signed {
                signed: proposed,
                signatures: proposed_signatures,
            },
        ..
    } = metadata::Identity::from_tip(&repo, &refname)?;
    let prev_hash: git2::Oid = proposed
        .prev
        .as_ref()
        .ok_or_else(|| anyhow!("cannot sign a genesis revision"))?
        .into();
    let (parent, target_ref) = if refname == args.commit_to {
        // Signing in-place is only legal if the proposed update already
        // meets the signature threshold
        let _ = proposed
            .verify(&proposed_signatures, cmd::find_parent(&repo))
            .context("proposed update does not meet the signature threshold")?;
        (proposed.clone(), repo.find_reference(&args.commit_to)?)
    } else {
        let target_ref = if_not_found_none(repo.find_reference(&args.commit_to))?;
        match target_ref {
            // If the target ref exists, it must yield a verified id.json whose
            // blob hash equals the 'prev' hash of the proposed update
            Some(tgt) => {
                let parent_commit = tgt.peel_to_commit()?;
                let GitIdentity {
                    hash: parent_hash,
                    signed:
                        metadata::Signed {
                            signed: parent,
                            signatures: parent_signatures,
                        },
                } = metadata::Identity::from_commit(&repo, &parent_commit).with_context(|| {
                    format!("failed to load {} from {}", META_FILE_ID, &args.commit_to)
                })?;
                let _ = parent
                    .verify(&parent_signatures, cmd::find_parent(&repo))
                    .with_context(|| format!("target {} could not be verified", &args.commit_to))?;
                ensure!(
                    parent_hash == prev_hash,
                    "parent hash (.prev) doesn't match"
                );

                (parent, tgt)
            },

            // If the target ref is unborn, the proposed's parent commit must
            // yield a verified id.json, as we will create the target from
            // HEAD^1
            None => {
                let parent_commit = repo
                    .find_reference(&refname)?
                    .peel_to_commit()?
                    .parents()
                    .next()
                    .ok_or_else(|| anyhow!("cannot sign an initial commit"))?;
                let GitIdentity {
                    hash: parent_hash,
                    signed:
                        metadata::Signed {
                            signed: parent,
                            signatures: parent_signatures,
                        },
                } = metadata::Identity::from_commit(&repo, &parent_commit)?;
                let _ = parent
                    .verify(&parent_signatures, cmd::find_parent(&repo))
                    .with_context(|| {
                        format!(
                            "parent commit {} of {} could not be verified",
                            parent_commit.id(),
                            refname
                        )
                    })?;
                ensure!(
                    parent_hash == prev_hash,
                    "parent hash (.prev) doesn't match"
                );

                let tgt = repo.reference(
                    &args.commit_to,
                    parent_commit.id(),
                    false,
                    &format!("branch: Created from {}^1", refname),
                )?;

                (parent, tgt)
            },
        }
    };
    let commit_to = tx.lock_ref(args.commit_to)?;

    let canonical = proposed.canonicalise()?;
    let mut signer = cfg::signer(&repo.config()?, ui::askpass)?;
    let mut signatures = BTreeMap::new();
    let keyid = metadata::KeyId::from(signer.ident());
    // The key must be eligible in either the parent or the proposed
    // revision, and must not have signed the proposal already.
    if !parent.keys.contains_key(&keyid) && !proposed.keys.contains_key(&keyid) {
        bail!("key {} is not eligible to sign the document", keyid);
    }
    if proposed_signatures.contains_key(&keyid) {
        bail!("proposed update is already signed with key {}", keyid);
    }

    let signature = signer.sign(&canonical)?;
    signatures.insert(keyid, metadata::Signature::from(signature));
    signatures.extend(proposed_signatures);

    // Sanity check: the combined signature set must verify before anything
    // is committed.
    let _ = proposed
        .verify(&signatures, cmd::find_parent(&repo))
        .context("proposal could not be verified after signing")?;

    let signed = metadata::Signed {
        signed: metadata::Metadata::identity(proposed),
        signatures,
    };

    let parent_commit = target_ref.peel_to_commit()?;
    let parent_tree = parent_commit.tree()?;
    let on_head = !repo.is_bare() && git2::Branch::wrap(target_ref).is_head();

    // If the target branch is checked out, write via the work tree / index;
    // otherwise build the tree directly.
    let tree = if on_head {
        edit::write_tree(&repo, &signed)
    } else {
        edit::write_tree_bare(&repo, &signed, Some(&parent_tree))
    }?;
    let msg = args
        .message
        .map(Ok)
        .unwrap_or_else(|| edit_commit_message(&repo, commit_to.name(), &parent_tree, &tree))?;
    let commit = git::commit_signed(&mut signer, &repo, msg, &tree, &[&parent_commit])?;
    commit_to.set_target(commit, "it: identity signoff");

    tx.commit()?;

    if on_head {
        repo.checkout_tree(
            tree.as_object(),
            Some(git2::build::CheckoutBuilder::new().safe()),
        )?;
        info!("Checked out tree {}", tree.id());
    }

    Ok(Output {
        refname: commit_to.into(),
        commit,
    })
}
diff --git a/src/cmd/mergepoint.rs b/src/cmd/mergepoint.rs
new file mode 100644
index 0000000..2bf4f79
--- /dev/null
+++ b/src/cmd/mergepoint.rs
@@ -0,0 +1,75 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use crate::{
+ cmd::{
+ self,
+ patch,
+ },
+ patches,
+};
+
+#[derive(Debug, clap::Subcommand)]
+pub enum Cmd {
+ /// Record a mergepoint in a local repository
+ Record(Record),
+ /// Submit a mergepoint to a remote drop
+ Submit(Submit),
+}
+
+impl Cmd {
+ pub fn run(self) -> cmd::Result<cmd::Output> {
+ match self {
+ Self::Record(args) => record(args),
+ Self::Submit(args) => submit(args),
+ }
+ .map(cmd::IntoOutput::into_output)
+ }
+}
+
// Arguments for `it mergepoint record`.
#[derive(Debug, clap::Args)]
pub struct Record {
    #[clap(flatten)]
    common: patch::Common,
    /// Allow branches to be uneven with their upstream (if any)
    #[clap(long, visible_alias = "force", value_parser)]
    ignore_upstream: bool,
}

// Arguments for `it mergepoint submit`.
#[derive(Debug, clap::Args)]
pub struct Submit {
    #[clap(flatten)]
    common: patch::Common,
    // Where to submit to.
    #[clap(flatten)]
    remote: patch::Remote,
    /// Allow branches to be uneven with their upstream (if any)
    #[clap(long, visible_alias = "force", value_parser)]
    ignore_upstream: bool,
}
+
+pub fn record(
+ Record {
+ common,
+ ignore_upstream,
+ }: Record,
+) -> cmd::Result<patches::Record> {
+ patch::create(patch::Kind::Merges {
+ common,
+ remote: None,
+ force: ignore_upstream,
+ })
+}
+
+pub fn submit(
+ Submit {
+ common,
+ remote,
+ ignore_upstream,
+ }: Submit,
+) -> cmd::Result<patches::Record> {
+ patch::create(patch::Kind::Merges {
+ common,
+ remote: Some(remote),
+ force: ignore_upstream,
+ })
+}
diff --git a/src/cmd/patch.rs b/src/cmd/patch.rs
new file mode 100644
index 0000000..a1b781d
--- /dev/null
+++ b/src/cmd/patch.rs
@@ -0,0 +1,77 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use crate::{
+ cmd,
+ patches,
+};
+
+mod create;
+mod prepare;
+
+pub use create::{
+ create,
+ Comment,
+ Common,
+ Kind,
+ Patch,
+ Remote,
+};
+
+#[derive(Debug, clap::Subcommand)]
+pub enum Cmd {
+ /// Record a patch in a local drop history
+ Record(Record),
+ /// Submit a patch to a remote drop
+ Submit(Submit),
+}
+
+impl Cmd {
+ pub fn run(self) -> cmd::Result<cmd::Output> {
+ match self {
+ Self::Record(args) => record(args),
+ Self::Submit(args) => submit(args),
+ }
+ .map(cmd::IntoOutput::into_output)
+ }
+}
+
// Arguments for `it patch record`.
#[derive(Debug, clap::Args)]
pub struct Record {
    #[clap(flatten)]
    common: Common,
    #[clap(flatten)]
    patch: Patch,
}

// Arguments for `it patch submit`.
#[derive(Debug, clap::Args)]
pub struct Submit {
    #[clap(flatten)]
    common: Common,
    #[clap(flatten)]
    patch: Patch,
    // Where to submit to.
    #[clap(flatten)]
    remote: Remote,
}
+
+pub fn record(Record { common, patch }: Record) -> cmd::Result<patches::Record> {
+ create(Kind::Patch {
+ common,
+ remote: None,
+ patch,
+ })
+}
+
+pub fn submit(
+ Submit {
+ common,
+ patch,
+ remote,
+ }: Submit,
+) -> cmd::Result<patches::Record> {
+ create(Kind::Patch {
+ common,
+ remote: Some(remote),
+ patch,
+ })
+}
diff --git a/src/cmd/patch/create.rs b/src/cmd/patch/create.rs
new file mode 100644
index 0000000..7527364
--- /dev/null
+++ b/src/cmd/patch/create.rs
@@ -0,0 +1,483 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ borrow::Cow,
+ collections::BTreeMap,
+ env,
+ path::PathBuf,
+};
+
+use anyhow::anyhow;
+use clap::ValueHint;
+use globset::{
+ GlobSet,
+ GlobSetBuilder,
+};
+use once_cell::sync::Lazy;
+use url::Url;
+
+use super::prepare;
+use crate::{
+ cfg,
+ cmd::{
+ self,
+ ui::{
+ self,
+ debug,
+ info,
+ },
+ util::args::IdSearchPath,
+ Aborted,
+ },
+ git::{
+ self,
+ Refname,
+ },
+ metadata::IdentityId,
+ patches::{
+ self,
+ iter,
+ DropHead,
+ Topic,
+ TrackingBranch,
+ GLOB_IT_BUNDLES,
+ GLOB_IT_IDS,
+ GLOB_IT_TOPICS,
+ REF_HEADS_PATCHES,
+ REF_IT_BUNDLES,
+ REF_IT_PATCHES,
+ REF_IT_SEEN,
+ },
+ paths,
+};
+
+#[derive(Debug, clap::Args)]
+pub struct Common {
+ /// Path to the drop repository
+ #[clap(from_global)]
+ git_dir: PathBuf,
+ /// Path to the source repository
+ ///
+ /// If set, the patch bundle will be created from objects residing in an
+ /// external repository. The main use case for this is to allow a bare
+ /// drop to pull in checkpoints from a local repo with a regular layout
+    /// (i.e. not it-aware).
+ #[clap(
+ long = "source-dir",
+ alias = "src-dir",
+ value_parser,
+ value_name = "DIR",
+ value_hint = ValueHint::DirPath,
+ )]
+ src_dir: Option<PathBuf>,
+ /// Identity to assume
+ ///
+ /// If not set as an option nor in the environment, the value of `it.id` in
+ /// the git config is tried.
+ #[clap(short = 'I', long = "identity", value_name = "ID", env = "IT_ID")]
+ id: Option<IdentityId>,
+ /// A list of paths to search for identity repositories
+ #[clap(
+ long,
+ value_parser,
+ value_name = "PATH",
+ env = "IT_ID_PATH",
+ default_value_t,
+ value_hint = ValueHint::DirPath,
+ )]
+ id_path: IdSearchPath,
+ /// The directory where to write the bundle to
+ ///
+ /// Unless this is an absolute path, it is treated as relative to $GIT_DIR.
+ #[clap(
+ long,
+ value_parser,
+ value_name = "DIR",
+ default_value_os_t = paths::bundles().to_owned(),
+ value_hint = ValueHint::DirPath,
+ )]
+ bundle_dir: PathBuf,
+ /// IPFS API to publish the patch bundle to
+ ///
+ /// Currently has no effect when submitting a patch to a remote drop. When
+ /// running `ipfs daemon`, the default API address is 'http://127.0.0.1:5001'.
+ #[clap(
+ long,
+ value_parser,
+ value_name = "URL",
+ value_hint = ValueHint::Url,
+ )]
+ ipfs_api: Option<Url>,
+    /// Additional identities to include, e.g. to allow commit verification
+ #[clap(long = "add-id", value_parser, value_name = "ID")]
+ ids: Vec<IdentityId>,
+ /// Message to attach to the patch (cover letter, comment)
+ ///
+ /// If not set, $EDITOR will be invoked to author one.
+ #[clap(short, long, value_parser, value_name = "STRING")]
+ message: Option<String>,
+ /// Create the patch, but stop short of submitting / recording it
+ #[clap(long, value_parser)]
+ dry_run: bool,
+}
+
+#[derive(Debug, clap::Args)]
+pub struct Remote {
+ /// Url to submit the patch to
+ ///
+ /// Usually one of the alternates from the drop metadata. If not set,
+ /// GIT_DIR is assumed to contain a drop with which the patch can be
+ /// recorded without any network access.
+ #[clap(long, visible_alias = "submit-to", value_parser, value_name = "URL")]
+ url: Url,
+ /// Refname of the drop to record the patch with
+ ///
+ /// We need to pick a local (remote-tracking) drop history in order to
+ /// compute delta bases for the patch. The value is interpreted
+ /// according to "DWIM" rules, i.e. shorthand forms like 'it/patches',
+    /// 'origin/patches' will be resolved if possible.
+ #[clap(long = "drop", value_parser, value_name = "STRING")]
+ drop_ref: String,
+}
+
+#[derive(Debug, clap::Args)]
+pub struct Patch {
+ /// Base branch the patch is against
+ ///
+ /// If --topic is given, the branch must exist in the patch bundle
+ /// --reply-to refers to, or the default entry to reply to on that
+ /// topic. Otherwise, the branch must exist in the drop
+ /// metadata. Shorthand branch names are accepted.
+ ///
+ /// If not given, "main" or "master" is tried, in that order.
+ #[clap(long = "base", value_parser, value_name = "REF")]
+ base: Option<String>,
+ /// Head revision of the patch, in 'git rev-parse' syntax
+ #[clap(
+ long = "head",
+ value_parser,
+ value_name = "REVSPEC",
+ default_value = "HEAD"
+ )]
+ head: String,
+ /// Post the patch to a previously recorded topic
+ #[clap(long, value_parser, value_name = "TOPIC")]
+ topic: Option<Topic>,
+ /// Reply to a particular entry within a topic
+ ///
+ /// Only considered if --topic is given.
+ #[clap(long, value_parser, value_name = "ID")]
+ reply_to: Option<git2::Oid>,
+}
+
+#[derive(Debug, clap::Args)]
+pub struct Comment {
+ /// The topic to comment on
+ #[clap(value_parser, value_name = "TOPIC")]
+ topic: Topic,
+ /// Reply to a particular entry within the topic
+ #[clap(long, value_parser, value_name = "ID")]
+ reply_to: Option<git2::Oid>,
+}
+
+pub enum Kind {
+ Merges {
+ common: Common,
+ remote: Option<Remote>,
+ force: bool,
+ },
+ Snapshot {
+ common: Common,
+ },
+ Comment {
+ common: Common,
+ remote: Option<Remote>,
+ comment: Comment,
+ },
+ Patch {
+ common: Common,
+ remote: Option<Remote>,
+ patch: Patch,
+ },
+}
+
+impl Kind {
+ fn common(&self) -> &Common {
+ match self {
+ Self::Merges { common, .. }
+ | Self::Snapshot { common }
+ | Self::Comment { common, .. }
+ | Self::Patch { common, .. } => common,
+ }
+ }
+
+ fn remote(&self) -> Option<&Remote> {
+ match self {
+ Self::Merges { remote, .. }
+ | Self::Comment { remote, .. }
+ | Self::Patch { remote, .. } => remote.as_ref(),
+ Self::Snapshot { .. } => None,
+ }
+ }
+
+ fn accept_options(&self, drop: &DropHead) -> patches::AcceptOptions {
+ let mut options = patches::AcceptOptions::default();
+ match self {
+ Self::Merges { common, .. } => {
+ options.allow_fat_pack = true;
+ options.max_branches = drop.meta.roles.branches.len();
+ options.max_refs = options.max_branches + common.ids.len() + 1;
+ options.max_commits = 100_000;
+ },
+ Self::Snapshot { .. } => {
+ options.allow_fat_pack = true;
+ options.allowed_refs = SNAPSHOT_REFS.clone();
+ options.max_branches = usize::MAX;
+ options.max_refs = usize::MAX;
+ options.max_commits = usize::MAX;
+ options.max_notes = usize::MAX;
+ options.max_tags = usize::MAX;
+ },
+
+ _ => {},
+ }
+
+ options
+ }
+}
+
+struct Resolved {
+ repo: prepare::Repo,
+ signer_id: IdentityId,
+ bundle_dir: PathBuf,
+}
+
+impl Common {
+ fn resolve(&self) -> cmd::Result<Resolved> {
+ let drp = git::repo::open(&self.git_dir)?;
+ let ids = self.id_path.open_git();
+ let src = match self.src_dir.as_ref() {
+ None => {
+ let cwd = env::current_dir()?;
+ (cwd != self.git_dir).then_some(cwd)
+ },
+ Some(dir) => Some(dir.to_owned()),
+ }
+ .as_deref()
+ .map(git::repo::open_bare)
+ .transpose()?;
+
+ debug!(
+ "drop: {}, src: {:?}, ids: {:?}",
+ drp.path().display(),
+ src.as_ref().map(|r| r.path().display()),
+ env::join_paths(ids.iter().map(|r| r.path()))
+ );
+
+ // IT_ID_PATH could differ from what was used at initialisation
+ git::add_alternates(&drp, &ids)?;
+
+ let repo = prepare::Repo::new(drp, ids, src);
+ let signer_id = match self.id {
+ Some(id) => id,
+ None => cfg::git::identity(&repo.source().config()?)?
+ .ok_or_else(|| anyhow!("no identity configured for signer"))?,
+ };
+ let bundle_dir = if self.bundle_dir.is_absolute() {
+ self.bundle_dir.clone()
+ } else {
+ repo.target().path().join(&self.bundle_dir)
+ };
+
+ Ok(Resolved {
+ repo,
+ signer_id,
+ bundle_dir,
+ })
+ }
+}
+
+static SNAPSHOT_REFS: Lazy<GlobSet> = Lazy::new(|| {
+ GlobSetBuilder::new()
+ .add(GLOB_IT_TOPICS.clone())
+ .add(GLOB_IT_BUNDLES.clone())
+ .add(GLOB_IT_IDS.clone())
+ .build()
+ .unwrap()
+});
+
+pub fn create(args: Kind) -> cmd::Result<patches::Record> {
+ let Resolved {
+ repo,
+ signer_id,
+ bundle_dir,
+ } = args.common().resolve()?;
+ let drop_ref: Cow<str> = match args.remote() {
+ Some(remote) => {
+ let full = repo
+ .source()
+ .resolve_reference_from_short_name(&remote.drop_ref)?;
+ full.name()
+ .ok_or_else(|| anyhow!("invalid drop ref"))?
+ .to_owned()
+ .into()
+ },
+ None if repo.target().is_bare() => REF_HEADS_PATCHES.into(),
+ None => REF_IT_PATCHES.into(),
+ };
+
+ let mut signer = cfg::git::signer(&repo.source().config()?, ui::askpass)?;
+ let drop = patches::DropHead::from_refname(repo.target(), &drop_ref)?;
+
+ let spec = match &args {
+ Kind::Merges { force, .. } => prepare::Kind::Mergepoint { force: *force },
+ Kind::Snapshot { .. } => prepare::Kind::Snapshot { incremental: true },
+ Kind::Comment { comment, .. } => prepare::Kind::Comment {
+ topic: comment.topic.clone(),
+ reply: comment.reply_to,
+ },
+ Kind::Patch { patch, .. } => {
+ let (name, base_ref) = dwim_base(
+ repo.target(),
+ &drop,
+ patch.topic.as_ref(),
+ patch.reply_to,
+ patch.base.as_deref(),
+ )?
+ .ok_or_else(|| anyhow!("unable to determine base branch"))?;
+ let base = repo
+ .target()
+ .find_reference(&base_ref)?
+ .peel_to_commit()?
+ .id();
+ let head = repo
+ .source()
+ .revparse_single(&patch.head)?
+ .peel_to_commit()?
+ .id();
+
+ prepare::Kind::Patch {
+ head,
+ base,
+ name,
+ re: patch.topic.as_ref().map(|t| (t.clone(), patch.reply_to)),
+ }
+ },
+ };
+
+ let mut patch = prepare::Preparator::new(
+ &repo,
+ &drop,
+ prepare::Submitter {
+ signer: &mut signer,
+ id: signer_id,
+ },
+ )
+ .prepare_patch(
+ &bundle_dir,
+ spec,
+ args.common().message.clone(),
+ &args.common().ids,
+ )?;
+
+ if args.common().dry_run {
+ info!("--dry-run given, stopping here");
+ cmd::abort!();
+ }
+
+ match args.remote() {
+ Some(remote) => patch.submit(remote.url.clone()),
+ None => patch.try_accept(patches::AcceptArgs {
+ unbundle_prefix: REF_IT_BUNDLES,
+ drop_ref: &drop_ref,
+ seen_ref: REF_IT_SEEN,
+ repo: repo.target(),
+ signer: &mut signer,
+ ipfs_api: args.common().ipfs_api.as_ref(),
+ options: args.accept_options(&drop),
+ }),
+ }
+}
+
+fn dwim_base(
+ repo: &git2::Repository,
+ drop: &DropHead,
+ topic: Option<&Topic>,
+ reply_to: Option<git2::Oid>,
+ base: Option<&str>,
+) -> cmd::Result<Option<(Refname, Refname)>> {
+ let mut candidates = BTreeMap::new();
+ match topic {
+ Some(topic) => {
+ let reply_to = reply_to.map(Ok).unwrap_or_else(|| {
+ iter::topic::default_reply_to(repo, topic)?
+ .ok_or_else(|| anyhow!("topic {topic} not found"))
+ })?;
+ let mut patch_id = None;
+ for note in iter::topic(repo, topic) {
+ let note = note?;
+ if note.header.id == reply_to {
+ patch_id = Some(note.header.patch.id);
+ break;
+ }
+ }
+ let patch_id = patch_id.ok_or_else(|| {
+ anyhow!("no patch found corresponding to topic: {topic}, reply-to: {reply_to}")
+ })?;
+
+ let prefix = format!("{REF_IT_BUNDLES}/{patch_id}/");
+ let mut iter = repo.references_glob(&format!("{prefix}**"))?;
+ for candidate in iter.names() {
+ let candidate = candidate?;
+ if let Some(suf) = candidate.strip_prefix(&prefix) {
+ if !suf.starts_with("it/") {
+ candidates.insert(format!("refs/{suf}"), candidate.parse()?);
+ }
+ }
+ }
+ },
+
+ None => candidates.extend(
+ drop.meta
+ .roles
+ .branches
+ .keys()
+ .cloned()
+ .map(|name| (name.to_string(), name)),
+ ),
+ };
+
+ const FMTS: &[fn(&str) -> String] = &[
+ |s| s.to_owned(),
+ |s| format!("refs/{}", s),
+ |s| format!("refs/heads/{}", s),
+ |s| format!("refs/tags/{}", s),
+ ];
+
+ debug!("dwim candidates: {candidates:#?}");
+
+ match base {
+ Some(base) => {
+ for (virt, act) in candidates {
+ for f in FMTS {
+ let name = f(base);
+ if name == virt {
+ let refname = name.parse()?;
+ return Ok(Some((refname, act)));
+ }
+ }
+ }
+ Ok(None)
+ },
+
+ // nb. biased towards "main" because we use a BTreeMap
+ None => Ok(candidates.into_iter().find_map(|(k, _)| match k.as_str() {
+ "refs/heads/main" => Some((Refname::main(), TrackingBranch::main().into_refname())),
+ "refs/heads/master" => {
+ Some((Refname::master(), TrackingBranch::master().into_refname()))
+ },
+ _ => None,
+ })),
+ }
+}
diff --git a/src/cmd/patch/prepare.rs b/src/cmd/patch/prepare.rs
new file mode 100644
index 0000000..06d5ec9
--- /dev/null
+++ b/src/cmd/patch/prepare.rs
@@ -0,0 +1,615 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::path::{
+ Path,
+ PathBuf,
+};
+
+use anyhow::{
+ anyhow,
+ bail,
+ ensure,
+};
+use either::Either::Left;
+use sha2::{
+ Digest,
+ Sha256,
+};
+
+use crate::{
+ bundle,
+ cmd::{
+ self,
+ ui::{
+ debug,
+ edit_comment,
+ edit_cover_letter,
+ info,
+ warn,
+ },
+ },
+ git::{
+ self,
+ if_not_found_none,
+ Refname,
+ },
+ keys::Signer,
+ metadata::{
+ self,
+ git::{
+ FromGit,
+ GitMeta,
+ META_FILE_ID,
+ },
+ identity::{
+ self,
+ IdentityId,
+ },
+ ContentHash,
+ KeyId,
+ },
+ patches::{
+ self,
+ iter::{
+ dropped,
+ topic,
+ },
+ notes,
+ record,
+ Topic,
+ REF_IT_BUNDLES,
+ REF_IT_PATCHES,
+ TOPIC_MERGES,
+ TOPIC_SNAPSHOTS,
+ },
+};
+
+pub enum Kind {
+ Mergepoint {
+ force: bool,
+ },
+ Snapshot {
+ incremental: bool,
+ },
+ Patch {
+ head: git2::Oid,
+ base: git2::Oid,
+ name: Refname,
+ re: Option<(Topic, Option<git2::Oid>)>,
+ },
+ Comment {
+ topic: Topic,
+ reply: Option<git2::Oid>,
+ },
+}
+
+pub struct Submitter<'a, S: ?Sized> {
+ pub signer: &'a mut S,
+ pub id: IdentityId,
+}
+
+pub struct Repo {
+ drp: git2::Repository,
+ src: Option<git2::Repository>,
+ ids: Vec<git2::Repository>,
+}
+
+impl Repo {
+ pub fn new(
+ drp: git2::Repository,
+ ids: Vec<git2::Repository>,
+ src: Option<git2::Repository>,
+ ) -> Self {
+ Self { drp, ids, src }
+ }
+
+ /// Repository containing the patch objects
+ pub fn source(&self) -> &git2::Repository {
+ self.src.as_ref().unwrap_or(&self.drp)
+ }
+
+ /// Repository containing the drop state
+ pub fn target(&self) -> &git2::Repository {
+ &self.drp
+ }
+
+ /// Repositories containing identity histories
+ pub fn id_path(&self) -> &[git2::Repository] {
+ &self.ids
+ }
+}
+
+pub struct Preparator<'a, S: ?Sized> {
+ repo: &'a Repo,
+ drop: &'a patches::DropHead<'a>,
+ submitter: Submitter<'a, S>,
+}
+
+impl<'a, S: Signer> Preparator<'a, S> {
+ pub fn new(
+ repo: &'a Repo,
+ drop: &'a patches::DropHead<'a>,
+ submitter: Submitter<'a, S>,
+ ) -> Self {
+ Self {
+ repo,
+ drop,
+ submitter,
+ }
+ }
+
+ pub fn prepare_patch(
+ &mut self,
+ bundle_dir: &Path,
+ kind: Kind,
+ message: Option<String>,
+ additional_ids: &[IdentityId],
+ ) -> cmd::Result<patches::Submission> {
+ let mut header = bundle::Header::default();
+
+ match kind {
+ Kind::Mergepoint { force } => {
+ mergepoint(self.repo, &self.drop.meta, &mut header, force)?;
+ ensure!(
+ !header.references.is_empty(),
+ "refusing to create empty checkpoint"
+ );
+ self.annotate_checkpoint(&mut header, &TOPIC_MERGES, message)?;
+ },
+ Kind::Snapshot { incremental } => {
+ snapshot(self.repo, &mut header, incremental)?;
+ ensure!(
+ !header.references.is_empty(),
+ "refusing to create empty snapshot"
+ );
+ self.annotate_checkpoint(&mut header, &TOPIC_SNAPSHOTS, message)?;
+ },
+ Kind::Patch {
+ head,
+ base,
+ name,
+ re,
+ } => {
+ ensure!(base != head, "refusing to create empty patch");
+ ensure!(
+ if_not_found_none(self.repo.source().merge_base(base, head))?.is_some(),
+ "{base} is not reachable from {head}"
+ );
+ info!("Adding patch for {name}: {base}..{head}");
+ header.add_prerequisite(&base);
+ header.add_reference(name, &head);
+ self.annotate_patch(&mut header, message, re)?;
+ },
+ Kind::Comment { topic, reply } => {
+ self.annotate_comment(&mut header, topic, message, reply)?;
+ },
+ }
+
+ for id in additional_ids {
+ Identity::find(
+ self.repo.target(),
+ &self.drop.ids,
+ self.repo.id_path(),
+ cmd::id::identity_ref(Left(id))?,
+ )?
+ .update(&mut header);
+ }
+
+ let signer_hash = {
+ let keyid = self.submitter.signer.ident().keyid();
+ let id_ref = cmd::id::identity_ref(Left(&self.submitter.id))?;
+ let id = Identity::find(
+ self.repo.target(),
+ &self.drop.ids,
+ self.repo.id_path(),
+ id_ref,
+ )?;
+ ensure!(
+ id.contains(&keyid),
+ "signing key {keyid} not in identity {}",
+ id.id()
+ );
+ id.update(&mut header);
+
+ id.hash().clone()
+ };
+
+ let bundle = patches::Bundle::create(bundle_dir, self.repo.source(), header)?;
+ let signature = bundle
+ .sign(self.submitter.signer)
+ .map(|signature| patches::Signature {
+ signer: signer_hash,
+ signature: signature.into(),
+ })?;
+
+ Ok(patches::Submission { signature, bundle })
+ }
+
+ fn annotate_checkpoint(
+ &mut self,
+ bundle: &mut bundle::Header,
+ topic: &Topic,
+ message: Option<String>,
+ ) -> cmd::Result<()> {
+ let kind = if topic == &*TOPIC_MERGES {
+ notes::CheckpointKind::Merge
+ } else if topic == &*TOPIC_SNAPSHOTS {
+ notes::CheckpointKind::Snapshot
+ } else {
+ bail!("not a checkpoint topic: {topic}")
+ };
+ let note = notes::Simple::checkpoint(kind, bundle.references.clone(), message);
+ let parent = topic::default_reply_to(self.repo.target(), topic)?
+ .map(|id| self.repo.source().find_commit(id))
+ .transpose()?;
+
+ self.annotate(bundle, topic, parent, &note)
+ }
+
+ fn annotate_patch(
+ &mut self,
+ bundle: &mut bundle::Header,
+ cover: Option<String>,
+ re: Option<(Topic, Option<git2::Oid>)>,
+ ) -> cmd::Result<()> {
+ let cover = cover
+ .map(notes::Simple::new)
+ .map(Ok)
+ .unwrap_or_else(|| edit_cover_letter(self.repo.source()))?;
+ let (topic, parent) = match re {
+ Some((topic, reply_to)) => {
+ let parent = find_reply_to(self.repo, &topic, reply_to)?;
+ (topic, Some(parent))
+ },
+ None => {
+ // This is pretty arbitrary -- just use a random string instead?
+ let topic = {
+ let mut hasher = Sha256::new();
+ hasher.update(record::Heads::from(bundle as &bundle::Header));
+ serde_json::to_writer(&mut hasher, &cover)?;
+ hasher.update(self.submitter.signer.ident().keyid());
+ Topic::from(hasher.finalize())
+ };
+ let parent = topic::default_reply_to(self.repo.target(), &topic)?
+ .map(|id| self.repo.source().find_commit(id))
+ .transpose()?;
+
+ (topic, parent)
+ },
+ };
+
+ self.annotate(bundle, &topic, parent, &cover)
+ }
+
+ fn annotate_comment(
+ &mut self,
+ bundle: &mut bundle::Header,
+ topic: Topic,
+ message: Option<String>,
+ reply_to: Option<git2::Oid>,
+ ) -> cmd::Result<()> {
+ let parent = find_reply_to(self.repo, &topic, reply_to)?;
+ let edit = || -> cmd::Result<notes::Simple> {
+ let re = notes::Simple::from_commit(self.repo.target(), &parent)?;
+ edit_comment(self.repo.source(), Some(&re))
+ };
+ let comment = message
+ .map(notes::Simple::new)
+ .map(Ok)
+ .unwrap_or_else(edit)?;
+
+ self.annotate(bundle, &topic, Some(parent), &comment)
+ }
+
+ fn annotate(
+ &mut self,
+ bundle: &mut bundle::Header,
+ topic: &Topic,
+ parent: Option<git2::Commit>,
+ note: &notes::Simple,
+ ) -> cmd::Result<()> {
+ let repo = self.repo.source();
+ let topic_ref = topic.as_refname();
+ let tree = {
+ let mut tb = repo.treebuilder(None)?;
+ patches::to_tree(repo, &mut tb, note)?;
+ repo.find_tree(tb.write()?)?
+ };
+ let msg = match note.subject() {
+ Some(s) => format!("{}\n\n{}", s, topic.as_trailer()),
+ None => topic.as_trailer(),
+ };
+ let commit = git::commit_signed(
+ self.submitter.signer,
+ repo,
+ &msg,
+ &tree,
+ parent.as_ref().into_iter().collect::<Vec<_>>().as_slice(),
+ )?;
+
+ if let Some(commit) = parent {
+ bundle.add_prerequisite(&commit.id());
+ }
+ bundle.add_reference(topic_ref, &commit);
+
+ Ok(())
+ }
+}
+
+fn mergepoint(
+ repos: &Repo,
+ meta: &metadata::drop::Verified,
+ bundle: &mut bundle::Header,
+ force: bool,
+) -> git::Result<()> {
+ for branch in meta.roles.branches.keys() {
+ let sandboxed = match patches::TrackingBranch::try_from(branch) {
+ Ok(tracking) => tracking,
+ Err(e) => {
+ warn!("Skipping invalid branch {branch}: {e}");
+ continue;
+ },
+ };
+ let head = {
+ let local = repos.source().find_reference(branch)?;
+ let head = local.peel_to_commit()?.id();
+ if !force {
+ if let Some(upstream) = if_not_found_none(git2::Branch::wrap(local).upstream())? {
+ let upstream_head = upstream.get().peel_to_commit()?.id();
+ if head != upstream_head {
+ warn!(
+ "Upstream {} is not even with {branch}; you may want to push first",
+ String::from_utf8_lossy(upstream.name_bytes()?)
+ );
+ info!("Skipping {branch}");
+ continue;
+ }
+ }
+ }
+
+ head
+ };
+ match if_not_found_none(repos.target().find_reference(&sandboxed))? {
+ Some(base) => {
+ let base = base.peel_to_commit()?.id();
+ if base == head {
+ info!("Skipping empty checkpoint");
+ } else if if_not_found_none(repos.source().merge_base(base, head))?.is_some() {
+ info!("Adding thin checkpoint for branch {branch}: {base}..{head}");
+ bundle.add_prerequisite(&base);
+ bundle.add_reference(branch.clone(), &head);
+ } else {
+ warn!(
+ "{branch} diverges from drop state: no merge base between {base}..{head}"
+ );
+ }
+ },
+
+ None => {
+ info!("Adding full checkpoint for branch {branch}: {head}");
+ bundle.add_reference(branch.clone(), &head);
+ },
+ }
+ }
+
+ Ok(())
+}
+
+fn snapshot(repo: &Repo, bundle: &mut bundle::Header, incremental: bool) -> cmd::Result<()> {
+ for record in dropped::records(repo.target(), REF_IT_PATCHES) {
+ let record = record?;
+ let bundle_hash = record.bundle_hash();
+ if record.is_encrypted() {
+ warn!("Skipping encrypted patch bundle {bundle_hash}",);
+ continue;
+ }
+
+ if record.topic == *TOPIC_SNAPSHOTS {
+ if !incremental {
+ debug!("Full snapshot: skipping previous snapshot {bundle_hash}");
+ continue;
+ } else {
+ info!("Incremental snapshot: found previous snapshot {bundle_hash}");
+ for oid in record.meta.bundle.references.values().copied() {
+ info!("Adding prerequisite {oid} from {bundle_hash}");
+ bundle.add_prerequisite(oid);
+ }
+ break;
+ }
+ }
+
+ info!("Including {bundle_hash} in snapshot");
+ for (name, oid) in &record.meta.bundle.references {
+ info!("Adding {oid} {name}");
+ let name = patches::unbundled_ref(REF_IT_BUNDLES, &record, name)?;
+ bundle.add_reference(name, *oid);
+ }
+ }
+
+ Ok(())
+}
+
+fn find_reply_to<'a>(
+ repo: &'a Repo,
+ topic: &Topic,
+ reply_to: Option<git2::Oid>,
+) -> cmd::Result<git2::Commit<'a>> {
+ let tip = if_not_found_none(repo.target().refname_to_id(&topic.as_refname()))?
+ .ok_or_else(|| anyhow!("topic {topic} does not exist"))?;
+ let id = match reply_to {
+ Some(id) => {
+ ensure!(
+ repo.target().graph_descendant_of(tip, id)?,
+ "{id} not found in topic {topic}, cannot reply"
+ );
+ id
+ },
+ None => topic::default_reply_to(repo.target(), topic)?.expect("impossible: empty topic"),
+ };
+
+ Ok(repo.source().find_commit(id)?)
+}
+
+struct Identity {
+ hash: ContentHash,
+ verified: identity::Verified,
+ update: Option<Range>,
+}
+
+impl Identity {
+ pub fn find(
+ repo: &git2::Repository,
+ ids: &git2::Tree,
+ id_path: &[git2::Repository],
+ refname: Refname,
+ ) -> cmd::Result<Self> {
+ let find_parent = metadata::git::find_parent(repo);
+
+ struct Meta {
+ hash: ContentHash,
+ id: identity::Verified,
+ }
+
+ impl Meta {
+ fn identity(&self) -> &metadata::Identity {
+ self.id.identity()
+ }
+ }
+
+ let (ours_in, ours) =
+ metadata::Identity::from_search_path(id_path, &refname).and_then(|data| {
+ let signer = data.meta.signed.verified(&find_parent)?;
+ Ok((
+ data.repo,
+ Meta {
+ hash: data.meta.hash,
+ id: signer,
+ },
+ ))
+ })?;
+
+ let tree_path = PathBuf::from(ours.id.id().to_string()).join(META_FILE_ID);
+ let newer = match if_not_found_none(ids.get_path(&tree_path))? {
+ None => {
+ let start = ours_in.refname_to_id(&refname)?;
+ let range = Range {
+ refname,
+ start,
+ end: None,
+ };
+ Self {
+ hash: ours.hash,
+ verified: ours.id,
+ update: Some(range),
+ }
+ },
+ Some(in_tree) if ours.hash == in_tree.id() => Self {
+ hash: ours.hash,
+ verified: ours.id,
+ update: None,
+ },
+ Some(in_tree) => {
+ let theirs = metadata::Identity::from_blob(&repo.find_blob(in_tree.id())?)
+ .and_then(|GitMeta { hash, signed }| {
+ let signer = signed.verified(&find_parent)?;
+ Ok(Meta { hash, id: signer })
+ })?;
+
+ if ours.identity().has_ancestor(&theirs.hash, &find_parent)? {
+ let range = Range::compute(ours_in, refname, theirs.hash.as_oid())?;
+ Self {
+ hash: ours.hash,
+ verified: ours.id,
+ update: range,
+ }
+ } else if theirs.identity().has_ancestor(&ours.hash, &find_parent)? {
+ Self {
+ hash: theirs.hash,
+ verified: theirs.id,
+ update: None,
+ }
+ } else {
+ bail!(
+ "provided identity at {} diverges from in-tree at {}",
+ ours.hash,
+ theirs.hash,
+ )
+ }
+ },
+ };
+
+ Ok(newer)
+ }
+
+ pub fn id(&self) -> &IdentityId {
+ self.verified.id()
+ }
+
+ pub fn hash(&self) -> &ContentHash {
+ &self.hash
+ }
+
+ pub fn contains(&self, key: &KeyId) -> bool {
+ self.verified.identity().keys.contains_key(key)
+ }
+
+ pub fn update(&self, bundle: &mut bundle::Header) {
+ if let Some(range) = &self.update {
+ range.add_to_bundle(bundle);
+ }
+ }
+}
+
+struct Range {
+ refname: Refname,
+ start: git2::Oid,
+ end: Option<git2::Oid>,
+}
+
+impl Range {
+ fn compute(
+ repo: &git2::Repository,
+ refname: Refname,
+ known: git2::Oid,
+ ) -> cmd::Result<Option<Self>> {
+ let start = repo.refname_to_id(&refname)?;
+
+ let mut walk = repo.revwalk()?;
+ walk.push(start)?;
+ for oid in walk {
+ let oid = oid?;
+ let blob_id = repo
+ .find_commit(oid)?
+ .tree()?
+ .get_name(META_FILE_ID)
+ .ok_or_else(|| anyhow!("corrupt identity: missing {META_FILE_ID}"))?
+ .id();
+
+ if blob_id == known {
+ return Ok(if oid == start {
+ None
+ } else {
+ Some(Self {
+ refname,
+ start,
+ end: Some(oid),
+ })
+ });
+ }
+ }
+
+ Ok(Some(Self {
+ refname,
+ start,
+ end: None,
+ }))
+ }
+
+ fn add_to_bundle(&self, header: &mut bundle::Header) {
+ header.add_reference(self.refname.clone(), &self.start);
+ if let Some(end) = self.end {
+ header.add_prerequisite(&end);
+ }
+ }
+}
diff --git a/src/cmd/topic.rs b/src/cmd/topic.rs
new file mode 100644
index 0000000..fe4e2df
--- /dev/null
+++ b/src/cmd/topic.rs
@@ -0,0 +1,58 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::path::PathBuf;
+
+use crate::cmd;
+
+pub mod comment;
+
+mod ls;
+pub use ls::{
+ ls,
+ Ls,
+};
+
+mod show;
+pub use show::{
+ show,
+ Show,
+};
+
+mod unbundle;
+pub use unbundle::{
+ unbundle,
+ Unbundle,
+};
+
+#[derive(Debug, clap::Subcommand)]
+#[allow(clippy::large_enum_variant)]
+pub enum Cmd {
+ /// List the recorded topics
+ Ls(Ls),
+ /// Show a topic
+ Show(Show),
+ /// Comment on a topic
+ #[clap(subcommand)]
+ Comment(comment::Cmd),
+ /// Unbundle a topic
+ Unbundle(Unbundle),
+}
+
+impl Cmd {
+ pub fn run(self) -> cmd::Result<cmd::Output> {
+ match self {
+ Self::Ls(args) => ls(args).map(cmd::Output::iter),
+ Self::Show(args) => show(args).map(cmd::Output::iter),
+ Self::Comment(cmd) => cmd.run(),
+ Self::Unbundle(args) => unbundle(args).map(cmd::Output::val),
+ }
+ }
+}
+
+#[derive(Debug, clap::Args)]
+struct Common {
+ /// Path to the drop repository
+ #[clap(from_global)]
+ git_dir: PathBuf,
+}
diff --git a/src/cmd/topic/comment.rs b/src/cmd/topic/comment.rs
new file mode 100644
index 0000000..121dabb
--- /dev/null
+++ b/src/cmd/topic/comment.rs
@@ -0,0 +1,68 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use crate::{
+ cmd::{
+ self,
+ patch,
+ },
+ patches,
+};
+
+#[derive(Debug, clap::Subcommand)]
+pub enum Cmd {
+    /// Record the comment in a local drop history
+ Record(Record),
+ /// Submit the comment to a remote drop
+ Submit(Submit),
+}
+
+impl Cmd {
+ pub fn run(self) -> cmd::Result<cmd::Output> {
+ match self {
+ Self::Record(args) => record(args),
+ Self::Submit(args) => submit(args),
+ }
+ .map(cmd::IntoOutput::into_output)
+ }
+}
+
+#[derive(Debug, clap::Args)]
+pub struct Record {
+ #[clap(flatten)]
+ common: patch::Common,
+ #[clap(flatten)]
+ comment: patch::Comment,
+}
+
+#[derive(Debug, clap::Args)]
+pub struct Submit {
+ #[clap(flatten)]
+ common: patch::Common,
+ #[clap(flatten)]
+ comment: patch::Comment,
+ #[clap(flatten)]
+ remote: patch::Remote,
+}
+
+pub fn record(Record { common, comment }: Record) -> cmd::Result<patches::Record> {
+ patch::create(patch::Kind::Comment {
+ common,
+ remote: None,
+ comment,
+ })
+}
+
+pub fn submit(
+ Submit {
+ common,
+ comment,
+ remote,
+ }: Submit,
+) -> cmd::Result<patches::Record> {
+ patch::create(patch::Kind::Comment {
+ common,
+ remote: Some(remote),
+ comment,
+ })
+}
diff --git a/src/cmd/topic/ls.rs b/src/cmd/topic/ls.rs
new file mode 100644
index 0000000..430cc6e
--- /dev/null
+++ b/src/cmd/topic/ls.rs
@@ -0,0 +1,32 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use crate::{
+ git,
+ patches::{
+ self,
+ Topic,
+ },
+};
+
+use super::Common;
+use crate::cmd;
+
+#[derive(Debug, clap::Args)]
+pub struct Ls {
+ #[clap(flatten)]
+ common: Common,
+}
+
+#[derive(serde::Serialize)]
+pub struct Output {
+ topic: Topic,
+ subject: String,
+}
+
+pub fn ls(args: Ls) -> cmd::Result<Vec<cmd::Result<Output>>> {
+ let repo = git::repo::open(&args.common.git_dir)?;
+ Ok(patches::iter::unbundled::topics_with_subject(&repo)
+ .map(|i| i.map(|(topic, subject)| Output { topic, subject }))
+ .collect())
+}
diff --git a/src/cmd/topic/show.rs b/src/cmd/topic/show.rs
new file mode 100644
index 0000000..1d19720
--- /dev/null
+++ b/src/cmd/topic/show.rs
@@ -0,0 +1,34 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use super::Common;
+use crate::{
+ cmd,
+ git,
+ patches::{
+ self,
+ iter::Note,
+ Topic,
+ },
+};
+
+#[derive(Debug, clap::Args)]
+pub struct Show {
+ #[clap(flatten)]
+ common: Common,
+ /// Traverse the topic in reverse order, ie. oldest first
+ #[clap(long, value_parser)]
+ reverse: bool,
+ #[clap(value_parser)]
+ topic: Topic,
+}
+
+pub fn show(args: Show) -> cmd::Result<Vec<cmd::Result<Note>>> {
+ let repo = git::repo::open(&args.common.git_dir)?;
+ let iter = patches::iter::topic(&repo, &args.topic);
+ if args.reverse {
+ Ok(iter.rev().collect())
+ } else {
+ Ok(iter.collect())
+ }
+}
diff --git a/src/cmd/topic/unbundle.rs b/src/cmd/topic/unbundle.rs
new file mode 100644
index 0000000..3aab54b
--- /dev/null
+++ b/src/cmd/topic/unbundle.rs
@@ -0,0 +1,174 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ collections::{
+ BTreeMap,
+ BTreeSet,
+ },
+ path::PathBuf,
+};
+
+use anyhow::anyhow;
+use clap::ValueHint;
+
+use super::Common;
+use crate::{
+ cmd::{
+ self,
+ ui::{
+ debug,
+ info,
+ warn,
+ },
+ Aborted,
+ },
+ git::{
+ self,
+ if_not_found_none,
+ refs,
+ Refname,
+ },
+ metadata::{
+ self,
+ git::FromGit,
+ },
+ patches::{
+ self,
+ iter::dropped,
+ Bundle,
+ Record,
+ Topic,
+ REF_IT_BUNDLES,
+ REF_IT_PATCHES,
+ TOPIC_MERGES,
+ TOPIC_SNAPSHOTS,
+ },
+ paths,
+};
+
+// TODO:
+//
+// - don't require patch bundle to be present on-disk when snapshots would do
+
+#[derive(Debug, clap::Args)]
+pub struct Unbundle {
+ #[clap(flatten)]
+ common: Common,
+ /// The directory where to write the bundle to
+ ///
+ /// Unless this is an absolute path, it is treated as relative to $GIT_DIR.
+ #[clap(
+ long,
+ value_parser,
+ value_name = "DIR",
+ default_value_os_t = paths::bundles().to_owned(),
+ value_hint = ValueHint::DirPath,
+ )]
+ bundle_dir: PathBuf,
+ /// The topic to unbundle
+ #[clap(value_parser)]
+ topic: Topic,
+ /// The drop history to find the topic in
+ #[clap(value_parser)]
+ drop: Option<String>,
+}
+
+#[derive(serde::Serialize)]
+pub struct Output {
+ updated: BTreeMap<Refname, git::serde::oid::Oid>,
+}
+
+pub fn unbundle(args: Unbundle) -> cmd::Result<Output> {
+ let repo = git::repo::open(&args.common.git_dir)?;
+ let bundle_dir = if args.bundle_dir.is_relative() {
+ repo.path().join(args.bundle_dir)
+ } else {
+ args.bundle_dir
+ };
+ let drop = match args.drop {
+ Some(rev) => if_not_found_none(repo.resolve_reference_from_short_name(&rev))?
+ .ok_or_else(|| anyhow!("no ref matching {rev} found"))?
+ .name()
+ .ok_or_else(|| anyhow!("invalid drop"))?
+ .to_owned(),
+ None => REF_IT_PATCHES.to_owned(),
+ };
+
+ let filter = [&args.topic, &TOPIC_MERGES, &TOPIC_SNAPSHOTS];
+ let mut on_topic: Vec<Record> = Vec::new();
+ let mut checkpoints: Vec<Record> = Vec::new();
+ for row in dropped::topics(&repo, &drop) {
+ let (t, id) = row?;
+
+ if filter.into_iter().any(|f| f == &t) {
+ let commit = repo.find_commit(id)?;
+ let record = Record::from_commit(&repo, &commit)?;
+ if t == args.topic {
+ on_topic.push(record);
+ continue;
+ }
+
+ // Skip checkpoint which came after the most recent record on the topic
+ if !on_topic.is_empty() {
+ checkpoints.push(record);
+ }
+ }
+ }
+
+ let odb = repo.odb()?;
+
+ info!("Indexing checkpoints...");
+ for rec in checkpoints.into_iter().rev() {
+ Bundle::from_stored(&bundle_dir, rec.bundle_info().as_expect())?
+ .packdata()?
+ .index(&odb)?
+ }
+
+ let mut missing = BTreeSet::new();
+ for oid in on_topic
+ .iter()
+ .flat_map(|rec| &rec.bundle_info().prerequisites)
+ {
+ let oid = git2::Oid::try_from(oid)?;
+ if !odb.exists(oid) {
+ missing.insert(oid);
+ }
+ }
+
+ if !missing.is_empty() {
+ warn!("Unable to satisfy all prerequisites");
+ info!("The following prerequisite commits are missing:\n");
+ for oid in missing {
+ info!("{oid}");
+ }
+ info!("\nYou may try to unbundle the entire drop history");
+ cmd::abort!();
+ }
+
+ info!("Unbundling topic records...");
+ let mut tx = refs::Transaction::new(&repo)?;
+ let topic_ref = tx.lock_ref(args.topic.as_refname())?;
+ let mut up = BTreeMap::new();
+ for rec in on_topic.into_iter().rev() {
+ let hash = rec.bundle_hash();
+ let bundle = Bundle::from_stored(&bundle_dir, rec.bundle_info().as_expect())?;
+ if bundle.is_encrypted() {
+ warn!("Skipping encrypted bundle {hash}");
+ continue;
+ }
+ bundle.packdata()?.index(&odb)?;
+ debug!("{hash}: unbundle");
+ let updated = patches::unbundle(&odb, &mut tx, REF_IT_BUNDLES, &rec)?;
+ for (name, oid) in updated {
+ up.insert(name, oid.into());
+ }
+ debug!("{hash}: merge notes");
+ let submitter = metadata::Identity::from_content_hash(&repo, &rec.meta.signature.signer)?
+ .verified(metadata::git::find_parent(&repo))?;
+ patches::merge_notes(&repo, &submitter, &topic_ref, &rec)?;
+ }
+ tx.commit()?;
+
+ Ok(Output { updated: up })
+}
diff --git a/src/cmd/ui.rs b/src/cmd/ui.rs
new file mode 100644
index 0000000..c1ad214
--- /dev/null
+++ b/src/cmd/ui.rs
@@ -0,0 +1,131 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ borrow::Cow,
+ env,
+ ffi::OsStr,
+ io,
+ process::{
+ self,
+ Command,
+ Stdio,
+ },
+};
+
+use anyhow::ensure;
+use console::Term;
+use zeroize::Zeroizing;
+
+use crate::{
+ cmd::{
+ self,
+ Aborted,
+ },
+ patches::notes,
+};
+
+mod editor;
+mod output;
+pub use output::{
+ debug,
+ error,
+ info,
+ warn,
+ Output,
+};
+
+/// Let the user compose a commit message in their editor.
+///
+/// The editor buffer is pre-filled with a git-style template: an
+/// instructional comment header, then a scissors line followed by the
+/// diff between `old` and `new` (patience, minimal, 5 context lines) for
+/// reference. Aborts the command if the resulting message is empty.
+pub fn edit_commit_message(
+    repo: &git2::Repository,
+    branch: &str,
+    old: &git2::Tree,
+    new: &git2::Tree,
+) -> cmd::Result<String> {
+    let diff = repo.diff_tree_to_tree(
+        Some(old),
+        Some(new),
+        Some(
+            git2::DiffOptions::new()
+                .patience(true)
+                .minimal(true)
+                .context_lines(5),
+        ),
+    )?;
+    abort_if_empty(
+        "commit message",
+        editor::Commit::new(repo.path())?.edit(branch, diff),
+    )
+}
+
+/// Let the user compose a cover letter for a patch series in their
+/// editor. Aborts the command if the result is empty.
+pub fn edit_cover_letter(repo: &git2::Repository) -> cmd::Result<notes::Simple> {
+    abort_if_empty(
+        "cover letter",
+        editor::CoverLetter::new(repo.path())?.edit(),
+    )
+}
+
+/// Let the user compose a comment, optionally in reply to an existing
+/// note `re`. Aborts the command if the result is empty.
+pub fn edit_comment(
+    repo: &git2::Repository,
+    re: Option<&notes::Simple>,
+) -> cmd::Result<notes::Simple> {
+    abort_if_empty("comment", editor::Comment::new(repo.path())?.edit(re))
+}
+
+/// Let the user edit `template` as pretty-printed JSON, deserializing
+/// the result back into a `T`. Aborts the command if the buffer is
+/// empty.
+pub fn edit_metadata<T>(template: T) -> cmd::Result<T>
+where
+    T: serde::Serialize + serde::de::DeserializeOwned,
+{
+    abort_if_empty("metadata", editor::Metadata::new()?.edit(template))
+}
+
+/// Unwrap an editor session result, aborting the command with a message
+/// naming `ctx` when the user left the buffer empty.
+fn abort_if_empty<T>(ctx: &str, edit: io::Result<Option<T>>) -> cmd::Result<T> {
+    edit?.map(Ok).unwrap_or_else(|| {
+        info!("Aborting due to empty {ctx}");
+        cmd::abort!()
+    })
+}
+
+/// Read a secret from the user.
+///
+/// Follows the OpenSSH askpass conventions: when `SSH_ASKPASS_REQUIRE`
+/// is "force", the askpass program (`SSH_ASKPASS`, default
+/// "ssh-askpass") is always used; when "prefer", it is used only if
+/// `DISPLAY` is set. Otherwise the secret is read interactively from the
+/// terminal on stderr. The returned buffer is zeroized on drop.
+pub fn askpass(prompt: &str) -> cmd::Result<Zeroizing<Vec<u8>>> {
+    const DEFAULT_ASKPASS: &str = "ssh-askpass";
+
+    // Resolve the askpass program: `SSH_ASKPASS` if set, else the default.
+    fn ssh_askpass() -> Cow<'static, OsStr> {
+        env::var_os("SSH_ASKPASS")
+            .map(Into::into)
+            .unwrap_or_else(|| OsStr::new(DEFAULT_ASKPASS).into())
+    }
+
+    let ssh = env::var_os("SSH_ASKPASS_REQUIRE").and_then(|require| {
+        if require == "force" {
+            Some(ssh_askpass())
+        } else if require == "prefer" {
+            env::var_os("DISPLAY").map(|_| ssh_askpass())
+        } else {
+            None
+        }
+    });
+
+    match ssh {
+        Some(cmd) => {
+            // The askpass program prints the secret on its stdout.
+            let process::Output { status, stdout, .. } = Command::new(&cmd)
+                .arg(prompt)
+                .stderr(Stdio::inherit())
+                .output()?;
+            ensure!(
+                status.success(),
+                "{} failed with {:?}",
+                cmd.to_string_lossy(),
+                status.code()
+            );
+            Ok(Zeroizing::new(stdout))
+        },
+        None => {
+            let tty = Term::stderr();
+            // Only print the prompt when stderr is actually a terminal.
+            if tty.is_term() {
+                tty.write_line(prompt)?;
+            }
+            tty.read_secure_line()
+                .map(|s| Zeroizing::new(s.into_bytes()))
+                .map_err(Into::into)
+        },
+    }
+}
diff --git a/src/cmd/ui/editor.rs b/src/cmd/ui/editor.rs
new file mode 100644
index 0000000..a2a7a5e
--- /dev/null
+++ b/src/cmd/ui/editor.rs
@@ -0,0 +1,228 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ env,
+ ffi::OsString,
+ io::{
+ self,
+ BufRead as _,
+ Write as _,
+ },
+ path::{
+ Path,
+ PathBuf,
+ },
+ process::Command,
+};
+
+use tempfile::TempPath;
+
+use crate::{
+ fs::LockedFile,
+ patches::notes,
+};
+
+/// Scissors marker, as used by `git commit --cleanup=scissors`: the line
+/// itself and everything below it is stripped from the edited message.
+const SCISSORS: &str = "# ------------------------ >8 ------------------------";
+
+/// Editor session for a commit message (`$GIT_DIR/COMMIT_EDITMSG`).
+pub struct Commit(Editmsg);
+
+impl Commit {
+    pub fn new<P: AsRef<Path>>(git_dir: P) -> io::Result<Self> {
+        Editmsg::new(git_dir.as_ref().join("COMMIT_EDITMSG")).map(Self)
+    }
+
+    /// Run the editor, pre-filling the buffer with an instructional
+    /// header and, below the scissors line, the diff to be committed.
+    /// Returns `None` if the user left the message empty.
+    pub fn edit(self, branch: &str, diff: git2::Diff) -> io::Result<Option<String>> {
+        // Display the bare branch name, not the full refname.
+        let branch = branch.strip_prefix("refs/heads/").unwrap_or(branch);
+        self.0.edit(|buf| {
+            write!(
+                buf,
+                "
+# Please enter the commit message for your changes. Lines starting
+# with '#' will be ignored, and an empty message aborts the commit.
+#
+# On branch {branch}
+#
+{SCISSORS}
+# Do not modify or remove the line above.
+# Everything below it will be ignored.
+#
+# Changes to be committed:
+"
+            )?;
+            // Render the diff below the scissors line. The print callback
+            // must return `true` to keep going; write failures inside it
+            // surface as a git2 error, mapped back to io::Error below.
+            diff.print(git2::DiffFormat::Patch, |_delta, _hunk, line| {
+                use git2::DiffLineType::{
+                    Addition,
+                    Context,
+                    Deletion,
+                };
+                // Content lines carry their origin marker ('+', '-', ' ');
+                // headers and hunk lines are written as-is.
+                let ok = if matches!(line.origin_value(), Context | Addition | Deletion) {
+                    write!(buf, "{}", line.origin()).is_ok()
+                } else {
+                    true
+                };
+                ok && buf.write_all(line.content()).is_ok()
+            })
+            .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
+            Ok(())
+        })
+    }
+}
+
+/// Editor session for a patch cover letter (`$GIT_DIR/NOTES_EDITMSG`).
+pub struct CoverLetter(Editmsg);
+
+impl CoverLetter {
+    pub fn new<P: AsRef<Path>>(git_dir: P) -> io::Result<Self> {
+        Editmsg::new(git_dir.as_ref().join("NOTES_EDITMSG")).map(Self)
+    }
+
+    // TODO: render patch series a la git log
+    /// Run the editor. Returns `None` if the user left the buffer empty,
+    /// otherwise the text wrapped as a [`notes::Simple`].
+    pub fn edit(self) -> io::Result<Option<notes::Simple>> {
+        let txt = self.0.edit(|buf| {
+            writeln!(
+                buf,
+                "
+# Please describe your patch as you would in a cover letter or PR.
+# Lines starting with '#' will be ignored, and an empty message
+# aborts the patch creation.
+#
+{SCISSORS}
+# Do not modify or remove the line above.
+# Everything below it will be ignored.
+#
+# Changes to be committed:
+
+TODO (sorry)
+"
+            )?;
+
+            Ok(())
+        })?;
+
+        Ok(txt.map(notes::Simple::new))
+    }
+}
+
+/// Editor session for a topic comment (`$GIT_DIR/NOTES_EDITMSG`).
+pub struct Comment(Editmsg);
+
+impl Comment {
+    pub fn new<P: AsRef<Path>>(git_dir: P) -> io::Result<Self> {
+        Editmsg::new(git_dir.as_ref().join("NOTES_EDITMSG")).map(Self)
+    }
+
+    /// Run the editor, optionally quoting the note `re` being replied to
+    /// below the scissors line (pretty-printed JSON, for reference only —
+    /// it is stripped from the result). Returns `None` if the user left
+    /// the buffer empty.
+    pub fn edit(self, re: Option<&notes::Simple>) -> io::Result<Option<notes::Simple>> {
+        let txt = self.0.edit(|buf| {
+            write!(
+                buf,
+                "
+# Enter your comment above. Lines starting with '#' will be ignored,
+# and an empty message aborts the comment creation.
+"
+            )?;
+
+            if let Some(prev) = re {
+                write!(
+                    buf,
+                    "#
+{SCISSORS}
+# Do not modify or remove the line above.
+# Everything below it will be ignored.
+#
+# Replying to:
+"
+                )?;
+
+                serde_json::to_writer_pretty(buf, prev)?;
+            }
+
+            Ok(())
+        })?;
+
+        Ok(txt.map(notes::Simple::new))
+    }
+}
+
+/// Editor session for arbitrary JSON-serializable metadata, backed by a
+/// temporary `.json` file.
+pub struct Metadata {
+    // Keeps the temporary file path alive for the duration of the
+    // session; the file is deleted when this is dropped.
+    _tmp: TempPath,
+    msg: Editmsg,
+}
+
+impl Metadata {
+    pub fn new() -> io::Result<Self> {
+        let _tmp = tempfile::Builder::new()
+            .suffix(".json")
+            .tempfile()?
+            .into_temp_path();
+        let msg = Editmsg::new(&_tmp)?;
+
+        Ok(Self { _tmp, msg })
+    }
+
+    // TODO: explainers, edit errors
+    /// Let the user edit `template` as pretty-printed JSON. Returns
+    /// `None` if the buffer was left empty, otherwise the re-parsed
+    /// value (a deserialization failure maps into `io::Error`).
+    pub fn edit<T>(self, template: T) -> io::Result<Option<T>>
+    where
+        T: serde::Serialize + serde::de::DeserializeOwned,
+    {
+        let txt = self.msg.edit(|buf| {
+            serde_json::to_writer_pretty(buf, &template)?;
+
+            Ok(())
+        })?;
+
+        Ok(txt.as_deref().map(serde_json::from_str).transpose()?)
+    }
+}
+
+/// An editable message file (à la git's `COMMIT_EDITMSG`), protected by
+/// a git-style lock file for the duration of the edit.
+struct Editmsg {
+    file: LockedFile,
+}
+
+impl Editmsg {
+    fn new<P: Into<PathBuf>>(path: P) -> io::Result<Self> {
+        LockedFile::in_place(path, true, 0o644).map(|file| Self { file })
+    }
+
+    /// Pre-fill the file via `pre_fill`, launch the user's editor on it,
+    /// and read back the result.
+    ///
+    /// Lines starting with '#' are dropped, and everything from the
+    /// [`SCISSORS`] line onwards is ignored. Returns `None` if the
+    /// remaining message is empty after trimming trailing whitespace.
+    ///
+    /// NOTE(review): the editor's exit status is not inspected, so a
+    /// failed editor run is indistinguishable from an intentionally
+    /// empty message -- confirm intended.
+    fn edit<F>(mut self, pre_fill: F) -> io::Result<Option<String>>
+    where
+        F: FnOnce(&mut LockedFile) -> io::Result<()>,
+    {
+        pre_fill(&mut self.file)?;
+        Command::new(editor())
+            .arg(self.file.edit_path())
+            .spawn()?
+            .wait()?;
+        // The editor may have replaced the file on disk; reacquire the
+        // handle before reading.
+        self.file.reopen()?;
+        let mut msg = String::new();
+        for line in io::BufReader::new(self.file).lines() {
+            let line = line?;
+            if line == SCISSORS {
+                break;
+            }
+            if line.starts_with('#') {
+                continue;
+            }
+
+            msg.push_str(&line);
+            msg.push('\n');
+        }
+        // Trim trailing whitespace (incl. the final newline) in place.
+        let len = msg.trim_end().len();
+        msg.truncate(len);
+
+        Ok(if msg.is_empty() { None } else { Some(msg) })
+    }
+}
+
+/// Determine the editor command: `$VISUAL`, then `$EDITOR`, then a
+/// platform default (`notepad.exe` on Windows, `vi` elsewhere).
+///
+/// NOTE(review): git's own `GIT_EDITOR` / `core.editor` settings are not
+/// consulted -- confirm this is intended.
+fn editor() -> OsString {
+    #[cfg(windows)]
+    const DEFAULT_EDITOR: &str = "notepad.exe";
+    #[cfg(not(windows))]
+    const DEFAULT_EDITOR: &str = "vi";
+
+    if let Some(exe) = env::var_os("VISUAL") {
+        return exe;
+    }
+    if let Some(exe) = env::var_os("EDITOR") {
+        return exe;
+    }
+    DEFAULT_EDITOR.into()
+}
diff --git a/src/cmd/ui/output.rs b/src/cmd/ui/output.rs
new file mode 100644
index 0000000..f1ad598
--- /dev/null
+++ b/src/cmd/ui/output.rs
@@ -0,0 +1,44 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+pub use log::{
+ debug,
+ error,
+ info,
+ warn,
+};
+
+/// A minimal [`log::Log`] implementation which writes every enabled
+/// record to stderr.
+///
+/// Error and warning records are colorized (red resp. yellow) when
+/// stderr is an attended terminal with colors enabled; everything else
+/// is printed unstyled.
+pub struct Output;
+
+impl log::Log for Output {
+    fn enabled(&self, metadata: &log::Metadata) -> bool {
+        metadata.level() <= log::max_level()
+    }
+
+    fn log(&self, record: &log::Record) {
+        let meta = record.metadata();
+        if !self.enabled(meta) {
+            return;
+        }
+        let level = meta.level();
+        let style = {
+            let s = console::Style::new().for_stderr();
+            // `log::Level` orders Error < Warn < Info, so this branch is
+            // entered exactly for Error and Warn -- hence the
+            // `unreachable!()` arms below.
+            if level < log::Level::Info
+                && console::user_attended_stderr()
+                && console::colors_enabled_stderr()
+            {
+                match level {
+                    log::Level::Error => s.red(),
+                    log::Level::Warn => s.yellow(),
+                    log::Level::Info | log::Level::Debug | log::Level::Trace => unreachable!(),
+                }
+            } else {
+                s
+            }
+        };
+
+        eprintln!("{}", style.apply_to(record.args()));
+    }
+
+    fn flush(&self) {}
+}
diff --git a/src/cmd/util.rs b/src/cmd/util.rs
new file mode 100644
index 0000000..27654d8
--- /dev/null
+++ b/src/cmd/util.rs
@@ -0,0 +1,4 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+pub mod args;
diff --git a/src/cmd/util/args.rs b/src/cmd/util/args.rs
new file mode 100644
index 0000000..e372c82
--- /dev/null
+++ b/src/cmd/util/args.rs
@@ -0,0 +1,139 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use core::{
+ fmt,
+ slice,
+ str::FromStr,
+};
+use std::{
+ borrow::Borrow,
+ convert::Infallible,
+ env,
+ path::PathBuf,
+ vec,
+};
+
+pub use crate::git::Refname;
+use crate::{
+ cfg::paths,
+ git,
+};
+
+/// Search path akin to the `PATH` environment variable.
+#[derive(Clone, Debug)]
+pub struct SearchPath(Vec<PathBuf>);
+
+impl SearchPath {
+    /// `true` if the search path has no elements.
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+
+    /// Number of elements in the search path.
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+}
+
+impl fmt::Display for SearchPath {
+    /// Render as a platform-style separator-joined path list.
+    ///
+    /// NOTE(review): `join_paths` errors (and this `unwrap` panics) if
+    /// an element contains the platform's path separator -- confirm
+    /// elements are guaranteed separator-free.
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        std::env::join_paths(&self.0)
+            .unwrap()
+            .to_string_lossy()
+            .fmt(f)
+    }
+}
+
+impl FromStr for SearchPath {
+    type Err = Infallible;
+
+    /// Split a separator-delimited path list (see [`env::split_paths`]).
+    /// Never fails.
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        Ok(Self(env::split_paths(s).collect()))
+    }
+}
+
+impl IntoIterator for SearchPath {
+    type Item = PathBuf;
+    type IntoIter = vec::IntoIter<PathBuf>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.into_iter()
+    }
+}
+
+impl<'a> IntoIterator for &'a SearchPath {
+    type Item = &'a PathBuf;
+    type IntoIter = slice::Iter<'a, PathBuf>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.iter()
+    }
+}
+
+/// A [`SearchPath`] with a [`Default`] appropriate for `it` identity
+/// repositories.
+#[derive(Clone, Debug)]
+pub struct IdSearchPath(SearchPath);
+
+impl IdSearchPath {
+    /// `true` if the search path has no elements.
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+
+    /// Number of elements in the search path.
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+
+    /// Attempt to open each path element as a git repository
+    ///
+    /// The repositories will be opened as bare, even if they aren't. No error
+    /// is returned if a repo could not be opened (e.g. because it is not a git
+    /// repository).
+    pub fn open_git(&self) -> Vec<git2::Repository> {
+        let mut rs = Vec::with_capacity(self.len());
+        for path in self {
+            if let Ok(repo) = git::repo::open_bare(path) {
+                rs.push(repo);
+            }
+        }
+
+        rs
+    }
+}
+
+impl Default for IdSearchPath {
+    /// A single-element path: the default identity repository location,
+    /// [`paths::ids()`].
+    fn default() -> Self {
+        Self(SearchPath(vec![paths::ids()]))
+    }
+}
+
+impl fmt::Display for IdSearchPath {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.0.fmt(f)
+    }
+}
+
+impl FromStr for IdSearchPath {
+    type Err = <SearchPath as FromStr>::Err;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        s.parse().map(Self)
+    }
+}
+
+impl IntoIterator for IdSearchPath {
+    type Item = <SearchPath as IntoIterator>::Item;
+    type IntoIter = <SearchPath as IntoIterator>::IntoIter;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.into_iter()
+    }
+}
+
+impl<'a> IntoIterator for &'a IdSearchPath {
+    type Item = <&'a SearchPath as IntoIterator>::Item;
+    type IntoIter = <&'a SearchPath as IntoIterator>::IntoIter;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.borrow().into_iter()
+    }
+}
diff --git a/src/error.rs b/src/error.rs
new file mode 100644
index 0000000..e202dfa
--- /dev/null
+++ b/src/error.rs
@@ -0,0 +1,12 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+/// Crate-wide error type; an alias for [`anyhow::Error`].
+pub type Error = anyhow::Error;
+/// Crate-wide result type; an alias for [`anyhow::Result`].
+pub type Result<T> = anyhow::Result<T>;
+
+/// Generic lookup failure: `what` was not found in `whence`.
+#[derive(Debug, thiserror::Error)]
+#[error("{what} not found in {whence}")]
+pub struct NotFound<T, U> {
+    pub what: T,
+    pub whence: U,
+}
diff --git a/src/fs.rs b/src/fs.rs
new file mode 100644
index 0000000..436ec83
--- /dev/null
+++ b/src/fs.rs
@@ -0,0 +1,192 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ fs::{
+ remove_file,
+ rename,
+ File,
+ },
+ io::{
+ self,
+ Read,
+ Seek,
+ Write,
+ },
+ path::{
+ Path,
+ PathBuf,
+ },
+};
+
+/// A [`File`] which is protected by a git-style lock file
+///
+/// When a [`LockedFile`] is created, a lock file named after its path with
+/// suffix ".lock" is created with `O_EXCL`. That is, if the lock file already
+/// exists, the operation will fail.
+///
+/// Then, either the lock file (when using [`LockedFile::atomic`]) or the base
+/// file (when using [`LockedFile::in_place`] is opened for writing.
+/// [`LockedFile`] implements [`Write`], [`Read`], and [`Seek`].
+///
+/// When a [`LockedFile`] is dropped, the lock file is unlinked. **NOTE** that
+/// this may leave the lock file in place if the process exits forcefully.
+///
+/// When using [`LockedFile::atomic`], the modified lock file is renamed to the
+/// base file atomically. For this to happen, [`LockedFile::persist`] must be
+/// called explicitly.
+pub struct LockedFile {
+ /// Path to the lock file
+ lock: PathBuf,
+ /// Path to the file being edited
+ path: PathBuf,
+ /// File being edited
+ edit: File,
+ /// Commit mode
+ mode: Commit,
+}
+
+enum Commit {
+ Atomic,
+ InPlace,
+}
+
+impl Drop for LockedFile {
+    fn drop(&mut self) {
+        // Best-effort unlink of the lock file; failure (e.g. when
+        // `persist` already removed or renamed it) is ignored.
+        remove_file(&self.lock).ok();
+    }
+}
+
+impl LockedFile {
+    /// Default permissions for newly created files: `rw-r--r--`.
+    pub const DEFAULT_PERMISSIONS: u32 = 0o644;
+
+    /// Lock `path` and edit the **lock file**, which replaces `path`
+    /// atomically when [`LockedFile::persist`] is called.
+    ///
+    /// With `truncate = false`, the lock file is pre-populated with the
+    /// current contents of `path` (if it exists) and opened for
+    /// appending. `mode` is the unix permission mode of the lock file,
+    /// defaulting to [`Self::DEFAULT_PERMISSIONS`]. Fails if the lock
+    /// file already exists (`create_new`).
+    ///
+    /// NOTE(review): the lock path is `path.with_extension("lock")`,
+    /// which *replaces* an existing extension ("a.json" -> "a.lock")
+    /// rather than appending -- confirm this cannot collide. Also,
+    /// `fs::copy` below copies `path`'s permissions onto the lock file,
+    /// overriding `perm` -- confirm intended.
+    pub fn atomic<P, M>(path: P, truncate: bool, mode: M) -> io::Result<Self>
+    where
+        P: Into<PathBuf>,
+        M: Into<Option<u32>>,
+    {
+        let path = path.into();
+        let perm = mode.into().unwrap_or(Self::DEFAULT_PERMISSIONS);
+        let lock = path.with_extension("lock");
+        let mut edit = File::options()
+            .read(true)
+            .write(true)
+            .create_new(true)
+            .permissions(perm)
+            .open(&lock)?;
+        if !truncate && path.exists() {
+            // Seed the lock file with the current contents, then reopen
+            // it in append mode so edits extend rather than overwrite.
+            std::fs::copy(&path, &lock)?;
+            edit = File::options().read(true).append(true).open(&lock)?;
+        }
+        let mode = Commit::Atomic;
+
+        Ok(Self {
+            lock,
+            path,
+            edit,
+            mode,
+        })
+    }
+
+    /// Lock `path` and edit `path` itself in place; the lock file is an
+    /// empty guard which is removed on drop or [`LockedFile::persist`].
+    ///
+    /// `truncate` and `mode` apply to `path`, which is created if it
+    /// does not exist. Fails if the lock file already exists.
+    pub fn in_place<P, M>(path: P, truncate: bool, mode: M) -> io::Result<Self>
+    where
+        P: Into<PathBuf>,
+        M: Into<Option<u32>>,
+    {
+        let path = path.into();
+        let perm = mode.into().unwrap_or(Self::DEFAULT_PERMISSIONS);
+        let lock = path.with_extension("lock");
+        let _ = File::options()
+            .read(true)
+            .write(true)
+            .create_new(true)
+            .permissions(perm)
+            .open(&lock)?;
+        let edit = File::options()
+            .read(true)
+            .write(true)
+            .truncate(truncate)
+            .create(true)
+            .permissions(perm)
+            .open(&path)?;
+        let mode = Commit::InPlace;
+
+        Ok(Self {
+            lock,
+            path,
+            edit,
+            mode,
+        })
+    }
+
+    /// Reopen the file handle
+    ///
+    /// This is sometimes necessary, eg. when launching an editor to let the
+    /// user modify the file, in which case the file descriptor of the
+    /// handle is invalidated.
+    pub fn reopen(&mut self) -> io::Result<()> {
+        self.edit = File::options()
+            .read(true)
+            .write(true)
+            .open(self.edit_path())?;
+        Ok(())
+    }
+
+    /// Path of the file actually being edited: the lock file in atomic
+    /// mode, the base file in in-place mode.
+    pub fn edit_path(&self) -> &Path {
+        match self.mode {
+            Commit::Atomic => &self.lock,
+            Commit::InPlace => &self.path,
+        }
+    }
+
+    /// Path of the file ultimately being modified.
+    #[allow(unused)]
+    pub fn target_path(&self) -> &Path {
+        &self.path
+    }
+
+    /// Commit the edit: atomically rename the lock file over the base
+    /// file (atomic mode), or just remove the lock guard (in-place
+    /// mode).
+    pub fn persist(self) -> io::Result<()> {
+        match self.mode {
+            Commit::Atomic => rename(&self.lock, &self.path),
+            Commit::InPlace => remove_file(&self.lock),
+        }
+    }
+}
+
+// I/O on a `LockedFile` operates on the file being edited (see
+// `edit_path`).
+impl Read for LockedFile {
+    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+        self.edit.read(buf)
+    }
+}
+
+impl Write for LockedFile {
+    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+        self.edit.write(buf)
+    }
+
+    fn flush(&mut self) -> io::Result<()> {
+        self.edit.flush()
+    }
+}
+
+impl Seek for LockedFile {
+    fn seek(&mut self, pos: io::SeekFrom) -> io::Result<u64> {
+        self.edit.seek(pos)
+    }
+}
+
+/// Extension trait to set unix file permission bits on
+/// [`std::fs::OpenOptions`] in a cross-platform way.
+pub(crate) trait PermissionsExt {
+    fn permissions(&mut self, mode: u32) -> &mut Self;
+}
+
+impl PermissionsExt for std::fs::OpenOptions {
+    #[cfg(unix)]
+    fn permissions(&mut self, mode: u32) -> &mut Self {
+        use std::os::unix::fs::OpenOptionsExt as _;
+        self.mode(mode)
+    }
+
+    // On non-unix platforms the mode is ignored.
+    #[cfg(not(unix))]
+    fn permissions(&mut self, mode: u32) -> &mut Self {
+        self
+    }
+}
diff --git a/src/git.rs b/src/git.rs
new file mode 100644
index 0000000..f837711
--- /dev/null
+++ b/src/git.rs
@@ -0,0 +1,111 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::process::{
+ self,
+ Command,
+};
+
+use anyhow::{
+ anyhow,
+ ensure,
+ Context,
+};
+use once_cell::sync::Lazy;
+use sha2::{
+ Digest,
+ Sha256,
+};
+
+mod commit;
+pub use commit::{
+ commit_signed,
+ verify_commit_signature,
+};
+
+pub mod config;
+
+pub mod refs;
+pub use refs::{
+ ReferenceNames,
+ Refname,
+};
+pub mod repo;
+pub use repo::add_alternates;
+pub mod serde;
+
+pub static EMPTY_TREE: Lazy<git2::Oid> =
+ Lazy::new(|| git2::Oid::from_str("4b825dc642cb6eb9a060e54bf8d69288fbee4904").unwrap());
+
+pub type Result<T> = core::result::Result<T, git2::Error>;
+
+/// Look up the empty tree object, which exists in every repository.
+pub fn empty_tree(repo: &git2::Repository) -> Result<git2::Tree> {
+    repo.find_tree(*EMPTY_TREE)
+}
+
+/// Map a [`git2::ErrorCode::NotFound`] error to `Ok(None)`, passing
+/// through all other outcomes.
+pub fn if_not_found_none<T>(r: Result<T>) -> Result<Option<T>> {
+    if_not_found_then(r.map(Some), || Ok(None))
+}
+
+/// Run the fallback `f` iff `r` failed with
+/// [`git2::ErrorCode::NotFound`].
+pub fn if_not_found_then<F, T>(r: Result<T>, f: F) -> Result<T>
+where
+    F: FnOnce() -> Result<T>,
+{
+    r.or_else(|e| match e.code() {
+        git2::ErrorCode::NotFound => f(),
+        _ => Err(e),
+    })
+}
+
+/// Compute the git (SHA-1) object id of `data` as a blob, i.e. the hash
+/// over `"blob <len>\0"` followed by the data.
+pub fn blob_hash(data: &[u8]) -> Result<git2::Oid> {
+    // very minimally faster than going through libgit2. not sure yet if that's
+    // worth the dependency.
+    #[cfg(feature = "sha1dc")]
+    {
+        use sha1collisiondetection::Sha1CD;
+
+        let mut hasher = Sha1CD::default();
+        hasher.update("blob ");
+        hasher.update(data.len().to_string().as_bytes());
+        hasher.update(b"\0");
+        hasher.update(data);
+        let hash = hasher.finalize_cd().expect("sha1 collision detected");
+        git2::Oid::from_bytes(&hash)
+    }
+    #[cfg(not(feature = "sha1dc"))]
+    git2::Oid::hash_object(git2::ObjectType::Blob, data)
+}
+
+/// Like [`blob_hash`], but using SHA-256 over the same
+/// `"blob <len>\0<data>"` preimage.
+pub fn blob_hash_sha2(data: &[u8]) -> [u8; 32] {
+    let mut hasher = Sha256::new();
+    hasher.update("blob ");
+    hasher.update(data.len().to_string().as_bytes());
+    hasher.update(b"\0");
+    hasher.update(data);
+    hasher.finalize().into()
+}
+
+/// Look up `key` from config and run the value as a command
+///
+/// The value is split shell-style (via `shlex`) into program and
+/// arguments. Returns the first line of the command's stdout, or
+/// `Ok(None)` if `key` is not set. Errors if the value is not a
+/// parseable command line, the command exits non-zero, produces no
+/// output, or the first line is not valid UTF-8.
+pub fn config_command(cfg: &git2::Config, key: &str) -> crate::Result<Option<String>> {
+    if_not_found_none(cfg.get_string(key))?
+        .map(|cmd| {
+            let process::Output { status, stdout, .. } = {
+                let invalid = || anyhow!("'{cmd}' is not a valid command");
+                let lex = shlex::split(&cmd).ok_or_else(invalid)?;
+                let (bin, args) = lex.split_first().ok_or_else(invalid)?;
+                Command::new(bin)
+                    .args(args)
+                    .stderr(process::Stdio::inherit())
+                    .output()?
+            };
+            ensure!(status.success(), "'{cmd}' failed");
+            const NL: u8 = b'\n';
+            let line1 = stdout
+                .into_iter()
+                .take_while(|b| b != &NL)
+                .collect::<Vec<_>>();
+            ensure!(!line1.is_empty(), "no output from '{cmd}'");
+            String::from_utf8(line1).with_context(|| format!("invalid output from '{cmd}'"))
+        })
+        .transpose()
+}
diff --git a/src/git/commit.rs b/src/git/commit.rs
new file mode 100644
index 0000000..cb4a516
--- /dev/null
+++ b/src/git/commit.rs
@@ -0,0 +1,46 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use crate::ssh;
+
+/// The sshsig namespace used for git object signatures.
+const SSHSIG_NAMESPACE: &str = "git";
+
+/// Create a commit from `tree` and `parents`, signing the commit buffer
+/// with an SSHSIG signature (namespace "git", SHA-512 digest), and
+/// return the new commit's id.
+///
+/// Author and committer are both taken from the repository's default
+/// signature.
+pub fn commit_signed<'a, S>(
+    signer: &mut S,
+    repo: &'a git2::Repository,
+    msg: impl AsRef<str>,
+    tree: &git2::Tree<'a>,
+    parents: &[&git2::Commit<'a>],
+) -> crate::Result<git2::Oid>
+where
+    S: crate::keys::Signer + ?Sized,
+{
+    let aut = repo.signature()?;
+    let buf = repo.commit_create_buffer(&aut, &aut, msg.as_ref(), tree, parents)?;
+    let sig = {
+        let hash = ssh::HashAlg::Sha512;
+        let data = ssh::SshSig::signed_data(SSHSIG_NAMESPACE, hash, &buf)?;
+        let sig = signer.sign(&data)?;
+        ssh::SshSig::new(signer.ident().key_data(), SSHSIG_NAMESPACE, hash, sig)?
+            .to_pem(ssh::LineEnding::LF)?
+    };
+    let oid = repo.commit_signed(
+        buf.as_str().expect("commit buffer to be utf8"),
+        sig.as_str(),
+        None,
+    )?;
+
+    Ok(oid)
+}
+
+/// Verify the SSHSIG signature attached to commit `oid`, returning the
+/// public key embedded in the signature.
+///
+/// NOTE(review): the key is taken from the signature itself, so success
+/// only proves the commit was signed by the returned key -- whether that
+/// key is trustworthy must be established by the caller.
+pub fn verify_commit_signature(
+    repo: &git2::Repository,
+    oid: &git2::Oid,
+) -> crate::Result<ssh::PublicKey> {
+    let (sig, data) = repo.extract_signature(oid, None)?;
+    let sig = ssh::SshSig::from_pem(&*sig)?;
+    let pk = ssh::PublicKey::from(sig.public_key().clone());
+    pk.verify(SSHSIG_NAMESPACE, &data, &sig)?;
+
+    Ok(pk)
+}
diff --git a/src/git/config.rs b/src/git/config.rs
new file mode 100644
index 0000000..bc8dfcc
--- /dev/null
+++ b/src/git/config.rs
@@ -0,0 +1,31 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::ops::Deref;
+
+/// A read-only snapshot of a [`git2::Config`]
+///
+/// Reads go through [`Deref`] to the underlying snapshotted config.
+pub struct Snapshot(git2::Config);
+
+impl Deref for Snapshot {
+    type Target = git2::Config;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl TryFrom<git2::Config> for Snapshot {
+    type Error = git2::Error;
+
+    /// Snapshot an owned config handle.
+    fn try_from(mut cfg: git2::Config) -> Result<Self, Self::Error> {
+        cfg.snapshot().map(Self)
+    }
+}
+
+impl TryFrom<&mut git2::Config> for Snapshot {
+    type Error = git2::Error;
+
+    /// Snapshot a borrowed config handle, leaving the original usable.
+    fn try_from(cfg: &mut git2::Config) -> Result<Self, Self::Error> {
+        cfg.snapshot().map(Self)
+    }
+}
diff --git a/src/git/refs.rs b/src/git/refs.rs
new file mode 100644
index 0000000..5960434
--- /dev/null
+++ b/src/git/refs.rs
@@ -0,0 +1,327 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use core::{
+ fmt,
+ ops::Deref,
+ str::FromStr,
+};
+use std::{
+ borrow::Cow,
+ cell::Cell,
+ collections::HashMap,
+ path::Path,
+ rc::Rc,
+};
+
+pub const MAX_FILENAME: usize = 255;
+
+#[derive(Clone, Copy)]
+pub struct Options {
+ pub allow_onelevel: bool,
+ pub allow_pattern: bool,
+}
+
+pub mod error {
+ use thiserror::Error;
+
+ #[derive(Debug, Error)]
+ pub enum RefFormat {
+ #[error("empty input")]
+ Empty,
+ #[error("name too long")]
+ NameTooLong,
+ #[error("invalid character {0:?}")]
+ InvalidChar(char),
+ #[error("invalid character sequence {0:?}")]
+ InvalidSeq(&'static str),
+ #[error("must contain at least one '/'")]
+ OneLevel,
+ #[error("must contain at most one '*'")]
+ Pattern,
+ }
+}
+
+/// Validate `s` against (a subset of) git's `check-ref-format` rules.
+///
+/// Enforced: non-empty; not "@" or "."; no empty components ("//"); each
+/// component at most [`MAX_FILENAME`] bytes and not ending in ".lock";
+/// no ".." or "@{" sequences; no forbidden or ASCII control characters;
+/// no component starting or ending with '.'; at least two components
+/// unless `opts.allow_onelevel`; '*' only (and at most once) when
+/// `opts.allow_pattern`.
+pub fn check_ref_format(opts: Options, s: &str) -> Result<(), error::RefFormat> {
+    use error::RefFormat::*;
+
+    match s {
+        "" => Err(Empty),
+        "@" => Err(InvalidChar('@')),
+        "." => Err(InvalidChar('.')),
+        _ => {
+            let mut globs = 0;
+            let mut parts = 0;
+
+            for x in s.split('/') {
+                if x.is_empty() {
+                    return Err(InvalidSeq("//"));
+                }
+                if x.len() > MAX_FILENAME {
+                    return Err(NameTooLong);
+                }
+
+                parts += 1;
+
+                if x.ends_with(".lock") {
+                    return Err(InvalidSeq(".lock"));
+                }
+
+                // NOTE(review): byte index, compared against a *char*
+                // index below -- the two diverge for components with
+                // multi-byte characters.
+                let last_char = x.len() - 1;
+                // Pair each char with its successor to detect two-char
+                // sequences. NOTE(review): `cycle()` wraps the pairing
+                // around, so the last char is paired with the *first*;
+                // e.g. a component "{x@" would be rejected as "@{" --
+                // confirm intended.
+                for (i, y) in x.chars().zip(x.chars().cycle().skip(1)).enumerate() {
+                    match y {
+                        ('.', '.') => return Err(InvalidSeq("..")),
+                        ('@', '{') => return Err(InvalidSeq("@{")),
+                        ('*', _) => globs += 1,
+                        (z, _) => match z {
+                            '\0' | '\\' | '~' | '^' | ':' | '?' | '[' | ' ' => {
+                                return Err(InvalidChar(z))
+                            },
+                            '.' if i == 0 || i == last_char => return Err(InvalidChar('.')),
+                            _ if z.is_ascii_control() => return Err(InvalidChar(z)),
+
+                            _ => continue,
+                        },
+                    }
+                }
+            }
+
+            if parts < 2 && !opts.allow_onelevel {
+                Err(OneLevel)
+            } else if globs > 1 && opts.allow_pattern {
+                Err(Pattern)
+            } else if globs > 0 && !opts.allow_pattern {
+                Err(InvalidChar('*'))
+            } else {
+                Ok(())
+            }
+        },
+    }
+}
+
+/// A valid git refname.
+///
+/// If the input starts with 'refs/`, it is taken verbatim (after validation),
+/// otherwise `refs/heads/' is prepended (ie. the input is considered a branch
+/// name).
+#[derive(
+ Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, ::serde::Serialize, ::serde::Deserialize,
+)]
+#[serde(try_from = "String")]
+pub struct Refname(String);
+
+impl Refname {
+    /// The conventional default branch, "refs/heads/main".
+    pub fn main() -> Self {
+        Self("refs/heads/main".into())
+    }
+
+    /// The historical default branch, "refs/heads/master".
+    pub fn master() -> Self {
+        Self("refs/heads/master".into())
+    }
+}
+
+impl fmt::Display for Refname {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str(self)
+    }
+}
+
+impl Deref for Refname {
+    type Target = str;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl AsRef<str> for Refname {
+    fn as_ref(&self) -> &str {
+        self
+    }
+}
+
+impl AsRef<Path> for Refname {
+    fn as_ref(&self) -> &Path {
+        Path::new(self.0.as_str())
+    }
+}
+
+impl From<Refname> for String {
+    fn from(r: Refname) -> Self {
+        r.0
+    }
+}
+
+impl FromStr for Refname {
+    type Err = error::RefFormat;
+
+    fn from_str(s: &str) -> core::result::Result<Self, Self::Err> {
+        Self::try_from(s.to_owned())
+    }
+}
+
+impl TryFrom<String> for Refname {
+    type Error = error::RefFormat;
+
+    /// Validate `value` (single-level names allowed, patterns rejected)
+    /// and fully qualify it: inputs not starting with "refs/" are taken
+    /// to be branch names and prefixed with "refs/heads/".
+    fn try_from(value: String) -> core::result::Result<Self, Self::Error> {
+        const OPTIONS: Options = Options {
+            allow_onelevel: true,
+            allow_pattern: false,
+        };
+
+        check_ref_format(OPTIONS, &value)?;
+        let name = if value.starts_with("refs/") {
+            value
+        } else {
+            format!("refs/heads/{}", value)
+        };
+
+        Ok(Self(name))
+    }
+}
+
+/// Iterator over reference names
+///
+/// [`git2::ReferenceNames`] is advertised as more efficient if only the
+/// reference names are needed, and not a full [`git2::Reference`]. However,
+/// that type has overly restrictive lifetime constraints (because,
+/// inexplicably, it does **not** consume [`git2::References`] even though
+/// the documentation claims so).
+///
+/// We can work around this by transforming the reference `&str` into some other
+/// type which is not subject to its lifetime.
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+pub struct ReferenceNames<'a, F> {
+ inner: git2::References<'a>,
+ trans: F,
+}
+
+impl<'a, F> ReferenceNames<'a, F> {
+ pub fn new(refs: git2::References<'a>, trans: F) -> Self {
+ Self { inner: refs, trans }
+ }
+}
+
+impl<'a, F, E, T> Iterator for ReferenceNames<'a, F>
+where
+ F: FnMut(&str) -> core::result::Result<T, E>,
+ E: From<git2::Error>,
+{
+ type Item = core::result::Result<T, E>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner
+ .names()
+ .next()
+ .map(|r| r.map_err(E::from).and_then(|name| (self.trans)(name)))
+ }
+}
+
+/// A ref transaction which allows the same ref to be locked more than
+/// once: every [`LockedRef`] handle for a name shares one pending
+/// [`Op`], and the operation last set before [`Transaction::commit`]
+/// wins.
+pub struct Transaction<'a> {
+    tx: git2::Transaction<'a>,
+    locked: HashMap<Refname, Rc<Cell<Op>>>,
+}
+
+impl<'a> Transaction<'a> {
+    pub fn new(repo: &'a git2::Repository) -> super::Result<Self> {
+        let tx = repo.transaction()?;
+        Ok(Self {
+            tx,
+            locked: HashMap::new(),
+        })
+    }
+
+    /// Lock `name` for update, or hand out another handle to a ref this
+    /// transaction has already locked. The underlying libgit2 lock is
+    /// taken only on first acquisition.
+    pub fn lock_ref(&mut self, name: Refname) -> super::Result<LockedRef> {
+        use std::collections::hash_map::Entry;
+
+        let lref = match self.locked.entry(name) {
+            Entry::Vacant(v) => {
+                let name = v.key().clone();
+                self.tx.lock_ref(&name)?;
+                let op = Rc::new(Cell::new(Op::default()));
+                v.insert(Rc::clone(&op));
+                LockedRef { name, op }
+            },
+            Entry::Occupied(v) => LockedRef {
+                name: v.key().clone(),
+                op: Rc::clone(v.get()),
+            },
+        };
+
+        Ok(lref)
+    }
+
+    /// Apply the recorded operation of every locked ref, then commit the
+    /// underlying transaction. Refs whose handles recorded no operation
+    /// ([`Op::None`]) are left unchanged.
+    pub fn commit(mut self) -> super::Result<()> {
+        for (name, op) in self.locked {
+            match op.take() {
+                Op::None => continue,
+                Op::DirTarget { target, reflog } => {
+                    self.tx.set_target(&name, target, None, &reflog)?
+                },
+                Op::SymTarget { target, reflog } => {
+                    self.tx.set_symbolic_target(&name, &target, None, &reflog)?
+                },
+                Op::Remove => self.tx.remove(&name)?,
+            }
+        }
+        self.tx.commit()
+    }
+}
+
+/// Pending operation on a locked ref, applied at commit time.
+#[derive(Debug, Default)]
+enum Op {
+    /// Leave the ref untouched (the default).
+    #[default]
+    None,
+    /// Point the ref directly at an object id.
+    DirTarget {
+        target: git2::Oid,
+        reflog: Cow<'static, str>,
+    },
+    /// Make the ref symbolic, pointing at another refname.
+    SymTarget {
+        target: Refname,
+        reflog: Cow<'static, str>,
+    },
+    /// Delete the ref.
+    #[allow(unused)]
+    Remove,
+}
+
+/// Handle to a ref locked within a [`Transaction`].
+///
+/// The setters below merely record the operation; nothing touches the
+/// repository until [`Transaction::commit`]. Setting again (also via
+/// another handle to the same ref) overwrites the previously recorded
+/// operation.
+pub struct LockedRef {
+    name: Refname,
+    op: Rc<Cell<Op>>,
+}
+
+impl LockedRef {
+    /// The name of the locked ref.
+    pub fn name(&self) -> &Refname {
+        &self.name
+    }
+
+    /// Queue a direct-target update with the given reflog message.
+    pub fn set_target<S: Into<Cow<'static, str>>>(&self, target: git2::Oid, reflog: S) {
+        self.op.set(Op::DirTarget {
+            target,
+            reflog: reflog.into(),
+        })
+    }
+
+    /// Queue a symbolic-target update with the given reflog message.
+    pub fn set_symbolic_target<S: Into<Cow<'static, str>>>(&self, target: Refname, reflog: S) {
+        self.op.set(Op::SymTarget {
+            target,
+            reflog: reflog.into(),
+        })
+    }
+
+    /// Queue deletion of the ref.
+    #[allow(unused)]
+    pub fn remove(&self) {
+        self.op.set(Op::Remove)
+    }
+}
+
+impl fmt::Display for LockedRef {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str(self.name())
+    }
+}
+
+impl From<LockedRef> for Refname {
+    fn from(LockedRef { name, .. }: LockedRef) -> Self {
+        name
+    }
+}
diff --git a/src/git/repo.rs b/src/git/repo.rs
new file mode 100644
index 0000000..3fb8a16
--- /dev/null
+++ b/src/git/repo.rs
@@ -0,0 +1,93 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ collections::HashSet,
+ ffi::OsString,
+ io::{
+ BufReader,
+ Seek,
+ Write,
+ },
+ iter,
+ path::Path,
+ result::Result as StdResult,
+};
+
+use super::{
+ if_not_found_then,
+ Result,
+};
+use crate::{
+ fs::LockedFile,
+ io::Lines,
+};
+
+/// Open the repository at `path`, honouring the `GIT_*` environment
+/// overrides (`FROM_ENV`).
+pub fn open<P: AsRef<Path>>(path: P) -> Result<git2::Repository> {
+    git2::Repository::open_ext(
+        path,
+        git2::RepositoryOpenFlags::FROM_ENV,
+        iter::empty::<OsString>(),
+    )
+}
+
+/// Like [`open`], but force treating the repository as bare.
+pub fn open_bare<P: AsRef<Path>>(path: P) -> Result<git2::Repository> {
+    git2::Repository::open_ext(
+        path,
+        git2::RepositoryOpenFlags::FROM_ENV | git2::RepositoryOpenFlags::BARE,
+        iter::empty::<OsString>(),
+    )
+}
+
+/// [`open`] the repository at `path`, or [`init`] it if none is found.
+pub fn open_or_init<P: AsRef<Path>>(path: P, opts: InitOpts) -> Result<git2::Repository> {
+    if_not_found_then(open(path.as_ref()), || init(path, opts))
+}
+
+/// Options for [`init`].
+pub struct InitOpts<'a> {
+    pub bare: bool,
+    pub description: &'a str,
+    pub initial_head: &'a str,
+}
+
+/// Initialise a new repository at `path`, creating leading directories
+/// as needed and refusing to re-initialise an existing repository.
+pub fn init<P: AsRef<Path>>(path: P, opts: InitOpts) -> Result<git2::Repository> {
+    git2::Repository::init_opts(
+        path,
+        git2::RepositoryInitOptions::new()
+            .no_reinit(true)
+            .mkdir(true)
+            .mkpath(true)
+            .bare(opts.bare)
+            .description(opts.description)
+            .initial_head(opts.initial_head),
+    )
+}
+
+/// Register the object databases of `alt` as alternates of `repo`, both
+/// for the current process (via the odb) and persistently in
+/// `objects/info/alternates`.
+///
+/// The alternates file is edited under a git-style lock: existing
+/// entries are read first, only previously-unknown paths are appended,
+/// and the result is renamed into place atomically via `persist`.
+pub fn add_alternates<'a, I>(repo: &git2::Repository, alt: I) -> crate::Result<()>
+where
+    I: IntoIterator<Item = &'a git2::Repository>,
+{
+    let (mut persistent, known) = {
+        let mut lock = LockedFile::atomic(
+            repo.path().join("objects").join("info").join("alternates"),
+            false,
+            LockedFile::DEFAULT_PERMISSIONS,
+        )?;
+        // The lock file was pre-populated with the existing alternates
+        // (truncate = false); read them so duplicates aren't re-appended.
+        lock.seek(std::io::SeekFrom::Start(0))?;
+        let mut bufread = BufReader::new(lock);
+        let known = Lines::new(&mut bufread).collect::<StdResult<HashSet<String>, _>>()?;
+        (bufread.into_inner(), known)
+    };
+    {
+        let odb = repo.odb()?;
+        for alternate in alt {
+            let path = format!("{}", alternate.path().join("objects").display());
+            odb.add_disk_alternate(&path)?;
+            if !known.contains(&path) {
+                writeln!(&mut persistent, "{}", path)?
+            }
+        }
+    }
+    persistent.persist()?;
+
+    Ok(())
+}
diff --git a/src/git/serde.rs b/src/git/serde.rs
new file mode 100644
index 0000000..e20df47
--- /dev/null
+++ b/src/git/serde.rs
@@ -0,0 +1,61 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::str::FromStr;
+
+use serde::{
+ Deserialize,
+ Deserializer,
+ Serialize,
+ Serializer,
+};
+
+pub mod oid {
+ use super::*;
+
+ #[derive(serde::Serialize, serde::Deserialize)]
+ pub struct Oid(#[serde(with = "self")] pub git2::Oid);
+
+ impl From<git2::Oid> for Oid {
+ fn from(oid: git2::Oid) -> Self {
+ Self(oid)
+ }
+ }
+
+ pub fn serialize<S>(oid: &git2::Oid, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ serializer.serialize_str(&oid.to_string())
+ }
+
+ pub fn deserialize<'de, D>(deserializer: D) -> Result<git2::Oid, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ let hex: &str = Deserialize::deserialize(deserializer)?;
+ git2::Oid::from_str(hex).map_err(serde::de::Error::custom)
+ }
+
+ pub mod option {
+ use super::*;
+
+ pub fn serialize<S>(oid: &Option<git2::Oid>, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ oid.as_ref().map(ToString::to_string).serialize(serializer)
+ }
+
+ #[allow(unused)]
+ pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<git2::Oid>, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ let hex: Option<&str> = Deserialize::deserialize(deserializer)?;
+ hex.map(FromStr::from_str)
+ .transpose()
+ .map_err(serde::de::Error::custom)
+ }
+ }
+}
diff --git a/src/http.rs b/src/http.rs
new file mode 100644
index 0000000..d52ef8f
--- /dev/null
+++ b/src/http.rs
@@ -0,0 +1,355 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ fs::File,
+ io::Cursor,
+ net::ToSocketAddrs,
+ path::{
+ Path,
+ PathBuf,
+ },
+ sync::{
+ Arc,
+ Mutex,
+ },
+};
+
+use log::{
+ debug,
+ error,
+};
+use once_cell::sync::Lazy;
+use sha2::{
+ Digest,
+ Sha256,
+};
+use threadpool::ThreadPool;
+use tiny_http::{
+ Header,
+ HeaderField,
+ Method,
+ Request,
+ Response,
+ ServerConfig,
+ StatusCode,
+};
+use url::Url;
+
+use crate::{
+ bundle,
+ git,
+ keys,
+ patches::{
+ self,
+ AcceptArgs,
+ AcceptOptions,
+ },
+ ssh::agent,
+};
+
+pub use tiny_http::SslConfig;
+
/// Options for [`serve`].
pub struct Options {
    /// Directory of the drop repo
    pub git_dir: PathBuf,
    /// Directory from where to serve bundles
    ///
    /// Unless absolute, treated as relative to GIT_DIR.
    pub bundle_dir: PathBuf,
    /// Ref prefix under which to store the refs contained in patch bundles
    pub unbundle_prefix: String,
    /// The refname of the drop history
    pub drop_ref: String,
    /// The refname anchoring the seen objects tree
    pub seen_ref: String,
    /// Size of the server's threadpool
    ///
    /// If `None`, the number of available CPUs is used.
    pub threads: Option<usize>,
    /// Certificate and key for `serve`ing over TLS.
    ///
    /// It is generally recommended to proxy behind a terminating web server and
    /// set this to `None`.
    pub tls: Option<SslConfig>,
    /// IPFS API to publish received bundles to
    pub ipfs_api: Option<Url>,
}
+
+pub fn serve<A>(addr: A, opts: Options) -> !
+where
+ A: ToSocketAddrs,
+{
+ let executor = ThreadPool::new(opts.threads.unwrap_or_else(num_cpus::get));
+ let server = tiny_http::Server::new(ServerConfig {
+ addr,
+ ssl: opts.tls,
+ })
+ .unwrap();
+
+ let repo = git::repo::open(&opts.git_dir).unwrap();
+ let config = repo.config().unwrap();
+
+ let git_dir = repo.path().to_owned();
+ let bundle_dir = if opts.bundle_dir.is_relative() {
+ git_dir.join(opts.bundle_dir)
+ } else {
+ opts.bundle_dir
+ };
+
+ let signer = keys::Agent::from_gitconfig(&config).unwrap();
+
+ let handler = Arc::new(Handler {
+ repo: Mutex::new(repo),
+ signer: Mutex::new(signer),
+ bundle_dir,
+ unbundle_prefix: opts.unbundle_prefix,
+ drop_ref: opts.drop_ref,
+ seen_ref: opts.seen_ref,
+ ipfs_api: opts.ipfs_api,
+ });
+ for req in server.incoming_requests() {
+ let handler = Arc::clone(&handler);
+ executor.execute(move || handler.route(req))
+ }
+
+ panic!("server died unexpectedly");
+}
+
// Headers shared by all responses, parsed once on first use.
static CONTENT_TYPE: Lazy<HeaderField> = Lazy::new(|| "Content-Type".parse().unwrap());

static OCTET_STREAM: Lazy<Header> = Lazy::new(|| Header {
    field: CONTENT_TYPE.clone(),
    value: "application/octet-stream".parse().unwrap(),
});
static TEXT_PLAIN: Lazy<Header> = Lazy::new(|| Header {
    field: CONTENT_TYPE.clone(),
    value: "text/plain".parse().unwrap(),
});
static JSON: Lazy<Header> = Lazy::new(|| Header {
    field: CONTENT_TYPE.clone(),
    value: "application/json".parse().unwrap(),
});
// `Server: it/<crate version>`, attached to every response.
static SERVER: Lazy<Header> = Lazy::new(|| Header {
    field: "Server".parse().unwrap(),
    value: format!("it/{}", env!("CARGO_PKG_VERSION", "unknown"))
        .parse()
        .unwrap(),
});

/// The response shapes this server produces.
enum Resp {
    /// Status code, no body.
    Empty {
        code: StatusCode,
    },
    /// `text/plain` body.
    Text {
        code: StatusCode,
        body: String,
    },
    /// A file streamed as `application/octet-stream` with status 200.
    File {
        file: File,
    },
    /// JSON-encoded body.
    Json {
        code: StatusCode,
        body: Box<dyn erased_serde::Serialize>,
    },
}

impl Resp {
    const OK: Self = Self::Empty {
        code: StatusCode(200),
    };
    const NOT_FOUND: Self = Self::Empty {
        code: StatusCode(404),
    };
    const METHOD_NOT_ALLOWED: Self = Self::Empty {
        code: StatusCode(405),
    };
    const INTERNAL_SERVER_ERROR: Self = Self::Empty {
        code: StatusCode(500),
    };

    /// Send this response on `req`, consuming the request.
    ///
    /// Failures to write the response are logged, not propagated.
    fn respond_to(self, req: Request) {
        let remote_addr = *req.remote_addr();
        // Template carrying the `Server` header; each arm overrides the
        // status code (the initial 500 is always replaced).
        let response = Response::empty(500).with_header(SERVER.clone());
        let res = match self {
            Self::Empty { code } => req.respond(response.with_status_code(code)),
            Self::Text { code, body } => {
                let len = body.len();
                req.respond(
                    response
                        .with_status_code(code)
                        .with_header(TEXT_PLAIN.clone())
                        .with_data(Cursor::new(body.into_bytes()), Some(len)),
                )
            },
            Self::File { file } => {
                // Content-Length is best-effort: omitted if metadata fails.
                let len = file.metadata().ok().and_then(|v| v.len().try_into().ok());
                req.respond(
                    response
                        .with_status_code(200)
                        .with_header(OCTET_STREAM.clone())
                        .with_data(file, len),
                )
            },
            Self::Json { code, body } => {
                let json = serde_json::to_vec(&body).unwrap();
                let len = json.len();
                req.respond(
                    response
                        .with_status_code(code)
                        .with_header(JSON.clone())
                        .with_data(Cursor::new(json), Some(len)),
                )
            },
        };

        if let Err(e) = res {
            error!("failed to send response to {remote_addr}: {e}");
        }
    }
}

impl From<StatusCode> for Resp {
    fn from(code: StatusCode) -> Self {
        Self::Empty { code }
    }
}

/// State shared by all request-handling threads.
struct Handler {
    // Wrapped in mutexes so the handler can be shared across pool threads.
    repo: Mutex<git2::Repository>,
    signer: Mutex<keys::Agent<agent::UnixStream>>,
    bundle_dir: PathBuf,
    unbundle_prefix: String,
    drop_ref: String,
    seen_ref: String,
    ipfs_api: Option<Url>,
}
+
impl Handler {
    /// Dispatch a request by method and path.
    fn route(&self, mut req: Request) {
        use Method::*;

        debug!("{} {}", req.method(), req.url());
        let resp = match req.method() {
            Get => match &request_target(&req)[..] {
                // Health-check endpoint.
                ["-", "status"] => Resp::OK,
                ["bundles", hash] => self.get_bundle(hash),
                _ => Resp::NOT_FOUND,
            },

            Post => match &request_target(&req)[..] {
                ["patches"] => self.post_patch(&mut req),
                _ => Resp::NOT_FOUND,
            },

            _ => Resp::METHOD_NOT_ALLOWED,
        };

        resp.respond_to(req)
    }

    /// Serve a bundle, or a bundle list, from `bundle_dir`.
    ///
    /// `hash` may carry an explicit extension selecting the bundle list or
    /// the bundle itself; with no extension, the bundle is preferred and the
    /// list is the fallback.
    fn get_bundle(&self, hash: &str) -> Resp {
        // Reject anything that is not a well-formed bundle hash, and anchor
        // it under `root` (presumably this also rules out path traversal —
        // confirm `bundle::Hash::is_valid`). `Err` carries the ready-made
        // 400 response.
        fn base_path(root: &Path, s: &str) -> Result<PathBuf, Resp> {
            bundle::Hash::is_valid(s)
                .then(|| root.join(s))
                .ok_or_else(|| Resp::Text {
                    code: 400.into(),
                    body: "invalid bundle hash".into(),
                })
        }

        if let Some(hash) = hash.strip_suffix(bundle::list::DOT_FILE_EXTENSION) {
            // Explicit bundle-list extension.
            base_path(&self.bundle_dir, hash).map_or_else(
                |x| x,
                |base| {
                    let path = base.with_extension(bundle::list::FILE_EXTENSION);
                    // No stored list, but the bundle itself exists:
                    // synthesise a default list pointing at it.
                    if !path.exists() && base.with_extension(bundle::FILE_EXTENSION).exists() {
                        default_bundle_list(hash)
                    } else {
                        serve_file(path)
                    }
                },
            )
        } else if let Some(hash) = hash.strip_suffix(bundle::DOT_FILE_EXTENSION) {
            // Explicit bundle extension.
            base_path(&self.bundle_dir, hash).map_or_else(
                |x| x,
                |mut path| {
                    path.set_extension(bundle::FILE_EXTENSION);
                    serve_file(path)
                },
            )
        } else {
            // No extension: try the bundle first, fall back to the list.
            base_path(&self.bundle_dir, hash).map_or_else(
                |x| x,
                |mut base| {
                    base.set_extension(bundle::FILE_EXTENSION);
                    if !base.exists() {
                        base.set_extension(bundle::list::FILE_EXTENSION);
                    }
                    serve_file(base)
                },
            )
        }
    }

    /// Accept a patch submission: parse it from the request body, then try
    /// to accept it while holding the repo and signer locks.
    ///
    /// Responds 200 with the acceptance record as JSON on success, 400 with
    /// the error message otherwise.
    fn post_patch(&self, req: &mut Request) -> Resp {
        patches::Submission::from_http(&self.bundle_dir, req)
            .and_then(|mut sub| {
                let repo = self.repo.lock().unwrap();
                let mut signer = self.signer.lock().unwrap();
                sub.try_accept(AcceptArgs {
                    unbundle_prefix: &self.unbundle_prefix,
                    drop_ref: &self.drop_ref,
                    seen_ref: &self.seen_ref,
                    repo: &repo,
                    signer: &mut *signer,
                    ipfs_api: self.ipfs_api.as_ref(),
                    options: AcceptOptions::default(),
                })
            })
            .map(|record| Resp::Json {
                code: 200.into(),
                body: Box::new(record),
            })
            .unwrap_or_else(|e| Resp::Text {
                code: 400.into(),
                body: e.to_string(),
            })
    }
}
+
+// We've been calling this "request URL", but acc. to RFC7230 it is the
+// "request-target".
+fn request_target(req: &Request) -> Vec<&str> {
+ req.url().split('/').filter(|s| !s.is_empty()).collect()
+}
+
+fn serve_file<P: AsRef<Path>>(path: P) -> Resp {
+ let path = path.as_ref();
+ if path.exists() {
+ File::open(path)
+ .map(|file| Resp::File { file })
+ .unwrap_or_else(|e| {
+ error!("failed to open file {}: {e}", path.display());
+ Resp::INTERNAL_SERVER_ERROR
+ })
+ } else {
+ Resp::NOT_FOUND
+ }
+}
+
/// Synthesise a minimal bundle list advertising this server's own copy of
/// `<hash>.bundle`, for bundles that exist on disk without a stored list.
fn default_bundle_list(hash: &str) -> Resp {
    // NOTE(review): the GET route in `Handler::route` matches "bundles"
    // (plural), while this URI uses "/bundle/" — confirm clients resolve
    // this against a path that is actually served, or whether it should be
    // "/bundles/".
    let uri = bundle::Uri::Relative(format!("/bundle/{}.bundle", hash));
    let id = hex::encode(Sha256::digest(uri.as_str()));

    let body = bundle::List {
        bundles: vec![bundle::Location::new(id, uri)],
        ..bundle::List::any()
    }
    .to_str();

    Resp::Text {
        code: 200.into(),
        body,
    }
}
diff --git a/src/io.rs b/src/io.rs
new file mode 100644
index 0000000..86f91c6
--- /dev/null
+++ b/src/io.rs
@@ -0,0 +1,146 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use sha2::{
+ digest::generic_array::GenericArray,
+ Digest,
+};
+
/// Created by [`Lines::until_blank`], stops iteration at the first blank line.
pub struct UntilBlank<B> {
    inner: Lines<B>,
}

impl<B> Iterator for UntilBlank<B>
where
    B: std::io::BufRead,
{
    type Item = std::io::Result<String>;

    fn next(&mut self) -> Option<Self::Item> {
        // An empty (fully trimmed) line terminates the iteration; errors are
        // passed through.
        match self.inner.next()? {
            Ok(line) if line.is_empty() => None,
            other => Some(other),
        }
    }
}

impl<B> std::io::Seek for UntilBlank<B>
where
    B: std::io::Seek,
{
    fn seek(&mut self, pos: std::io::SeekFrom) -> std::io::Result<u64> {
        self.inner.seek(pos)
    }
}

/// Like [`std::io::Lines`], but allows to retain ownership of the underlying
/// [`std::io::BufRead`].
pub struct Lines<B> {
    buf: B,
}

impl<B> Lines<B>
where
    B: std::io::BufRead,
{
    /// Wrap `buf` for line-wise iteration.
    pub fn new(buf: B) -> Self {
        Self { buf }
    }

    /// Stop at the first blank line (see [`UntilBlank`]).
    pub fn until_blank(self) -> UntilBlank<B> {
        UntilBlank { inner: self }
    }
}

impl<B> Iterator for Lines<B>
where
    B: std::io::BufRead,
{
    type Item = std::io::Result<String>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut line = String::new();
        match self.buf.read_line(&mut line) {
            Err(e) => Some(Err(e)),
            // EOF.
            Ok(0) => None,
            Ok(_) => {
                // Strip the trailing newline, tolerating CRLF endings.
                if line.ends_with('\n') {
                    line.pop();
                    if line.ends_with('\r') {
                        line.pop();
                    }
                }
                Some(Ok(line))
            },
        }
    }
}

impl<B> std::io::Seek for Lines<B>
where
    B: std::io::Seek,
{
    fn seek(&mut self, pos: std::io::SeekFrom) -> std::io::Result<u64> {
        self.buf.seek(pos)
    }
}
+
+/// A [`std::io::Write`] which also computes a hash digest from the bytes
+/// written to it.
+pub struct HashWriter<D, W> {
+ hasher: D,
+ writer: W,
+}
+
+impl<D, W> HashWriter<D, W> {
+ pub fn new(hasher: D, writer: W) -> Self {
+ Self { hasher, writer }
+ }
+}
+
+impl<D, W> HashWriter<D, W>
+where
+ D: Digest,
+{
+ pub fn hash(self) -> GenericArray<u8, D::OutputSize> {
+ self.hasher.finalize()
+ }
+}
+
+impl<D, W> std::io::Write for HashWriter<D, W>
+where
+ D: Digest,
+ W: std::io::Write,
+{
+ fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
+ self.hasher.update(buf);
+ self.writer.write(buf)
+ }
+
+ fn flush(&mut self) -> std::io::Result<()> {
+ self.writer.flush()
+ }
+}
+
/// A [`std::io::Write`] which keeps track of the number of bytes written to it
pub struct LenWriter<W> {
    written: u64,
    writer: W,
}

impl<W> LenWriter<W> {
    /// Wrap `writer`, starting the byte counter at zero.
    pub fn new(writer: W) -> Self {
        Self { written: 0, writer }
    }

    /// Total number of bytes successfully written so far.
    pub fn bytes_written(&self) -> u64 {
        self.written
    }
}

impl<W: std::io::Write> std::io::Write for LenWriter<W> {
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        // Count only what the inner writer actually accepted.
        let accepted = self.writer.write(buf)?;
        self.written += accepted as u64;
        Ok(accepted)
    }

    fn flush(&mut self) -> std::io::Result<()> {
        self.writer.flush()
    }
}
diff --git a/src/iter.rs b/src/iter.rs
new file mode 100644
index 0000000..1289c52
--- /dev/null
+++ b/src/iter.rs
@@ -0,0 +1,109 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::ops::ControlFlow;
+
/// Iterator with a lazy fallible initialiser
///
/// It is a common pattern that instantiating an effectful iterator is fallible,
/// while traversing it is fallible, too. This yields unwieldy signatures like:
///
/// ```ignore
/// fn my_iterator() -> Result<impl Iterator<Item = Result<T, F>>, E>
/// ```
///
/// Often, however, we can unify the error types (`E` and `F` above), which
/// allows for the more pleasant pattern that constructing the iterator is
/// infallible, but an initialiser error is returned upon the first call to
/// `next()`. Ie.:
///
/// ```ignore
/// fn my_iterator() -> impl Iterator<Item = Result<T, E>>
/// ```
///
/// (The snippets above are marked `ignore` rather than `no_run` because they
/// reference free type parameters and would fail to *compile* as doc-tests.)
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct Iter<E, F, I, G> {
    /// Lazy initialiser; consumed on the first call to `next`/`next_back`.
    init: Option<F>,
    /// `None` until initialised; `Some(Err(_))` holds a pending init error.
    iter: Option<Result<I, E>>,
    /// Adapter applied to every item of the inner iterator.
    next: G,
}

impl<E, F, I, G> Iter<E, F, I, G> {
    pub fn new(init: F, next: G) -> Self {
        Self {
            init: Some(init),
            iter: None,
            next,
        }
    }
}

impl<E, F, I, G, T, U> Iterator for Iter<E, F, I, G>
where
    F: FnOnce() -> Result<I, E>,
    I: Iterator<Item = Result<T, E>>,
    G: FnMut(Result<T, E>) -> Option<Result<U, E>>,
{
    type Item = Result<U, E>;

    fn next(&mut self) -> Option<Self::Item> {
        match self.iter.take() {
            None => {
                // Not yet initialised: run the initialiser, then recurse.
                // Once `init` is `None` as well, the iterator is exhausted.
                let init = self.init.take()?;
                self.iter = Some(init());
                self.next()
            },
            // Yield the init error once; `iter` stays `None` afterwards, so
            // the next call terminates via the branch above.
            Some(Err(e)) => Some(Err(e)),
            Some(Ok(mut iter)) => {
                let item = iter.next()?;
                let next = (self.next)(item);
                self.iter = Some(Ok(iter));
                next
            },
        }
    }
}

impl<E, F, I, G, T, U> DoubleEndedIterator for Iter<E, F, I, G>
where
    F: FnOnce() -> Result<I, E>,
    I: Iterator<Item = Result<T, E>> + DoubleEndedIterator,
    G: FnMut(Result<T, E>) -> Option<Result<U, E>>,
{
    fn next_back(&mut self) -> Option<Self::Item> {
        // Mirrors `next`, traversing the inner iterator from the back.
        match self.iter.take() {
            None => {
                let init = self.init.take()?;
                self.iter = Some(init());
                self.next_back()
            },
            Some(Err(e)) => Some(Err(e)),
            Some(Ok(mut iter)) => {
                let item = iter.next_back()?;
                let next = (self.next)(item);
                self.iter = Some(Ok(iter));
                next
            },
        }
    }
}
+
/// Crate-internal iterator extensions.
pub(crate) trait IteratorExt {
    /// Apply the fallible, filtering `f` to each item, returning the first
    /// `Some` it produces.
    ///
    /// Short-circuits on the first error (converted into [`crate::Error`])
    /// or the first hit; yields `Ok(None)` if the iterator is exhausted
    /// without either.
    fn try_find_map<F, T, E>(&mut self, mut f: F) -> crate::Result<Option<T>>
    where
        Self: Iterator + Sized,
        F: FnMut(Self::Item) -> Result<Option<T>, E>,
        E: Into<crate::Error>,
    {
        // `try_fold` with `ControlFlow` gives early exit on stable Rust.
        let x = self.try_fold((), |(), i| match f(i) {
            Err(e) => ControlFlow::Break(Err(e.into())),
            Ok(v) if v.is_some() => ControlFlow::Break(Ok(v)),
            Ok(_) => ControlFlow::Continue(()),
        });
        match x {
            ControlFlow::Continue(()) => Ok(None),
            ControlFlow::Break(v) => v,
        }
    }
}

impl<T: Iterator> IteratorExt for T {}
diff --git a/src/json.rs b/src/json.rs
new file mode 100644
index 0000000..52f6215
--- /dev/null
+++ b/src/json.rs
@@ -0,0 +1,49 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ fs::File,
+ io::BufReader,
+ path::Path,
+};
+
+use serde::{
+ de::DeserializeOwned,
+ Deserialize,
+ Serialize,
+};
+
+pub mod canonical;
+
/// Deserialise a `T` directly from a git blob's content.
pub fn from_blob<'a, T>(blob: &'a git2::Blob) -> crate::Result<T>
where
    T: Deserialize<'a>,
{
    Ok(serde_json::from_slice(blob.content())?)
}

/// Serialise `data` as pretty-printed JSON into a new blob in `repo`,
/// returning the blob's oid.
pub fn to_blob<T>(repo: &git2::Repository, data: &T) -> crate::Result<git2::Oid>
where
    T: Serialize,
{
    let mut writer = repo.blob_writer(None)?;
    serde_json::to_writer_pretty(&mut writer, data)?;
    Ok(writer.commit()?)
}

/// Deserialise a `T` from the JSON file at `path`.
pub fn from_file<P, T>(path: P) -> crate::Result<T>
where
    P: AsRef<Path>,
    T: DeserializeOwned,
{
    let file = File::open(path)?;
    Ok(serde_json::from_reader(BufReader::new(file))?)
}

/// Alias for [`from_file`].
pub fn load<P, T>(path: P) -> crate::Result<T>
where
    P: AsRef<Path>,
    T: DeserializeOwned,
{
    from_file(path)
}
diff --git a/src/json/canonical.rs b/src/json/canonical.rs
new file mode 100644
index 0000000..6de9517
--- /dev/null
+++ b/src/json/canonical.rs
@@ -0,0 +1,166 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ collections::BTreeMap,
+ io::Write,
+};
+
+use unicode_normalization::{
+ is_nfc_quick,
+ IsNormalized,
+ UnicodeNormalization as _,
+};
+
+use crate::metadata;
+
pub mod error {
    use std::io;

    use thiserror::Error;

    /// Failure to produce canonical JSON (see [`super::to_writer`]).
    #[derive(Debug, Error)]
    pub enum Canonicalise {
        #[error(transparent)]
        Cjson(#[from] Float),

        #[error(transparent)]
        Json(#[from] serde_json::Error),

        #[error(transparent)]
        Io(#[from] io::Error),
    }

    /// Floating-point numbers are not representable in canonical JSON.
    #[derive(Debug, Error)]
    #[error("cannot canonicalise floating-point number")]
    pub struct Float;
}

/// A JSON value in canonical form: no floats, NFC-normalised strings, and
/// object keys ordered (via `BTreeMap`).
pub(crate) enum Value {
    Null,
    Bool(bool),
    Number(Number),
    String(String),
    Array(Vec<Value>),
    Object(BTreeMap<String, Value>),
}
+
+impl TryFrom<&serde_json::Value> for Value {
+ type Error = error::Float;
+
+ fn try_from(js: &serde_json::Value) -> Result<Self, Self::Error> {
+ match js {
+ serde_json::Value::Null => Ok(Self::Null),
+ serde_json::Value::Bool(b) => Ok(Self::Bool(*b)),
+ serde_json::Value::Number(n) => n
+ .as_i64()
+ .map(Number::I64)
+ .or_else(|| n.as_u64().map(Number::U64))
+ .map(Self::Number)
+ .ok_or(error::Float),
+ serde_json::Value::String(s) => Ok(Self::String(to_nfc(s))),
+ serde_json::Value::Array(v) => {
+ let mut out = Vec::with_capacity(v.len());
+ for w in v.iter().map(TryFrom::try_from) {
+ out.push(w?);
+ }
+ Ok(Self::Array(out))
+ },
+ serde_json::Value::Object(m) => {
+ let mut out = BTreeMap::new();
+ for (k, v) in m {
+ out.insert(to_nfc(k), Self::try_from(v)?);
+ }
+ Ok(Self::Object(out))
+ },
+ }
+ }
+}
+
impl TryFrom<&metadata::Custom> for Value {
    type Error = error::Float;

    /// Canonicalise a free-form metadata map; fails on float values.
    fn try_from(js: &metadata::Custom) -> Result<Self, Self::Error> {
        let mut out = BTreeMap::new();
        for (k, v) in js {
            out.insert(to_nfc(k), Self::try_from(v)?);
        }
        Ok(Self::Object(out))
    }
}

// Serialises structurally; map keys come out in `BTreeMap` (i.e. sorted)
// order, which is what makes the output canonical.
impl serde::Serialize for Value {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self {
            Value::Null => serializer.serialize_unit(),
            Value::Bool(b) => serializer.serialize_bool(*b),
            Value::Number(n) => n.serialize(serializer),
            Value::String(s) => serializer.serialize_str(s),
            Value::Array(v) => v.serialize(serializer),
            Value::Object(m) => {
                use serde::ser::SerializeMap;

                let mut map = serializer.serialize_map(Some(m.len()))?;
                for (k, v) in m {
                    map.serialize_entry(k, v)?;
                }
                map.end()
            },
        }
    }
}

/// An integer JSON number (floats are rejected during canonicalisation).
pub(crate) enum Number {
    I64(i64),
    U64(u64),
}

impl serde::Serialize for Number {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self {
            Number::I64(n) => serializer.serialize_i64(*n),
            Number::U64(n) => serializer.serialize_u64(*n),
        }
    }
}
+
+fn to_nfc(s: &String) -> String {
+ match is_nfc_quick(s.chars()) {
+ IsNormalized::Yes => s.clone(),
+ IsNormalized::No | IsNormalized::Maybe => s.nfc().collect(),
+ }
+}
+
/// Serialise `v` as canonical JSON into `out`.
///
/// Round-trips through `serde_json::Value` and [`Value`] to apply
/// canonicalisation (key ordering, NFC strings, float rejection) before
/// writing.
pub fn to_writer<W, T>(out: W, v: T) -> Result<(), error::Canonicalise>
where
    W: Write,
    T: serde::Serialize,
{
    let js = serde_json::to_value(v)?;
    let cj = Value::try_from(&js)?;
    serde_json::to_writer(out, &cj).map_err(|e| {
        // Distinguish transport (IO) errors from serialisation errors.
        if e.is_io() {
            error::Canonicalise::Io(e.into())
        } else {
            error::Canonicalise::Json(e)
        }
    })?;

    Ok(())
}

/// Serialise `v` as canonical JSON into a fresh buffer.
pub fn to_vec<T>(v: T) -> Result<Vec<u8>, error::Canonicalise>
where
    T: serde::Serialize,
{
    let mut buf = Vec::new();
    to_writer(&mut buf, v)?;

    Ok(buf)
}
diff --git a/src/keys.rs b/src/keys.rs
new file mode 100644
index 0000000..c6be894
--- /dev/null
+++ b/src/keys.rs
@@ -0,0 +1,206 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use core::fmt;
+use std::{
+ borrow::Cow,
+ io,
+ ops::{
+ Deref,
+ DerefMut,
+ },
+ str::FromStr,
+};
+
+use anyhow::anyhow;
+use signature::SignerMut;
+
+use crate::{
+ cfg,
+ metadata,
+ ssh::{
+ self,
+ agent,
+ },
+};
+
+pub type Signature = ssh::Signature;
+
/// Anything that can produce SSH signatures under a fixed identity.
pub trait Signer {
    /// The public key against which signatures from [`Signer::sign`] verify.
    fn ident(&self) -> VerificationKey;
    /// Sign `msg` with the identity's private key.
    fn sign(&mut self, msg: &[u8]) -> Result<ssh::Signature, signature::Error>;
}

impl<T> Signer for Box<T>
where
    T: Signer + ?Sized,
{
    fn ident(&self) -> VerificationKey {
        self.deref().ident()
    }

    fn sign(&mut self, msg: &[u8]) -> Result<ssh::Signature, signature::Error> {
        self.deref_mut().sign(msg)
    }
}

// An in-memory private key signs directly.
impl Signer for ssh::PrivateKey {
    fn ident(&self) -> VerificationKey {
        self.public_key().into()
    }

    fn sign(&mut self, msg: &[u8]) -> Result<ssh::Signature, signature::Error> {
        self.try_sign(msg)
    }
}

/// A [`Signer`] that delegates signing to an ssh-agent holding the private
/// key corresponding to `ident`.
pub struct Agent<T> {
    client: agent::Client<T>,
    ident: ssh::PublicKey,
}

impl Agent<agent::UnixStream> {
    /// Connect to the local ssh-agent (via `agent::Client::from_env`), using
    /// the signing key from the given git config as the identity.
    pub fn from_gitconfig(cfg: &git2::Config) -> crate::Result<Self> {
        let client = agent::Client::from_env()?;
        let ident = VerificationKey::from_gitconfig(cfg)?.0.into_owned();

        Ok(Self { client, ident })
    }

    /// Erase the concrete type behind `Box<dyn Signer>`.
    pub fn boxed(self) -> Box<dyn Signer> {
        Box::new(self)
    }

    /// Borrow as a trait object without boxing.
    pub fn as_dyn(&mut self) -> &mut dyn Signer {
        self
    }
}

impl<T> Agent<T> {
    /// Construct from an existing agent connection and identity.
    pub fn new(client: agent::Client<T>, key: VerificationKey<'_>) -> Self {
        let ident = key.0.into_owned();
        Self { client, ident }
    }

    /// The public key this agent signs for.
    pub fn verification_key(&self) -> VerificationKey {
        VerificationKey::from(&self.ident)
    }
}

impl<T> Signer for Agent<T>
where
    T: io::Read + io::Write,
{
    fn ident(&self) -> VerificationKey {
        self.verification_key()
    }

    fn sign(&mut self, msg: &[u8]) -> Result<ssh::Signature, signature::Error> {
        self.client
            .sign(&self.ident, msg)
            .map_err(signature::Error::from_source)
    }
}

// Mirror of the impl above, so a `&mut Agent` can be passed where a `Signer`
// value is expected.
impl<T> Signer for &mut Agent<T>
where
    T: io::Read + io::Write,
{
    fn ident(&self) -> VerificationKey {
        self.verification_key()
    }

    fn sign(&mut self, msg: &[u8]) -> Result<ssh::Signature, signature::Error> {
        self.client
            .sign(&self.ident, msg)
            .map_err(signature::Error::from_source)
    }
}

/// A public key used for verifying signatures; may borrow or own the key.
#[derive(Clone)]
pub struct VerificationKey<'a>(Cow<'a, ssh::PublicKey>);

impl<'a> VerificationKey<'a> {
    /// Parse from the single-line OpenSSH public key format.
    pub fn from_openssh(key: &str) -> Result<Self, ssh::Error> {
        ssh::PublicKey::from_openssh(key).map(Cow::Owned).map(Self)
    }

    /// Render in the single-line OpenSSH public key format.
    pub fn to_openssh(&self) -> Result<String, ssh::Error> {
        self.0.to_openssh()
    }

    /// Use the signing key configured in the given git config.
    pub fn from_gitconfig(cfg: &git2::Config) -> crate::Result<Self> {
        let key = cfg::git::signing_key(cfg)?
            .ok_or_else(|| anyhow!("unable to determine signing key from git config"))?
            .public()
            .to_owned();
        Ok(Self(Cow::Owned(key)))
    }

    pub fn algorithm(&self) -> ssh::Algorithm {
        self.0.algorithm()
    }

    /// Remove the key's comment in place (forces an owned copy of the key).
    pub fn strip_comment(&mut self) {
        self.0.to_mut().set_comment("")
    }

    /// Like [`Self::strip_comment`], by value.
    pub fn without_comment(mut self) -> Self {
        self.strip_comment();
        self
    }

    /// The key's SHA-256 fingerprint digest.
    pub fn sha256(&self) -> [u8; 32] {
        // unwrap: the fingerprint was requested with `HashAlg::Sha256`, so
        // `sha256()` is expected to be `Some` — TODO confirm against the
        // ssh-key API docs.
        self.0.fingerprint(ssh::HashAlg::Sha256).sha256().unwrap()
    }

    /// Clone into a key of arbitrary (owned) lifetime.
    pub fn to_owned<'b>(&self) -> VerificationKey<'b> {
        VerificationKey(Cow::Owned(self.0.clone().into_owned()))
    }

    /// The key id (derived from the SHA-256 fingerprint) used in metadata.
    pub fn keyid(&self) -> metadata::KeyId {
        metadata::KeyId::from(self)
    }

    pub(crate) fn key_data(&self) -> ssh::public::KeyData {
        self.as_ref().into()
    }
}

impl AsRef<ssh::PublicKey> for VerificationKey<'_> {
    fn as_ref(&self) -> &ssh::PublicKey {
        &self.0
    }
}

// Delegates to the inner key's `Display` (via `to_string`).
impl fmt::Display for VerificationKey<'_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(&self.0.to_string())
    }
}

impl From<ssh::PublicKey> for VerificationKey<'_> {
    fn from(key: ssh::PublicKey) -> Self {
        Self(Cow::Owned(key))
    }
}

impl<'a> From<&'a ssh::PublicKey> for VerificationKey<'a> {
    fn from(key: &'a ssh::PublicKey) -> Self {
        Self(Cow::Borrowed(key))
    }
}

// Parses the single-line OpenSSH format, like [`VerificationKey::from_openssh`].
impl FromStr for VerificationKey<'_> {
    type Err = ssh::Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Self::from_openssh(s)
    }
}

impl signature::Verifier<ssh::Signature> for VerificationKey<'_> {
    fn verify(&self, msg: &[u8], signature: &ssh::Signature) -> Result<(), signature::Error> {
        signature::Verifier::verify(&*self.0, msg, signature)
    }
}
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..789f99f
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,33 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
// Internal modules; the public surface is re-exported below.
mod bundle;
mod cfg;
mod fs;
mod git;
mod http;
mod io;
mod iter;
mod json;
mod keys;
mod metadata;
mod patches;
mod serde;
mod ssh;
mod str;

/// The metadata spec version implemented by this build.
pub const SPEC_VERSION: metadata::SpecVersion = metadata::SpecVersion::current();

pub mod cmd;
pub use cmd::{
    ui::Output,
    Cmd,
};

pub mod error;
pub use error::{
    Error,
    Result,
};

pub use cfg::paths;
diff --git a/src/metadata.rs b/src/metadata.rs
new file mode 100644
index 0000000..9caee96
--- /dev/null
+++ b/src/metadata.rs
@@ -0,0 +1,749 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use core::{
+ convert::TryFrom,
+ fmt,
+ ops::Deref,
+ str::FromStr,
+};
+use std::{
+ borrow::Cow,
+ collections::BTreeMap,
+ io,
+ marker::PhantomData,
+ ops::DerefMut,
+};
+
+use serde::ser::SerializeSeq;
+use sha2::{
+ Digest,
+ Sha512,
+};
+use time::{
+ Duration,
+ OffsetDateTime,
+ UtcOffset,
+};
+use versions::SemVer;
+
+use crate::{
+ git::blob_hash_sha2,
+ json::canonical,
+ keys::{
+ Signer,
+ VerificationKey,
+ },
+ ssh,
+};
+
+pub mod drop;
+pub use drop::Drop;
+
+pub mod error;
+pub mod git;
+
+mod mirrors;
+pub use mirrors::{
+ Alternates,
+ Mirrors,
+};
+
+pub mod identity;
+pub use identity::{
+ Identity,
+ IdentityId,
+};
+
/// Version of the metadata spec, with semver ordering.
#[derive(Clone, Eq, Ord, PartialEq, PartialOrd)]
pub struct SpecVersion(SemVer);

impl SpecVersion {
    /// The spec version implemented by this build.
    pub const fn current() -> Self {
        Self::new(0, 1, 0)
    }

    const fn new(major: u32, minor: u32, patch: u32) -> Self {
        Self(SemVer {
            major,
            minor,
            patch,
            pre_rel: None,
            meta: None,
        })
    }

    /// This spec version is compatible if its major version is greater than or
    /// equal to `other`'s
    // NOTE(review): ">=" is looser than strict semver compatibility (equal
    // major); presumably intentional per the spec — confirm against
    // Documentation/spec.adoc.
    pub fn is_compatible(&self, other: &Self) -> bool {
        self.0.major >= other.major()
    }

    pub fn major(&self) -> u32 {
        self.0.major
    }

    pub fn minor(&self) -> u32 {
        self.0.minor
    }

    pub fn patch(&self) -> u32 {
        self.0.patch
    }
}

/// Defaults to [`SpecVersion::current`].
impl Default for SpecVersion {
    fn default() -> Self {
        Self::current()
    }
}

impl fmt::Display for SpecVersion {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl FromStr for SpecVersion {
    type Err = <SemVer as FromStr>::Err;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        SemVer::from_str(s).map(Self)
    }
}

impl<'a> TryFrom<&'a str> for SpecVersion {
    type Error = <SemVer as TryFrom<&'a str>>::Error;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        SemVer::try_from(value).map(Self)
    }
}

impl AsRef<SemVer> for SpecVersion {
    fn as_ref(&self) -> &SemVer {
        &self.0
    }
}
+
+impl serde::Serialize for SpecVersion {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::Serializer,
+ {
+ serializer.serialize_str(&self.to_string())
+ }
+}
+
+impl<'de> serde::Deserialize<'de> for SpecVersion {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ let s: &str = serde::Deserialize::deserialize(deserializer)?;
+ Self::try_from(s).map_err(|_| serde::de::Error::custom("invalid version string"))
+ }
+}
+
+pub type Custom = serde_json::Map<String, serde_json::Value>;
+
+#[derive(
+ Clone, Copy, Eq, Ord, PartialEq, PartialOrd, Hash, serde::Serialize, serde::Deserialize,
+)]
+pub struct KeyId(#[serde(with = "hex::serde")] [u8; 32]);
+
+impl KeyId {
+ pub fn as_bytes(&self) -> &[u8] {
+ self.as_ref()
+ }
+}
+
+impl AsRef<[u8]> for KeyId {
+ fn as_ref(&self) -> &[u8] {
+ &self.0
+ }
+}
+
+impl From<&Key<'_>> for KeyId {
+ fn from(key: &Key<'_>) -> Self {
+ Self::from(&key.0)
+ }
+}
+
+impl From<Key<'_>> for KeyId {
+ fn from(key: Key<'_>) -> Self {
+ Self::from(key.0)
+ }
+}
+
+impl From<&VerificationKey<'_>> for KeyId {
+ fn from(key: &VerificationKey<'_>) -> Self {
+ Self(key.sha256())
+ }
+}
+
+impl From<VerificationKey<'_>> for KeyId {
+ fn from(key: VerificationKey<'_>) -> Self {
+ Self(key.sha256())
+ }
+}
+
+impl fmt::Display for KeyId {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str(&hex::encode(self.0))
+ }
+}
+
+impl fmt::Debug for KeyId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("KeyId").field(&hex::encode(self.0)).finish()
+ }
+}
+
+#[derive(Clone, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
+pub struct ContentHash {
+ #[serde(with = "hex::serde")]
+ pub sha1: [u8; 20],
+ #[serde(with = "hex::serde")]
+ pub sha2: [u8; 32],
+}
+
+impl ContentHash {
+ pub fn as_oid(&self) -> git2::Oid {
+ self.into()
+ }
+}
+
+impl From<&git2::Blob<'_>> for ContentHash {
+ fn from(blob: &git2::Blob) -> Self {
+ let sha1 = blob
+ .id()
+ .as_bytes()
+ .try_into()
+ .expect("libgit2 to support only sha1 oids");
+ let sha2 = blob_hash_sha2(blob.content());
+
+ Self { sha1, sha2 }
+ }
+}
+
+impl From<&ContentHash> for git2::Oid {
+ fn from(ContentHash { sha1, .. }: &ContentHash) -> Self {
+ Self::from_bytes(sha1).expect("20 bytes are a valid git2::Oid")
+ }
+}
+
+impl PartialEq<git2::Oid> for ContentHash {
+ fn eq(&self, other: &git2::Oid) -> bool {
+ self.sha1.as_slice() == other.as_bytes()
+ }
+}
+
+impl fmt::Debug for ContentHash {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("ContentHash")
+ .field("sha1", &hex::encode(self.sha1))
+ .field("sha2", &hex::encode(self.sha2))
+ .finish()
+ }
+}
+
+impl fmt::Display for ContentHash {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str(&hex::encode(self.sha1))
+ }
+}
+
+#[derive(
+ Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, serde::Serialize, serde::Deserialize,
+)]
+pub struct DateTime(#[serde(with = "time::serde::rfc3339")] OffsetDateTime);
+
+impl DateTime {
+ pub fn now() -> Self {
+ Self(time::OffsetDateTime::now_utc())
+ }
+
+ pub const fn checked_add(self, duration: Duration) -> Option<Self> {
+ // `map` is not const yet
+ match self.0.checked_add(duration) {
+ None => None,
+ Some(x) => Some(Self(x)),
+ }
+ }
+}
+
+impl FromStr for DateTime {
+ type Err = time::error::Parse;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ OffsetDateTime::parse(s, &time::format_description::well_known::Rfc3339)
+ .map(|dt| dt.to_offset(UtcOffset::UTC))
+ .map(Self)
+ }
+}
+
impl Deref for DateTime {
    type Target = time::OffsetDateTime;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

/// Envelope for all signed metadata payloads.
///
/// The serde `_type` tag identifies the payload kind on the wire, so a
/// document can be dispatched to the correct variant on deserialisation.
#[derive(serde::Serialize, serde::Deserialize)]
#[serde(tag = "_type")]
pub enum Metadata<'a> {
    #[serde(rename = "eagain.io/it/identity")]
    Identity(Cow<'a, Identity>),
    #[serde(rename = "eagain.io/it/drop")]
    Drop(Cow<'a, Drop>),
    #[serde(rename = "eagain.io/it/mirrors")]
    Mirrors(Cow<'a, Mirrors>),
    #[serde(rename = "eagain.io/it/alternates")]
    Alternates(Cow<'a, Alternates>),
}
+
impl<'a> Metadata<'a> {
    /// Wraps an [`Identity`] payload in the envelope.
    pub fn identity<T>(s: T) -> Self
    where
        T: Into<Cow<'a, Identity>>,
    {
        Self::Identity(s.into())
    }

    /// Wraps a [`Drop`] payload in the envelope.
    pub fn drop<T>(d: T) -> Self
    where
        T: Into<Cow<'a, Drop>>,
    {
        Self::Drop(d.into())
    }

    /// Wraps a [`Mirrors`] payload in the envelope.
    pub fn mirrors<T>(a: T) -> Self
    where
        T: Into<Cow<'a, Mirrors>>,
    {
        Self::Mirrors(a.into())
    }

    /// Wraps an [`Alternates`] payload in the envelope.
    pub fn alternates<T>(a: T) -> Self
    where
        T: Into<Cow<'a, Alternates>>,
    {
        Self::Alternates(a.into())
    }

    /// Signs this payload with each signer in `keys`.
    ///
    /// The message being signed is the SHA-512 digest of the canonical
    /// JSON encoding. Fails if canonicalisation fails, or if any
    /// individual signer fails.
    pub fn sign<'b, I, S>(self, keys: I) -> crate::Result<Signed<Self>>
    where
        I: IntoIterator<Item = &'b mut S>,
        S: Signer + ?Sized + 'b,
    {
        let payload = Sha512::digest(canonical::to_vec(&self)?);
        let signatures = keys
            .into_iter()
            .map(|signer| {
                let keyid = KeyId::from(signer.ident());
                let sig = signer.sign(&payload)?;
                Ok::<_, crate::Error>((keyid, Signature::from(sig)))
            })
            .collect::<Result<_, _>>()?;

        Ok(Signed {
            signed: self,
            signatures,
        })
    }
}
+
// Infallible conversions from the concrete payload types into the
// `Metadata` envelope, in owned and borrowed flavours.

impl From<Identity> for Metadata<'static> {
    fn from(s: Identity) -> Self {
        Self::identity(s)
    }
}

impl<'a> From<&'a Identity> for Metadata<'a> {
    fn from(s: &'a Identity) -> Self {
        Self::identity(s)
    }
}

impl From<Drop> for Metadata<'static> {
    fn from(d: Drop) -> Self {
        Self::drop(d)
    }
}

impl<'a> From<&'a Drop> for Metadata<'a> {
    fn from(d: &'a Drop) -> Self {
        Self::drop(d)
    }
}

impl From<Mirrors> for Metadata<'static> {
    fn from(m: Mirrors) -> Self {
        Self::mirrors(m)
    }
}

impl<'a> From<&'a Mirrors> for Metadata<'a> {
    fn from(m: &'a Mirrors) -> Self {
        Self::mirrors(m)
    }
}

impl From<Alternates> for Metadata<'static> {
    fn from(a: Alternates) -> Self {
        Self::alternates(a)
    }
}

impl<'a> From<&'a Alternates> for Metadata<'a> {
    fn from(a: &'a Alternates) -> Self {
        Self::alternates(a)
    }
}

// Fallible extraction of a concrete payload from the envelope. The
// envelope is handed back unchanged when the variant doesn't match.

impl<'a> TryFrom<Metadata<'a>> for Cow<'a, Identity> {
    type Error = Metadata<'a>;

    fn try_from(value: Metadata<'a>) -> Result<Self, Self::Error> {
        match value {
            Metadata::Identity(inner) => Ok(inner),
            _ => Err(value),
        }
    }
}

impl<'a> TryFrom<Metadata<'a>> for Cow<'a, Drop> {
    type Error = Metadata<'a>;

    fn try_from(value: Metadata<'a>) -> Result<Self, Self::Error> {
        match value {
            Metadata::Drop(inner) => Ok(inner),
            _ => Err(value),
        }
    }
}

impl<'a> TryFrom<Metadata<'a>> for Cow<'a, Mirrors> {
    type Error = Metadata<'a>;

    fn try_from(value: Metadata<'a>) -> Result<Self, Self::Error> {
        match value {
            Metadata::Mirrors(inner) => Ok(inner),
            _ => Err(value),
        }
    }
}

impl<'a> TryFrom<Metadata<'a>> for Cow<'a, Alternates> {
    type Error = Metadata<'a>;

    fn try_from(value: Metadata<'a>) -> Result<Self, Self::Error> {
        match value {
            Metadata::Alternates(inner) => Ok(inner),
            _ => Err(value),
        }
    }
}
+
/// A payload of type `T` paired with signatures, keyed by the id of the
/// signing key.
#[derive(Clone, serde::Serialize, serde::Deserialize)]
pub struct Signed<T> {
    pub signed: T,
    pub signatures: BTreeMap<KeyId, Signature>,
}

impl<T> Signed<T> {
    /// Maps the payload, carrying the signatures over unchanged.
    ///
    /// NOTE(review): the signatures were computed over the original
    /// payload; callers must only use mappings that preserve the signed
    /// canonical form (e.g. envelope (un)wrapping).
    pub fn fmap<U, F>(self, f: F) -> Signed<U>
    where
        F: FnOnce(T) -> U,
    {
        Signed {
            signed: f(self.signed),
            signatures: self.signatures,
        }
    }
}

impl<T, E> Signed<Result<T, E>> {
    /// Moves the `Result` out of the payload position.
    pub fn transpose(self) -> Result<Signed<T>, E> {
        let Self { signed, signatures } = self;
        signed.map(|signed| Signed { signed, signatures })
    }
}

impl<T: HasPrev> Signed<T> {
    /// Iterates over the ancestor revisions (most recent first) by
    /// repeatedly resolving `prev` hashes through `find_prev`.
    pub fn ancestors<F>(&self, find_prev: F) -> impl Iterator<Item = io::Result<Self>>
    where
        F: FnMut(&ContentHash) -> io::Result<Self>,
    {
        Ancestors {
            prev: self.signed.prev().cloned(),
            find_prev,
            _marker: PhantomData,
        }
    }

    /// Whether `ancestor` appears anywhere in the `prev` chain.
    pub fn has_ancestor<F>(&self, ancestor: &ContentHash, find_prev: F) -> io::Result<bool>
    where
        F: FnMut(&ContentHash) -> io::Result<Self>,
    {
        match self.signed.prev() {
            None => Ok(false),
            Some(parent) if parent == ancestor => Ok(true),
            Some(_) => {
                // Walk the chain, comparing each ancestor's own `prev`.
                for prev in self.ancestors(find_prev) {
                    match prev?.signed.prev() {
                        None => return Ok(false),
                        Some(parent) if parent == ancestor => return Ok(true),
                        _ => continue,
                    }
                }

                Ok(false)
            },
        }
    }
}
+
impl Signed<Drop> {
    /// Verifies the drop against its attached signatures; see
    /// [`Drop::verified`] for the rules.
    pub fn verified<'a, F, G>(
        self,
        find_prev: F,
        find_signer: G,
    ) -> Result<drop::Verified, error::Verification>
    where
        F: FnMut(&ContentHash) -> io::Result<Self>,
        G: FnMut(&IdentityId) -> io::Result<KeySet<'a>>,
    {
        self.signed
            .verified(&self.signatures, find_prev, find_signer)
    }
}

impl Signed<Identity> {
    /// Verifies the identity, consuming it; see [`Identity::verified`].
    pub fn verified<F>(self, find_prev: F) -> Result<identity::Verified, error::Verification>
    where
        F: FnMut(&ContentHash) -> io::Result<Self>,
    {
        self.signed.verified(&self.signatures, find_prev)
    }

    /// Verifies the identity by reference; see [`Identity::verify`].
    pub fn verify<F>(&self, find_prev: F) -> Result<IdentityId, error::Verification>
    where
        F: FnMut(&ContentHash) -> io::Result<Self>,
    {
        self.signed.verify(&self.signatures, find_prev)
    }
}

impl<T> AsRef<T> for Signed<T> {
    fn as_ref(&self) -> &T {
        &self.signed
    }
}

/// Iterator over a `prev` chain; see [`Signed::ancestors`].
struct Ancestors<T, F> {
    prev: Option<ContentHash>,
    find_prev: F,
    _marker: PhantomData<T>,
}

impl<T, F> Iterator for Ancestors<T, F>
where
    T: HasPrev,
    F: FnMut(&ContentHash) -> io::Result<Signed<T>>,
{
    type Item = io::Result<Signed<T>>;

    fn next(&mut self) -> Option<Self::Item> {
        // `prev` is `None` at the root revision, or after an error.
        let prev = self.prev.take()?;
        (self.find_prev)(&prev)
            .map(|parent| {
                self.prev = parent.signed.prev().cloned();
                Some(parent)
            })
            .transpose()
    }
}

/// Payloads which form a hash-linked revision chain.
pub trait HasPrev {
    /// The content hash of the previous revision, if any.
    fn prev(&self) -> Option<&ContentHash>;
}

impl HasPrev for Identity {
    fn prev(&self) -> Option<&ContentHash> {
        self.prev.as_ref()
    }
}

impl HasPrev for Drop {
    fn prev(&self) -> Option<&ContentHash> {
        self.prev.as_ref()
    }
}
+
/// A public key used for signature verification.
#[derive(Clone)]
pub struct Key<'a>(VerificationKey<'a>);

impl Key<'_> {
    /// The key's id (derived from the key itself).
    pub fn id(&self) -> KeyId {
        self.into()
    }
}

impl fmt::Debug for Key<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Key").field(&self.0.to_string()).finish()
    }
}

impl<'a> From<VerificationKey<'a>> for Key<'a> {
    fn from(vk: VerificationKey<'a>) -> Self {
        // The comment is not key material, so it is stripped.
        Self(vk.without_comment())
    }
}

impl signature::Verifier<Signature> for Key<'_> {
    /// Verifies `signature` over `msg`, interpreting the raw bytes as an
    /// SSH signature of this key's algorithm.
    fn verify(&self, msg: &[u8], signature: &Signature) -> Result<(), signature::Error> {
        let ssh = ssh::Signature::new(self.0.algorithm(), signature.as_ref())?;
        self.0.verify(msg, &ssh)
    }
}

impl serde::Serialize for Key<'_> {
    /// Serialises as the OpenSSH string representation.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(&self.0.to_openssh().map_err(serde::ser::Error::custom)?)
    }
}
+
+impl<'de> serde::Deserialize<'de> for Key<'_> {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ let s: &str = serde::Deserialize::deserialize(deserializer)?;
+ VerificationKey::from_openssh(s)
+ .map(Self)
+ .map_err(serde::de::Error::custom)
+ }
+}
+
impl FromStr for Key<'_> {
    type Err = ssh_key::Error;

    /// Parses a key from its OpenSSH string form.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        VerificationKey::from_openssh(s).map(Self)
    }
}

/// A collection of [`Key`]s, indexed by their [`KeyId`].
#[derive(Clone, Default)]
pub struct KeySet<'a>(BTreeMap<KeyId, Key<'a>>);

impl<'a> Deref for KeySet<'a> {
    type Target = BTreeMap<KeyId, Key<'a>>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<'a> DerefMut for KeySet<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
+
+impl<'a> FromIterator<Key<'a>> for KeySet<'a> {
+ fn from_iter<T: IntoIterator<Item = Key<'a>>>(iter: T) -> Self {
+ let mut kv = BTreeMap::new();
+ for key in iter {
+ kv.insert(KeyId::from(&key), key);
+ }
+ Self(kv)
+ }
+}
+
+impl serde::Serialize for KeySet<'_> {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::Serializer,
+ {
+ let mut seq = serializer.serialize_seq(Some(self.0.len()))?;
+ for key in self.0.values() {
+ seq.serialize_element(key)?;
+ }
+ seq.end()
+ }
+}
+
impl<'de> serde::Deserialize<'de> for KeySet<'static> {
    /// Deserialises from a sequence of keys, re-deriving each key's id.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        struct Visitor;

        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = KeySet<'static>;

            fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.write_str("a sequence of keys")
            }

            fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
            where
                A: serde::de::SeqAccess<'de>,
            {
                let mut kv = BTreeMap::new();
                while let Some(key) = seq.next_element()? {
                    kv.insert(KeyId::from(&key), key);
                }

                Ok(KeySet(kv))
            }
        }

        deserializer.deserialize_seq(Visitor)
    }
}

/// A detached signature over a metadata payload, serialised as hex.
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct Signature(#[serde(with = "hex::serde")] Vec<u8>);

impl From<ssh::Signature> for Signature {
    fn from(sig: ssh::Signature) -> Self {
        Self(sig.as_bytes().to_vec())
    }
}

impl AsRef<[u8]> for Signature {
    fn as_ref(&self) -> &[u8] {
        self.0.as_ref()
    }
}

impl signature::Signature for Signature {
    fn from_bytes(bytes: &[u8]) -> Result<Self, signature::Error> {
        Ok(Self(bytes.to_vec()))
    }
}
+
/// Wrapper marking a `T` as having passed verification.
///
/// Can only be constructed within this module, so holding one is proof
/// that verification succeeded.
pub struct Verified<T>(T);

impl<T> Verified<T> {
    /// Unwraps the verified value.
    pub fn into_inner(self) -> T {
        let Verified(inner) = self;
        inner
    }
}

impl<T> Deref for Verified<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        let Verified(inner) = self;
        inner
    }
}
diff --git a/src/metadata/drop.rs b/src/metadata/drop.rs
new file mode 100644
index 0000000..d231712
--- /dev/null
+++ b/src/metadata/drop.rs
@@ -0,0 +1,274 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ borrow::Cow,
+ collections::{
+ BTreeMap,
+ BTreeSet,
+ HashMap,
+ },
+ io,
+ num::NonZeroUsize,
+};
+
+use log::warn;
+use sha2::{
+ Digest,
+ Sha512,
+};
+use signature::Verifier;
+
+use super::{
+ error,
+ Alternates,
+ ContentHash,
+ Custom,
+ DateTime,
+ IdentityId,
+ KeyId,
+ KeySet,
+ Metadata,
+ Mirrors,
+ Signature,
+ Signed,
+ SpecVersion,
+};
+use crate::{
+ git::Refname,
+ json::canonical,
+ str::Varchar,
+};
+
/// The role assignments of a drop: who may update the drop metadata
/// itself (`root`), cut snapshots, attest mirrors, and update each
/// branch.
#[derive(Clone, serde::Serialize, serde::Deserialize)]
pub struct Roles {
    pub root: Role,
    pub snapshot: Role,
    pub mirrors: Role,
    pub branches: HashMap<Refname, Annotated>,
}

impl Roles {
    /// All identity ids mentioned in any role, deduplicated.
    pub(crate) fn ids(&self) -> BTreeSet<IdentityId> {
        let Self {
            root: Role { ids: root, .. },
            snapshot: Role { ids: snapshot, .. },
            mirrors: Role { ids: mirrors, .. },
            branches,
        } = self;

        let mut ids = BTreeSet::new();
        ids.extend(root);
        ids.extend(snapshot);
        ids.extend(mirrors);
        ids.extend(branches.values().flat_map(|a| &a.role.ids));
        ids
    }
}

/// A set of authorised identities together with the minimum number of
/// them whose signatures are required.
#[derive(Clone, serde::Serialize, serde::Deserialize)]
pub struct Role {
    pub ids: BTreeSet<IdentityId>,
    pub threshold: NonZeroUsize,
}

// Free-form, length-limited description text.
pub type Description = Varchar<String, 128>;

/// A [`Role`] carrying a human-readable description (used for branches).
#[derive(Clone, serde::Serialize, serde::Deserialize)]
pub struct Annotated {
    #[serde(flatten)]
    pub role: Role,
    pub description: Description,
}

// A `Drop` which has passed signature verification.
pub type Verified = super::Verified<Drop>;

/// Drop metadata: the top-level document which delegates trust via
/// [`Roles`] and links to its previous revision via `prev`.
#[derive(Clone, serde::Serialize, serde::Deserialize)]
pub struct Drop {
    pub spec_version: SpecVersion,
    #[serde(default = "Description::new")]
    pub description: Description,
    /// Content hash of the previous revision, `None` at the root.
    pub prev: Option<ContentHash>,
    pub roles: Roles,
    #[serde(default)]
    pub custom: Custom,
}
+
impl Drop {
    /// Verifies this drop and wraps it as [`Verified`] on success; see
    /// [`Drop::verify`] for the rules.
    pub fn verified<'a, F, G>(
        self,
        signatures: &BTreeMap<KeyId, Signature>,
        find_prev: F,
        find_signer: G,
    ) -> Result<Verified, error::Verification>
    where
        F: FnMut(&ContentHash) -> io::Result<Signed<Self>>,
        G: FnMut(&IdentityId) -> io::Result<KeySet<'a>>,
    {
        self.verify(signatures, find_prev, find_signer)?;
        Ok(super::Verified(self))
    }

    /// Verifies the revision chain of this drop.
    ///
    /// The head revision must meet the `root` role's threshold with its
    /// own authorised signers and — when a previous revision exists —
    /// also the *previous* revision's root threshold, so role or key
    /// rotations are countersigned. Verification then recurses down the
    /// `prev` chain.
    pub fn verify<'a, F, G>(
        &self,
        signatures: &BTreeMap<KeyId, Signature>,
        mut find_prev: F,
        mut find_signer: G,
    ) -> Result<(), error::Verification>
    where
        F: FnMut(&ContentHash) -> io::Result<Signed<Self>>,
        G: FnMut(&IdentityId) -> io::Result<KeySet<'a>>,
    {
        use error::Verification::*;

        if !crate::SPEC_VERSION.is_compatible(&self.spec_version) {
            return Err(IncompatibleSpecVersion);
        }

        // Signatures are over the SHA-512 of the canonical encoding.
        let canonical = self.canonicalise()?;
        let payload = Sha512::digest(&canonical);
        verify::AuthorisedSigners::from_ids(&self.roles.root.ids, &mut find_signer)?
            .verify_signatures(&payload, self.roles.root.threshold, signatures)?;
        if let Some(prev) = self.prev.as_ref().map(&mut find_prev).transpose()? {
            verify::AuthorisedSigners::from_ids(&prev.signed.roles.root.ids, &mut find_signer)?
                .verify_signatures(&payload, prev.signed.roles.root.threshold, signatures)?;
            return prev.signed.verify(&prev.signatures, find_prev, find_signer);
        }

        Ok(())
    }

    /// Verifies a [`Mirrors`] document against this drop's `mirrors`
    /// role: not expired, compatible spec version, and signature
    /// threshold met.
    pub fn verify_mirrors<'a, F>(
        &self,
        mirrors: &Signed<Mirrors>,
        find_signer: F,
    ) -> Result<(), error::Verification>
    where
        F: FnMut(&IdentityId) -> io::Result<KeySet<'a>>,
    {
        use error::Verification::*;

        if let Some(deadline) = &mirrors.signed.expires {
            if deadline < &DateTime::now() {
                return Err(Expired);
            }
        }
        if !crate::SPEC_VERSION.is_compatible(&mirrors.signed.spec_version) {
            return Err(IncompatibleSpecVersion);
        }

        let payload = Sha512::digest(mirrors.signed.canonicalise()?);
        verify::AuthorisedSigners::from_ids(&self.roles.mirrors.ids, find_signer)?
            .verify_signatures(&payload, self.roles.mirrors.threshold, &mirrors.signatures)
    }

    /// Verifies an [`Alternates`] document.
    ///
    /// NOTE(review): alternates are checked against the `mirrors` role —
    /// there is no dedicated alternates role; confirm this is intended.
    pub fn verify_alternates<'a, F>(
        &self,
        alt: &Signed<Alternates>,
        find_signer: F,
    ) -> Result<(), error::Verification>
    where
        F: FnMut(&IdentityId) -> io::Result<KeySet<'a>>,
    {
        use error::Verification::*;

        if let Some(deadline) = &alt.signed.expires {
            if deadline < &DateTime::now() {
                return Err(Expired);
            }
        }
        if !crate::SPEC_VERSION.is_compatible(&alt.signed.spec_version) {
            return Err(IncompatibleSpecVersion);
        }

        let payload = Sha512::digest(alt.signed.canonicalise()?);
        verify::AuthorisedSigners::from_ids(&self.roles.mirrors.ids, find_signer)?
            .verify_signatures(&payload, self.roles.mirrors.threshold, &alt.signatures)
    }

    /// The canonical JSON encoding, wrapped in the [`Metadata`] envelope.
    pub fn canonicalise(&self) -> Result<Vec<u8>, canonical::error::Canonicalise> {
        canonical::to_vec(Metadata::drop(self))
    }
}
+
impl From<Drop> for Cow<'static, Drop> {
    fn from(d: Drop) -> Self {
        Self::Owned(d)
    }
}

impl<'a> From<&'a Drop> for Cow<'a, Drop> {
    fn from(d: &'a Drop) -> Self {
        Self::Borrowed(d)
    }
}

mod verify {
    use super::*;

    /// The key sets of all identities authorised for a role, keyed by
    /// identity id. Entries are removed as they are used, so an identity
    /// can count at most once towards a threshold.
    pub struct AuthorisedSigners<'a, 'b>(BTreeMap<&'a IdentityId, KeySet<'b>>);

    impl<'a, 'b> AuthorisedSigners<'a, 'b> {
        /// Resolves `ids` to their key sets via `find_signer`.
        ///
        /// Fails with `DuplicateKey` if the same key appears in more
        /// than one identity — otherwise a single key could satisfy a
        /// multi-identity threshold.
        pub fn from_ids<F>(
            ids: &'a BTreeSet<IdentityId>,
            mut find_signer: F,
        ) -> Result<AuthorisedSigners<'a, 'b>, error::Verification>
        where
            F: FnMut(&IdentityId) -> io::Result<KeySet<'b>>,
        {
            let mut signers = BTreeMap::new();
            for id in ids {
                signers.insert(id, find_signer(id)?);
            }
            // Reject any key shared between two identities.
            signers
                .values()
                .try_fold(BTreeSet::new(), |mut all_keys, keys| {
                    for key in keys.keys() {
                        if !all_keys.insert(key) {
                            return Err(error::Verification::DuplicateKey(*key));
                        }
                    }

                    Ok(all_keys)
                })?;

            Ok(Self(signers))
        }

        /// Checks that at least `threshold` distinct identities produced
        /// a valid signature over `payload`.
        ///
        /// A used identity is removed from the set, so several keys of
        /// the same identity cannot be counted twice. Bad signatures are
        /// logged and skipped rather than treated as fatal.
        pub fn verify_signatures<'c, S>(
            &mut self,
            payload: &[u8],
            threshold: NonZeroUsize,
            signatures: S,
        ) -> Result<(), error::Verification>
        where
            S: IntoIterator<Item = (&'c KeyId, &'c Signature)>,
        {
            use error::Verification::SignatureThreshold;

            let mut need_signatures = threshold.get();
            for (key_id, signature) in signatures {
                // Find the identity owning this key, if it is authorised.
                if let Some(sig_id) = self.0.iter().find_map(|(id, keys)| {
                    #[allow(clippy::unnecessary_lazy_evaluations)]
                    keys.contains_key(key_id).then(|| *id)
                }) {
                    let key = self.0.remove(sig_id).unwrap().remove(key_id).unwrap();
                    if key.verify(payload, signature).is_ok() {
                        need_signatures -= 1;
                    } else {
                        warn!("Bad signature by {key_id}");
                    }

                    if need_signatures == 0 {
                        break;
                    }
                }
            }
            if need_signatures > 0 {
                return Err(SignatureThreshold);
            }

            Ok(())
        }
    }
}
diff --git a/src/metadata/error.rs b/src/metadata/error.rs
new file mode 100644
index 0000000..66173f9
--- /dev/null
+++ b/src/metadata/error.rs
@@ -0,0 +1,40 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::io;
+
+use thiserror::Error;
+
+use super::KeyId;
+use crate::json::canonical::error::Canonicalise;
+
/// Errors computing the stable id of a metadata payload.
#[derive(Debug, Error)]
pub enum SigId {
    /// Ids are defined over the root (first) revision only.
    #[error("payload not at root revision")]
    NotRoot,

    #[error("invalid payload: canonicalisation failed")]
    Canonical(#[from] Canonicalise),
}

/// Errors during signature verification of metadata.
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum Verification {
    #[error("incompatible spec version")]
    IncompatibleSpecVersion,

    #[error("canonicalisation failed")]
    Canonicalise(#[from] Canonicalise),

    #[error("required signature threshold not met")]
    SignatureThreshold,

    #[error("metadata past its expiry date")]
    Expired,

    #[error("duplicate key: key {0} appears in more than one identity")]
    DuplicateKey(KeyId),

    #[error(transparent)]
    Io(#[from] io::Error),
}
diff --git a/src/metadata/git.rs b/src/metadata/git.rs
new file mode 100644
index 0000000..1dde3da
--- /dev/null
+++ b/src/metadata/git.rs
@@ -0,0 +1,232 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ borrow::Cow,
+ io,
+};
+
+use anyhow::anyhow;
+
+use super::{
+ drop,
+ identity,
+ Alternates,
+ ContentHash,
+ Drop,
+ Identity,
+ IdentityId,
+ KeySet,
+ Metadata,
+ Mirrors,
+ Signed,
+};
+use crate::{
+ cmd,
+ git::if_not_found_none,
+ json,
+};
+
// Well-known file names of metadata documents inside a git tree.
pub const META_FILE_ALTERNATES: &str = "alternates.json";
pub const META_FILE_DROP: &str = "drop.json";
pub const META_FILE_ID: &str = "id.json";
pub const META_FILE_MIRRORS: &str = "mirrors.json";

pub mod error {
    use thiserror::Error;

    /// The blob held a different metadata type than requested.
    #[derive(Debug, Error)]
    #[error("unexpected metadata type")]
    pub struct TypeMismatch;

    /// The expected metadata file is missing from the tree.
    #[derive(Debug, Error)]
    #[error("{file} not found in tree")]
    pub struct FileNotFound {
        pub file: &'static str,
    }
}

/// A signed metadata document as read from git, together with the
/// content hash of the blob it was read from.
pub struct GitMeta<T> {
    pub hash: ContentHash,
    pub signed: Signed<T>,
}

pub type GitIdentity = GitMeta<Identity>;
pub type GitDrop = GitMeta<Drop>;
pub type GitMirrors = GitMeta<Mirrors>;
pub type GitAlternates = GitMeta<Alternates>;

impl GitMeta<Drop> {
    /// Verifies the inner drop; see [`Signed::verified`].
    pub fn verified<'a, F, G>(
        self,
        find_prev: F,
        find_signer: G,
    ) -> Result<drop::Verified, super::error::Verification>
    where
        F: FnMut(&ContentHash) -> io::Result<Signed<Drop>>,
        G: FnMut(&IdentityId) -> io::Result<KeySet<'a>>,
    {
        self.signed.verified(find_prev, find_signer)
    }
}

impl GitMeta<Identity> {
    /// Verifies the inner identity; see [`Signed::verified`].
    pub fn verified<F>(self, find_prev: F) -> Result<identity::Verified, super::error::Verification>
    where
        F: FnMut(&ContentHash) -> io::Result<Signed<Identity>>,
    {
        self.signed.verified(find_prev)
    }
}

/// Result of a search-path lookup; see [`FromGit::from_search_path`].
pub struct FromSearchPath<'a, T> {
    /// The repository (from the search path) the object was found in
    pub repo: &'a git2::Repository,
    pub meta: GitMeta<T>,
}
+
/// Loading of metadata documents from various git locations.
///
/// Implementors name the file (`METADATA_JSON`) under which their
/// document type is stored inside a tree.
pub trait FromGit: Sized + Clone
where
    for<'a> Cow<'a, Self>: TryFrom<Metadata<'a>>,
{
    const METADATA_JSON: &'static str;

    /// Decodes the document from a blob, recording the blob's content
    /// hash. Fails with [`error::TypeMismatch`] if the blob holds a
    /// different metadata type.
    fn from_blob(blob: &git2::Blob) -> crate::Result<GitMeta<Self>> {
        let hash = ContentHash::from(blob);
        let signed = json::from_blob::<Signed<Metadata>>(blob)?
            .fmap(Cow::<Self>::try_from)
            .transpose()
            .map_err(|_| error::TypeMismatch)?
            .fmap(Cow::into_owned);

        Ok(GitMeta { hash, signed })
    }

    /// Loads from the commit the given ref points at.
    fn from_tip<R: AsRef<str>>(
        repo: &git2::Repository,
        refname: R,
    ) -> crate::Result<GitMeta<Self>> {
        Self::from_reference(repo, &repo.find_reference(refname.as_ref())?)
    }

    /// Loads from the commit a reference peels to.
    fn from_reference(
        repo: &git2::Repository,
        reference: &git2::Reference,
    ) -> crate::Result<GitMeta<Self>> {
        Self::from_commit(repo, &reference.peel_to_commit()?)
    }

    /// Loads from a commit's tree.
    fn from_commit(repo: &git2::Repository, commit: &git2::Commit) -> crate::Result<GitMeta<Self>> {
        Self::from_tree(repo, &commit.tree()?)
    }

    /// Loads `METADATA_JSON` out of `tree`.
    fn from_tree(repo: &git2::Repository, tree: &git2::Tree) -> crate::Result<GitMeta<Self>> {
        let entry = tree
            .get_name(Self::METADATA_JSON)
            .ok_or(error::FileNotFound {
                file: Self::METADATA_JSON,
            })?;
        let blob = entry.to_object(repo)?.peel_to_blob()?;

        Self::from_blob(&blob)
    }

    /// Loads the blob identified by a [`ContentHash`] directly from the
    /// object database.
    fn from_content_hash(
        repo: &git2::Repository,
        hash: &ContentHash,
    ) -> crate::Result<GitMeta<Self>> {
        let blob = repo.find_blob(hash.into())?;
        Self::from_blob(&blob)
    }

    /// Resolves `refname` against the first repository in `search_path`
    /// which has it, and loads from there.
    fn from_search_path<R: AsRef<str>>(
        search_path: &[git2::Repository],
        refname: R,
    ) -> crate::Result<FromSearchPath<Self>> {
        let (repo, reference) = find_ref_in_path(search_path, refname.as_ref())?
            .ok_or_else(|| anyhow!("{} not found in search path", refname.as_ref()))?;
        Self::from_reference(repo, &reference).map(|meta| FromSearchPath { repo, meta })
    }
}

impl FromGit for Identity {
    const METADATA_JSON: &'static str = META_FILE_ID;
}

impl FromGit for Drop {
    const METADATA_JSON: &'static str = META_FILE_DROP;
}

impl FromGit for Mirrors {
    const METADATA_JSON: &'static str = META_FILE_MIRRORS;
}

impl FromGit for Alternates {
    const METADATA_JSON: &'static str = META_FILE_ALTERNATES;
}
+
/// Returns a closure which resolves a `prev` content hash to its signed
/// metadata by loading the blob from `repo`'s object database.
///
/// Errors are adapted to `io::Error`, as required by the verification
/// traversals.
pub fn find_parent<T>(
    repo: &git2::Repository,
) -> impl Fn(&ContentHash) -> io::Result<Signed<T>> + '_
where
    T: FromGit,
    for<'a> Cow<'a, T>: TryFrom<Metadata<'a>>,
{
    |hash| {
        T::from_content_hash(repo, hash)
            .map_err(as_io)
            .map(|meta| meta.signed)
    }
}

/// Like [`find_parent`], but resolves the hash against the entries of a
/// specific `tree` (e.g. a folded history) instead of the whole odb.
pub fn find_parent_in_tree<'a, T>(
    repo: &'a git2::Repository,
    tree: &'a git2::Tree<'a>,
) -> impl Fn(&ContentHash) -> io::Result<Signed<T>> + 'a
where
    T: FromGit,
    for<'b> Cow<'b, T>: TryFrom<Metadata<'b>>,
{
    // Inner fn so `?` can be used with `crate::Result` before the error
    // is adapted to `io::Error`.
    fn go<T>(
        repo: &git2::Repository,
        tree: &git2::Tree,
        hash: &ContentHash,
    ) -> crate::Result<Signed<T>>
    where
        T: FromGit,
        for<'b> Cow<'b, T>: TryFrom<Metadata<'b>>,
    {
        let oid = git2::Oid::from(hash);
        let blob = tree
            .get_id(oid)
            .ok_or_else(|| anyhow!("parent {} not found in tree {}", oid, tree.id()))?
            .to_object(repo)?
            .into_blob()
            .map_err(|_| anyhow!("parent {} is not a file", oid))?;

        T::from_blob(&blob).map(|meta| meta.signed)
    }

    move |hash| go(repo, tree, hash).map_err(as_io)
}
+
+pub fn find_ref_in_path<'a>(
+ search_path: &'a [git2::Repository],
+ name: &str,
+) -> cmd::Result<Option<(&'a git2::Repository, git2::Reference<'a>)>> {
+ for repo in search_path {
+ let have_ref = if_not_found_none(repo.resolve_reference_from_short_name(name))?;
+ if let Some(r) = have_ref {
+ return Ok(Some((repo, r)));
+ }
+ }
+
+ Ok(None)
+}
+
/// Adapts any boxable error into an `io::Error` of kind `Other`,
/// preserving it as the source.
fn as_io<E>(e: E) -> io::Error
where
    E: Into<Box<dyn std::error::Error + Send + Sync>>,
{
    io::Error::new(io::ErrorKind::Other, e.into())
}
diff --git a/src/metadata/identity.rs b/src/metadata/identity.rs
new file mode 100644
index 0000000..8071e84
--- /dev/null
+++ b/src/metadata/identity.rs
@@ -0,0 +1,366 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ borrow::Cow,
+ collections::{
+ BTreeMap,
+ BTreeSet,
+ },
+ fmt,
+ io,
+ marker::PhantomData,
+ num::NonZeroUsize,
+ path::PathBuf,
+ str::FromStr,
+};
+
+use anyhow::{
+ anyhow,
+ ensure,
+};
+use hex::FromHex;
+use log::warn;
+use sha2::{
+ Digest,
+ Sha256,
+ Sha512,
+};
+use signature::Verifier;
+use url::Url;
+
+use super::{
+ error,
+ git::{
+ find_parent_in_tree,
+ FromGit,
+ META_FILE_ID,
+ },
+ Ancestors,
+ ContentHash,
+ Custom,
+ DateTime,
+ Key,
+ KeyId,
+ KeySet,
+ Metadata,
+ Signature,
+ Signed,
+ SpecVersion,
+};
+use crate::{
+ json::{
+ self,
+ canonical,
+ },
+ metadata::git::find_parent,
+};
+
/// Stable identifier of an identity: the SHA-256 over the canonical
/// encoding of its *root* (first) revision.
#[derive(
    Clone, Copy, Eq, Ord, PartialEq, PartialOrd, Hash, serde::Serialize, serde::Deserialize,
)]
pub struct IdentityId(#[serde(with = "hex::serde")] [u8; 32]);

impl TryFrom<&Identity> for IdentityId {
    type Error = error::SigId;

    /// Computes the id; fails unless `id` is a root revision.
    fn try_from(id: &Identity) -> Result<Self, Self::Error> {
        if id.prev.is_some() {
            return Err(error::SigId::NotRoot);
        }
        let digest = Sha256::digest(id.canonicalise()?);
        Ok(Self(digest.into()))
    }
}
+
+impl fmt::Display for IdentityId {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str(&hex::encode(self.0))
+ }
+}
+
+impl fmt::Debug for IdentityId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
impl FromStr for IdentityId {
    type Err = hex::FromHexError;

    /// Parses from the hex form produced by `Display`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        FromHex::from_hex(s).map(Self)
    }
}

impl TryFrom<String> for IdentityId {
    type Error = hex::FromHexError;

    fn try_from(value: String) -> Result<Self, Self::Error> {
        FromHex::from_hex(value).map(Self)
    }
}

/// An [`Identity`] which has passed verification, together with its id.
pub struct Verified {
    id: IdentityId,
    cur: Identity,
}

impl Verified {
    pub fn id(&self) -> &IdentityId {
        &self.id
    }

    /// The most recent (verified) revision of the identity.
    pub fn identity(&self) -> &Identity {
        &self.cur
    }

    pub fn into_parts(self) -> (IdentityId, Identity) {
        (self.id, self.cur)
    }

    /// `true` if signature is valid over message for any of the signer's
    /// _current_ set of keys
    pub fn did_sign<T: AsRef<[u8]>>(&self, msg: T, sig: &Signature) -> bool {
        self.cur
            .keys
            .values()
            .any(|key| key.verify(msg.as_ref(), sig).is_ok())
    }
}

impl AsRef<Identity> for Verified {
    fn as_ref(&self) -> &Identity {
        self.identity()
    }
}

/// Identity metadata: a self-certifying document carrying a key set, a
/// signature threshold, and a hash link to its previous revision.
#[derive(Clone, serde::Serialize, serde::Deserialize)]
pub struct Identity {
    pub spec_version: SpecVersion,
    /// Content hash of the previous revision, `None` at the root.
    pub prev: Option<ContentHash>,
    pub keys: KeySet<'static>,
    /// Minimum number of valid signatures required per revision.
    pub threshold: NonZeroUsize,
    pub mirrors: BTreeSet<Url>,
    pub expires: Option<DateTime>,
    #[serde(default)]
    pub custom: Custom,
}
+
impl Identity {
    /// Verifies and wraps as [`Verified`]; see [`Identity::verify`].
    pub fn verified<F>(
        self,
        signatures: &BTreeMap<KeyId, Signature>,
        find_prev: F,
    ) -> Result<Verified, error::Verification>
    where
        F: FnMut(&ContentHash) -> io::Result<Signed<Self>>,
    {
        let id = self.verify(signatures, find_prev)?;
        Ok(Verified { id, cur: self })
    }

    /// Verifies the revision chain, returning the stable [`IdentityId`].
    ///
    /// Only the head revision's expiry is checked here; ancestor
    /// revisions may have expired without invalidating the chain.
    pub fn verify<F>(
        &self,
        signatures: &BTreeMap<KeyId, Signature>,
        find_prev: F,
    ) -> Result<IdentityId, error::Verification>
    where
        F: FnMut(&ContentHash) -> io::Result<Signed<Self>>,
    {
        use error::Verification::Expired;

        if let Some(deadline) = &self.expires {
            if deadline < &DateTime::now() {
                return Err(Expired);
            }
        }
        self.verify_tail(Cow::Borrowed(signatures), find_prev)
    }

    /// Verifies one revision and recurses along `prev`.
    ///
    /// Each revision must meet its own threshold with its own keys *and*
    /// the previous revision's threshold with the previous keys, so key
    /// rotations are countersigned. The id returned is the SHA-256 of
    /// the root revision's canonical form.
    fn verify_tail<F>(
        &self,
        signatures: Cow<BTreeMap<KeyId, Signature>>,
        mut find_prev: F,
    ) -> Result<IdentityId, error::Verification>
    where
        F: FnMut(&ContentHash) -> io::Result<Signed<Self>>,
    {
        use error::Verification::IncompatibleSpecVersion;

        if !crate::SPEC_VERSION.is_compatible(&self.spec_version) {
            return Err(IncompatibleSpecVersion);
        }

        // Signatures are over the SHA-512 of the canonical encoding.
        let canonical = self.canonicalise()?;
        let signed = Sha512::digest(&canonical);
        verify_signatures(&signed, self.threshold, signatures.iter(), &self.keys)?;
        if let Some(prev) = self.prev.as_ref().map(&mut find_prev).transpose()? {
            verify_signatures(
                &signed,
                prev.signed.threshold,
                signatures.iter(),
                &prev.signed.keys,
            )?;
            return prev
                .signed
                .verify_tail(Cow::Owned(prev.signatures), find_prev);
        }

        Ok(IdentityId(Sha256::digest(canonical).into()))
    }

    /// The canonical JSON encoding, wrapped in the [`Metadata`] envelope.
    pub fn canonicalise(&self) -> Result<Vec<u8>, canonical::error::Canonicalise> {
        canonical::to_vec(Metadata::identity(self))
    }

    /// Iterates over ancestor revisions, most recent first.
    pub fn ancestors<F>(&self, find_prev: F) -> impl Iterator<Item = io::Result<Signed<Self>>>
    where
        F: FnMut(&ContentHash) -> io::Result<Signed<Self>>,
    {
        Ancestors {
            prev: self.prev.clone(),
            find_prev,
            _marker: PhantomData,
        }
    }

    /// Whether `ancestor` appears anywhere in the `prev` chain.
    pub fn has_ancestor<F>(&self, ancestor: &ContentHash, find_prev: F) -> io::Result<bool>
    where
        F: FnMut(&ContentHash) -> io::Result<Signed<Self>>,
    {
        match &self.prev {
            None => Ok(false),
            Some(parent) if parent == ancestor => Ok(true),
            Some(_) => {
                // Walk the chain, comparing each ancestor's own `prev`.
                for prev in self.ancestors(find_prev) {
                    match &prev?.signed.prev {
                        None => return Ok(false),
                        Some(parent) if parent == ancestor => return Ok(true),
                        _ => continue,
                    }
                }

                Ok(false)
            },
        }
    }
}
+
impl From<Identity> for Cow<'static, Identity> {
    fn from(s: Identity) -> Self {
        Self::Owned(s)
    }
}

impl<'a> From<&'a Identity> for Cow<'a, Identity> {
    fn from(s: &'a Identity) -> Self {
        Self::Borrowed(s)
    }
}

/// Counts signatures over `payload` which validate against a key in
/// `keys`, until `threshold` is met.
///
/// Signatures by unknown keys are ignored; invalid signatures by known
/// keys are logged and skipped. Fails with `SignatureThreshold` if
/// fewer than `threshold` valid signatures are found.
fn verify_signatures<'a, S>(
    payload: &[u8],
    threshold: NonZeroUsize,
    signatures: S,
    keys: &BTreeMap<KeyId, Key>,
) -> Result<(), error::Verification>
where
    S: IntoIterator<Item = (&'a KeyId, &'a Signature)>,
{
    use error::Verification::SignatureThreshold;

    let mut need_signatures = threshold.get();
    for (key_id, signature) in signatures {
        if let Some(key) = keys.get(key_id) {
            if key.verify(payload, signature).is_ok() {
                need_signatures -= 1;
            } else {
                warn!("Bad signature by {key_id}");
            }

            if need_signatures == 0 {
                break;
            }
        }
    }
    if need_signatures > 0 {
        return Err(SignatureThreshold);
    }

    Ok(())
}
+
// Subtree of an identity tree holding the folded ancestor revisions,
// named `0.json`, `1.json`, … oldest first.
const FOLDED_HISTORY: &str = ".history";

/// Writes `signed` into `tree` as `id.json`, folding all ancestor
/// revisions into the `.history` subtree so the full chain is
/// self-contained within the tree.
pub fn fold_to_tree<'a>(
    repo: &'a git2::Repository,
    tree: &mut git2::TreeBuilder<'a>,
    Signed { signed, signatures }: Signed<Identity>,
) -> crate::Result<()> {
    use git2::FileMode::{
        Blob,
        Tree,
    };

    let meta = Signed {
        signed: Metadata::from(&signed),
        signatures,
    };
    tree.insert(META_FILE_ID, json::to_blob(repo, &meta)?, Blob.into())?;

    // Start from any existing `.history` tree so prior entries survive.
    let mut history = {
        let existing = tree
            .get(FOLDED_HISTORY)?
            .map(|t| t.to_object(repo))
            .transpose()?;
        repo.treebuilder(existing.as_ref().and_then(git2::Object::as_tree))?
    };
    let mut parents = Vec::new();
    for parent in signed.ancestors(find_parent(repo)) {
        let meta = parent?.fmap(Metadata::from);
        let blob = json::to_blob(repo, &meta)?;
        parents.push(blob);
    }
    // Ancestors iterate newest-first; reverse so index 0 is the oldest.
    for (n, oid) in parents.into_iter().rev().enumerate() {
        history.insert(&format!("{n}.json"), oid, Blob.into())?;
    }
    tree.insert(FOLDED_HISTORY, history.write()?, Tree.into())?;

    Ok(())
}
+
/// Looks up and verifies the identity `id` within `root`.
///
/// Expects the layout produced by [`fold_to_tree`]: `<id>/id.json` plus
/// the folded history under `<id>/.history`. Fails if verification
/// fails, or if the verified id does not match the requested one.
pub fn find_in_tree(
    repo: &git2::Repository,
    root: &git2::Tree,
    id: &IdentityId,
) -> crate::Result<Verified> {
    let (id_path, hist_path) = {
        let base = PathBuf::from(id.to_string());
        (base.join(META_FILE_ID), base.join(FOLDED_HISTORY))
    };

    let blob = root
        .get_path(&id_path)?
        .to_object(repo)?
        .into_blob()
        .map_err(|_| anyhow!("{} is not a file", id_path.display()))?;
    let meta = Identity::from_blob(&blob)?.signed;
    let hist = root
        .get_path(&hist_path)?
        .to_object(repo)?
        .into_tree()
        .map_err(|_| anyhow!("{} is not a directory", hist_path.display()))?;

    // Resolve `prev` links against the folded history only.
    let verified = meta
        .signed
        .verified(&meta.signatures, find_parent_in_tree(repo, &hist))?;
    ensure!(
        verified.id() == id,
        "ids don't match after verification: expected {} found {}",
        id,
        verified.id()
    );

    Ok(verified)
}
diff --git a/src/metadata/mirrors.rs b/src/metadata/mirrors.rs
new file mode 100644
index 0000000..9124dd3
--- /dev/null
+++ b/src/metadata/mirrors.rs
@@ -0,0 +1,95 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ borrow::Cow,
+ collections::BTreeSet,
+};
+
+use url::Url;
+
+use super::{
+ Custom,
+ DateTime,
+ Metadata,
+ SpecVersion,
+};
+use crate::{
+ json::canonical,
+ str::Varchar,
+};
+
/// A single mirror advertisement.
#[derive(Clone, serde::Serialize, serde::Deserialize)]
pub struct Mirror {
    pub url: Url,
    #[serde(default)]
    pub kind: Kind,
    #[serde(default)]
    pub custom: Custom,
}

/// What a mirror is able to serve.
#[derive(Clone, Default, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Kind {
    /// Can fetch bundles
    Bundled,
    /// Can fetch packs via git-protocol
    #[default]
    Packed,
    /// Not serving bundles at all
    Sparse,
    /// Unknown kind
    Unknown(Varchar<String, 16>),
}

/// The list of mirrors of a drop; attested by the drop's `mirrors` role.
#[derive(Clone, Default, serde::Serialize, serde::Deserialize)]
pub struct Mirrors {
    pub spec_version: SpecVersion,
    pub mirrors: Vec<Mirror>,
    pub expires: Option<DateTime>,
}

impl Mirrors {
    /// The canonical JSON encoding, wrapped in the [`Metadata`] envelope.
    pub fn canonicalise(&self) -> Result<Vec<u8>, canonical::error::Canonicalise> {
        canonical::to_vec(Metadata::mirrors(self))
    }
}

impl From<Mirrors> for Cow<'static, Mirrors> {
    fn from(m: Mirrors) -> Self {
        Self::Owned(m)
    }
}

impl<'a> From<&'a Mirrors> for Cow<'a, Mirrors> {
    fn from(m: &'a Mirrors) -> Self {
        Self::Borrowed(m)
    }
}

/// Alternate locations of a drop; attested by the drop's `mirrors` role.
#[derive(Clone, Default, serde::Serialize, serde::Deserialize)]
pub struct Alternates {
    pub spec_version: SpecVersion,
    pub alternates: BTreeSet<Url>,
    #[serde(default)]
    pub custom: Custom,
    pub expires: Option<DateTime>,
}

impl Alternates {
    /// The canonical JSON encoding, wrapped in the [`Metadata`] envelope.
    pub fn canonicalise(&self) -> Result<Vec<u8>, canonical::error::Canonicalise> {
        canonical::to_vec(Metadata::alternates(self))
    }
}

impl From<Alternates> for Cow<'static, Alternates> {
    fn from(a: Alternates) -> Self {
        Self::Owned(a)
    }
}

impl<'a> From<&'a Alternates> for Cow<'a, Alternates> {
    fn from(a: &'a Alternates) -> Self {
        Self::Borrowed(a)
    }
}
diff --git a/src/patches.rs b/src/patches.rs
new file mode 100644
index 0000000..8623e4e
--- /dev/null
+++ b/src/patches.rs
@@ -0,0 +1,212 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use core::{
+ fmt,
+ ops::Deref,
+};
+use std::{
+ io::BufRead,
+ str::FromStr,
+};
+
+use anyhow::{
+ anyhow,
+ bail,
+};
+
+use hex::FromHex;
+use once_cell::sync::Lazy;
+use sha2::{
+ digest::{
+ generic_array::GenericArray,
+ typenum::U32,
+ },
+ Digest,
+ Sha256,
+};
+
+use crate::{
+ git::Refname,
+ iter::IteratorExt,
+};
+
+mod traits;
+pub use traits::{
+ to_blob,
+ to_tree,
+ Seen,
+};
+use traits::{
+ write_sharded,
+ Blob,
+};
+
+mod bundle;
+pub use bundle::Bundle;
+
+mod error;
+pub use error::FromTree;
+
+pub mod iter;
+pub mod notes;
+
+pub mod record;
+pub use record::{
+ Record,
+ Signature,
+};
+
+mod state;
+pub use state::{
+ merge_notes,
+ unbundle,
+ unbundled_ref,
+ DropHead,
+};
+
+mod submit;
+pub use submit::{
+ AcceptArgs,
+ AcceptOptions,
+ Submission,
+ ALLOWED_REFS,
+ GLOB_HEADS,
+ GLOB_IT_BUNDLES,
+ GLOB_IT_IDS,
+ GLOB_IT_TOPICS,
+ GLOB_NOTES,
+ GLOB_TAGS,
+};
+
+/// Maximum size in bytes of an acceptable patch bundle.
+pub const MAX_LEN_BUNDLE: usize = 5_000_000;
+
+/// HTTP header carrying the submitter's signature over the patch heads.
+pub const HTTP_HEADER_SIGNATURE: &str = "X-it-Signature";
+
+/// Branch onto which accepted patch records are appended.
+pub const REF_HEADS_PATCHES: &str = "refs/heads/patches";
+
+/// Ref namespaces used to track bundles, topics and unbundled state.
+pub const REF_IT_BRANCHES: &str = "refs/it/branches";
+pub const REF_IT_BUNDLES: &str = "refs/it/bundles";
+pub const REF_IT_PATCHES: &str = "refs/it/patches";
+pub const REF_IT_SEEN: &str = "refs/it/seen";
+pub const REF_IT_TOPICS: &str = "refs/it/topics";
+
+/// Names of the blobs which make up a record tree.
+pub const BLOB_HEADS: &str = "heads";
+pub const BLOB_META: &str = "record.json";
+
+/// Well-known topics for merge and snapshot checkpoints.
+pub static TOPIC_MERGES: Lazy<Topic> = Lazy::new(|| Topic::hashed("merges"));
+pub static TOPIC_SNAPSHOTS: Lazy<Topic> = Lazy::new(|| Topic::hashed("snapshots"));
+
+/// A discussion topic, identified by a SHA-256 digest (hex in JSON).
+#[derive(Clone, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
+pub struct Topic(#[serde(with = "hex::serde")] [u8; 32]);
+
+impl Topic {
+ /// Commit message trailer announcing the topic a note belongs to.
+ const TRAILER_PREFIX: &str = "Re:";
+
+ /// Derive a topic by hashing arbitrary bytes.
+ pub fn hashed<T: AsRef<[u8]>>(v: T) -> Self {
+ Self(Sha256::digest(v).into())
+ }
+
+ /// Extract the topic from the first `Re:` trailer of `commit`, if any.
+ pub fn from_commit(commit: &git2::Commit) -> crate::Result<Option<Self>> {
+ commit
+ .message_raw_bytes()
+ .lines()
+ .try_find_map(|line| -> crate::Result<Option<Topic>> {
+ let val = line?
+ .strip_prefix(Self::TRAILER_PREFIX)
+ .map(|v| Self::from_hex(v.trim()))
+ .transpose()?;
+ Ok(val)
+ })
+ }
+
+ /// Render `self` as a commit message trailer line.
+ pub fn as_trailer(&self) -> String {
+ format!("{} {}", Self::TRAILER_PREFIX, self)
+ }
+
+ /// Parse a topic from the last path segment of a topic ref.
+ pub fn from_refname(name: &str) -> crate::Result<Self> {
+ let last = name
+ .split('/')
+ .next_back()
+ .ok_or_else(|| anyhow!("invalid topic ref {name}"))?;
+ Ok(Self::from_hex(last)?)
+ }
+
+ /// Render `self` as a ref under [`REF_IT_TOPICS`].
+ pub fn as_refname(&self) -> Refname {
+ let name = format!("{}/{}", REF_IT_TOPICS, self);
+ // A hex digest is always a valid refname component.
+ Refname::try_from(name).unwrap()
+ }
+}
+
+impl FromHex for Topic {
+ type Error = hex::FromHexError;
+
+ fn from_hex<T: AsRef<[u8]>>(hex: T) -> Result<Self, Self::Error> {
+ <[u8; 32]>::from_hex(hex).map(Self)
+ }
+}
+
+impl FromStr for Topic {
+ type Err = <Self as FromHex>::Error;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ Self::from_hex(s)
+ }
+}
+
+// `Display` and `Debug` both render the lowercase hex digest.
+impl fmt::Display for Topic {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&hex::encode(self.0))
+ }
+}
+
+impl fmt::Debug for Topic {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&hex::encode(self.0))
+ }
+}
+
+impl From<GenericArray<u8, U32>> for Topic {
+ fn from(a: GenericArray<u8, U32>) -> Self {
+ Self(a.into())
+ }
+}
+
+/// Maps a [`Refname`] to the [`REF_IT_BRANCHES`] namespace
+///
+/// The [`Refname`] must be a branch, ie. start with 'refs/heads/'.
+pub struct TrackingBranch(String);
+
+impl TrackingBranch {
+ /// Tracking branch for 'refs/heads/master'.
+ pub fn master() -> Self {
+ Self([REF_IT_BRANCHES, "master"].join("/"))
+ }
+
+ /// Tracking branch for 'refs/heads/main'.
+ pub fn main() -> Self {
+ Self([REF_IT_BRANCHES, "main"].join("/"))
+ }
+
+ pub fn into_refname(self) -> Refname {
+ // Constructed only from valid refname components, so this can't fail.
+ Refname::try_from(self.0).unwrap()
+ }
+}
+
+impl Deref for TrackingBranch {
+ type Target = str;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+impl TryFrom<&Refname> for TrackingBranch {
+ type Error = crate::Error;
+
+ fn try_from(r: &Refname) -> Result<Self, Self::Error> {
+ match r.strip_prefix("refs/heads/") {
+ None => bail!("not a branch: {r}"),
+ // 'patches' would collide with the drop history branch
+ Some("patches") => bail!("reserved name: {r}"),
+ Some(suf) => Ok(Self([REF_IT_BRANCHES, suf].join("/"))),
+ }
+ }
+}
diff --git a/src/patches/bundle.rs b/src/patches/bundle.rs
new file mode 100644
index 0000000..296b24a
--- /dev/null
+++ b/src/patches/bundle.rs
@@ -0,0 +1,344 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ fs::File,
+ io::{
+ self,
+ Read,
+ Seek,
+ SeekFrom,
+ },
+ iter,
+ path::{
+ Path,
+ PathBuf,
+ },
+};
+
+use anyhow::{
+ bail,
+ ensure,
+ Context,
+};
+use multipart::client::lazy::Multipart;
+use sha2::{
+ Digest,
+ Sha256,
+};
+use tempfile::NamedTempFile;
+use url::Url;
+
+use super::record::{
+ self,
+ Encryption,
+};
+use crate::{
+ bundle,
+ io::HashWriter,
+ keys::Signature,
+ Result,
+};
+
+/// A patch bundle on local disk: parsed header, file path, integrity info
+/// and (sniffed) payload encryption.
+pub struct Bundle {
+ pub(super) header: bundle::Header,
+ pub(super) path: PathBuf,
+ pub(super) info: bundle::Info,
+ pub(super) encryption: Option<Encryption>,
+ // Byte offset at which the packdata starts (ie. the header length).
+ pack_start: u64,
+}
+
+impl Bundle {
+ /// Write a new bundle of `repo`, as described by `header`, into
+ /// `bundle_dir`.
+ ///
+ /// The file is written via a tempfile and atomically persisted as
+ /// `<hash>.<FILE_EXTENSION>`.
+ pub fn create<P>(bundle_dir: P, repo: &git2::Repository, header: bundle::Header) -> Result<Self>
+ where
+ P: AsRef<Path>,
+ {
+ let bundle_dir = bundle_dir.as_ref();
+ std::fs::create_dir_all(bundle_dir)?;
+
+ let mut tmp = NamedTempFile::new_in(bundle_dir)?;
+ let info = bundle::create(&mut tmp, repo, &header)?;
+ let path = bundle_dir
+ .join(info.hash.to_string())
+ .with_extension(bundle::FILE_EXTENSION);
+ tmp.persist(&path)?;
+ // The pack section begins immediately after the serialised header.
+ let mut buf = Vec::new();
+ header.to_writer(&mut buf)?;
+ let pack_start = buf.len() as u64;
+
+ Ok(Self {
+ header,
+ path,
+ info,
+ encryption: None,
+ pack_start,
+ })
+ }
+
+ /// Construct from a bundle previously fetched to a local path.
+ pub fn from_fetched(bundle: bundle::Fetched) -> Result<Self> {
+ let (path, info) = bundle.into_inner();
+ let (header, mut pack) = split(&path)?;
+ let pack_start = pack.offset;
+ let encryption = pack.encryption()?;
+
+ Ok(Self {
+ header,
+ path,
+ info,
+ encryption,
+ pack_start,
+ })
+ }
+
+ // TODO: defer computing the checksum until needed
+ /// Load a stored bundle from `bundle_dir`, verifying it against
+ /// `expect`. The header hash is always verified; the whole-file
+ /// checksum only if `expect.checksum` is present.
+ pub fn from_stored<P>(bundle_dir: P, expect: bundle::Expect) -> Result<Self>
+ where
+ P: AsRef<Path>,
+ {
+ let path = bundle_dir
+ .as_ref()
+ .join(expect.hash.to_string())
+ .with_extension(bundle::FILE_EXTENSION);
+
+ let (header, mut pack) = split(&path)?;
+ let pack_start = pack.offset;
+ let encryption = pack.encryption()?;
+ // Release the split handle before re-reading the whole file.
+ drop(pack);
+ let mut file = File::open(&path)?;
+ let mut sha2 = Sha256::new();
+
+ let len = io::copy(&mut file, &mut sha2)?;
+ let hash = header.hash();
+ ensure!(expect.hash == &hash, "header hash mismatch");
+ let checksum = sha2.finalize().into();
+ if let Some(expect) = expect.checksum {
+ ensure!(expect == checksum, "claimed and actual hash differ");
+ }
+
+ let info = bundle::Info {
+ len,
+ hash,
+ checksum,
+ uris: vec![],
+ };
+
+ Ok(Self {
+ header,
+ path,
+ info,
+ encryption,
+ pack_start,
+ })
+ }
+
+ /// Copy a bundle from the reader `from` into directory `to`, computing
+ /// length and checksum while copying.
+ pub fn copy<R, P>(mut from: R, to: P) -> Result<Self>
+ where
+ R: Read,
+ P: AsRef<Path>,
+ {
+ std::fs::create_dir_all(&to)?;
+ let mut tmp = NamedTempFile::new_in(&to)?;
+ let mut out = HashWriter::new(Sha256::new(), &mut tmp);
+
+ let len = io::copy(&mut from, &mut out)?;
+ let checksum = out.hash().into();
+
+ let (header, mut pack) = split(tmp.path())?;
+ let hash = header.hash();
+ let pack_start = pack.offset;
+ let encryption = pack.encryption()?;
+
+ let info = bundle::Info {
+ len,
+ hash,
+ checksum,
+ uris: vec![],
+ };
+
+ // Persist only after the payload was validated by `split`.
+ let path = to
+ .as_ref()
+ .join(hash.to_string())
+ .with_extension(bundle::FILE_EXTENSION);
+ tmp.persist(&path)?;
+
+ Ok(Self {
+ header,
+ path,
+ info,
+ encryption,
+ pack_start,
+ })
+ }
+
+ /// Payload encryption detected when the bundle was read, if any.
+ pub fn encryption(&self) -> Option<Encryption> {
+ self.encryption
+ }
+
+ pub fn is_encrypted(&self) -> bool {
+ self.encryption.is_some()
+ }
+
+ /// Reader over the raw bundle file.
+ pub fn reader(&self) -> Result<impl io::Read> {
+ Ok(File::open(&self.path)?)
+ }
+
+ pub fn header(&self) -> &bundle::Header {
+ &self.header
+ }
+
+ pub fn info(&self) -> &bundle::Info {
+ &self.info
+ }
+
+ /// Freshly-opened handle onto the packdata section.
+ pub fn packdata(&self) -> Result<Packdata> {
+ let bundle = File::open(&self.path)?;
+ Ok(Packdata {
+ offset: self.pack_start,
+ bundle,
+ })
+ }
+
+ /// The location every drop serves its bundles at, relative to the
+ /// drop's own base URL. The location id is the digest of the URI.
+ pub fn default_location(&self) -> bundle::Location {
+ let uri = bundle::Uri::Relative(format!("/bundles/{}.bundle", self.info.hash));
+ let id = hex::encode(Sha256::digest(uri.as_str()));
+
+ bundle::Location {
+ id,
+ uri,
+ filter: None,
+ creation_token: None,
+ location: None,
+ }
+ }
+
+ /// Path of the bundle list file corresponding to this bundle.
+ pub fn bundle_list_path(&self) -> PathBuf {
+ self.path.with_extension(bundle::list::FILE_EXTENSION)
+ }
+
+ /// Write a bundle list next to the bundle file, containing the default
+ /// location, one location per known URI, and any `extra` locations.
+ pub fn write_bundle_list<I>(&self, extra: I) -> Result<()>
+ where
+ I: IntoIterator<Item = bundle::Location>,
+ {
+ let mut blist = bundle::List::any();
+ blist.extend(
+ iter::once(self.default_location())
+ .chain(self.info.uris.iter().map(|url| {
+ let uri = bundle::Uri::Absolute(url.clone());
+ let id = hex::encode(Sha256::digest(uri.as_str()));
+
+ bundle::Location {
+ id,
+ uri,
+ filter: None,
+ creation_token: None,
+ location: None,
+ }
+ }))
+ .chain(extra),
+ );
+
+ // Bundle lists are stored in git-config format.
+ let mut cfg = git2::Config::open(&self.bundle_list_path())?;
+ blist.to_config(&mut cfg)?;
+
+ Ok(())
+ }
+
+ /// Sign the bundle's [`record::Heads`] with `signer`.
+ pub fn sign<S>(&self, signer: &mut S) -> Result<Signature>
+ where
+ S: crate::keys::Signer,
+ {
+ Ok(signer.sign(record::Heads::from(&self.header).as_slice())?)
+ }
+
+ /// Add the bundle file to IPFS through the HTTP API at `via`, record
+ /// the resulting `ipfs://` URL in `self.info.uris`, and return it.
+ pub fn ipfs_add(&mut self, via: &Url) -> Result<Url> {
+ let name = format!("{}.{}", self.info.hash, bundle::FILE_EXTENSION);
+ let mut api = via.join("api/v0/add")?;
+ api.query_pairs_mut()
+ // FIXME: we may want this, but `rust-chunked-transfer` (used by
+ // `ureq`) doesn't know about trailers
+ // .append_pair("to-files", &name)
+ .append_pair("quiet", "true");
+ let mpart = Multipart::new()
+ .add_file(name, self.path.as_path())
+ .prepare()?;
+
+ // Minimal view of the add response; only the CID is needed.
+ #[derive(serde::Deserialize)]
+ struct Response {
+ #[serde(rename = "Hash")]
+ cid: String,
+ }
+
+ let Response { cid } = ureq::post(api.as_str())
+ .set(
+ "Content-Length",
+ &mpart
+ .content_len()
+ .expect("zero-size bundle file?")
+ .to_string(),
+ )
+ .set(
+ "Content-Type",
+ &format!("multipart/form-data; boundary={}", mpart.boundary()),
+ )
+ .send(mpart)
+ .context("posting to IPFS API")?
+ .into_json()
+ .context("parsing IPFS API response")?;
+
+ let url = Url::parse(&format!("ipfs://{cid}"))?;
+ self.info.uris.push(url.clone());
+
+ Ok(url)
+ }
+}
+
+impl From<Bundle> for bundle::Info {
+ fn from(Bundle { info, .. }: Bundle) -> Self {
+ info
+ }
+}
+
+/// Split a bundle file into its parsed header and remaining packdata.
+fn split(bundle: &Path) -> Result<(bundle::Header, Packdata)> {
+ let mut bundle = File::open(bundle)?;
+ let header = bundle::Header::from_reader(&mut bundle)?;
+ // The payload starts right where the header parser stopped reading.
+ let offset = bundle.stream_position()?;
+ let pack = Packdata { offset, bundle };
+ Ok((header, pack))
+}
+
+/// The payload section of a bundle file, starting at byte `offset`.
+pub struct Packdata {
+ offset: u64,
+ bundle: File,
+}
+
+impl Packdata {
+ /// Ingest the packdata into the object database `odb`.
+ pub fn index(&mut self, odb: &git2::Odb) -> Result<()> {
+ self.bundle.seek(SeekFrom::Start(self.offset))?;
+
+ let mut pw = odb.packwriter()?;
+ io::copy(&mut self.bundle, &mut pw)?;
+ pw.commit()?;
+
+ Ok(())
+ }
+
+ /// Sniff the payload format: `None` for a plain packfile, or the
+ /// encryption scheme (age / gpg) detected from its leading magic.
+ pub fn encryption(&mut self) -> Result<Option<Encryption>> {
+ const PACK: &[u8] = b"PACK";
+ const AGE: &[u8] = b"age-encryption.org/v1";
+ const GPG: &[u8] = b"-----BEGIN PGP MESSAGE-----";
+
+ self.bundle.seek(SeekFrom::Start(self.offset))?;
+
+ // 32 bytes cover the longest of the magic prefixes above.
+ let mut buf = [0; 32];
+ self.bundle.read_exact(&mut buf)?;
+ if buf.starts_with(PACK) {
+ Ok(None)
+ } else if buf.starts_with(AGE) {
+ Ok(Some(Encryption::Age))
+ } else if buf.starts_with(GPG) {
+ Ok(Some(Encryption::Gpg))
+ } else {
+ bail!("packdata does not appear to be in a known format")
+ }
+ }
+}
diff --git a/src/patches/error.rs b/src/patches/error.rs
new file mode 100644
index 0000000..a02ed94
--- /dev/null
+++ b/src/patches/error.rs
@@ -0,0 +1,29 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use thiserror::Error;
+
+/// Errors arising when decoding typed content out of a git tree.
+#[derive(Debug, Error)]
+#[non_exhaustive]
+pub enum FromTree {
+ #[error("'{name}' not found in tree")]
+ NotFound { name: &'static str },
+
+ #[error("expected '{name}' to be a blob, but found {kind:?}")]
+ TypeMismatch {
+ name: &'static str,
+ kind: Option<git2::ObjectType>,
+ },
+
+ #[error("max blob size {max} exceeded: {found}")]
+ BlobSize { max: usize, found: usize },
+
+ #[error("type conversion from byte slice to T failed")]
+ TypeConversion(#[source] crate::Error),
+
+ #[error("invalid signature")]
+ InvalidSignature(#[from] signature::Error),
+
+ #[error(transparent)]
+ Git(#[from] git2::Error),
+}
diff --git a/src/patches/iter.rs b/src/patches/iter.rs
new file mode 100644
index 0000000..6023247
--- /dev/null
+++ b/src/patches/iter.rs
@@ -0,0 +1,395 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ collections::BTreeSet,
+ rc::Rc,
+ str::FromStr,
+};
+
+use anyhow::anyhow;
+use time::{
+ OffsetDateTime,
+ UtcOffset,
+};
+
+use super::{
+ notes,
+ record::{
+ Heads,
+ Record,
+ },
+ Topic,
+ GLOB_IT_TOPICS,
+ TOPIC_MERGES,
+};
+use crate::{
+ git::{
+ self,
+ Refname,
+ EMPTY_TREE,
+ },
+ iter,
+ patches::REF_IT_BUNDLES,
+ Result,
+};
+
+pub mod dropped {
+ //! Iterators over the history of a drop ref (newest first unless
+ //! stated otherwise).
+ use super::*;
+ use crate::{
+ error,
+ patches::TOPIC_SNAPSHOTS,
+ };
+
+ /// All `(topic, commit id)` pairs found on `drop_ref`; commits
+ /// without a topic trailer are skipped.
+ pub fn topics<'a>(
+ repo: &'a git2::Repository,
+ drop_ref: &'a str,
+ ) -> impl Iterator<Item = Result<(Topic, git2::Oid)>> + 'a {
+ let topic = move |oid| -> Result<Option<(Topic, git2::Oid)>> {
+ let commit = repo.find_commit(oid)?;
+ Ok(Topic::from_commit(&commit)?.map(|topic| (topic, oid)))
+ };
+ let init = || {
+ let mut walk = repo.revwalk()?;
+ walk.push_ref(drop_ref)?;
+ Ok(walk.map(|i| i.map_err(Into::into)))
+ };
+
+ iter::Iter::new(init, Some).filter_map(move |oid| oid.and_then(topic).transpose())
+ }
+
+ /// Commit ids on `drop_ref` which belong to `topic`.
+ pub fn topic<'a>(
+ repo: &'a git2::Repository,
+ drop_ref: &'a str,
+ topic: &'a Topic,
+ ) -> impl Iterator<Item = Result<git2::Oid>> + 'a {
+ topics(repo, drop_ref).filter_map(move |i| {
+ i.map(|(top, oid)| (&top == topic).then_some(oid))
+ .transpose()
+ })
+ }
+
+ /// Commit ids of merge checkpoints.
+ #[allow(unused)]
+ pub fn merges<'a>(
+ repo: &'a git2::Repository,
+ drop_ref: &'a str,
+ ) -> impl Iterator<Item = Result<git2::Oid>> + 'a {
+ topic(repo, drop_ref, &TOPIC_MERGES)
+ }
+
+ /// Commit ids of snapshot checkpoints.
+ #[allow(unused)]
+ pub fn snapshots<'a>(
+ repo: &'a git2::Repository,
+ drop_ref: &'a str,
+ ) -> impl Iterator<Item = Result<git2::Oid>> + 'a {
+ topic(repo, drop_ref, &TOPIC_SNAPSHOTS)
+ }
+
+ /// All records on `drop_ref`, newest first.
+ pub fn records<'a>(
+ repo: &'a git2::Repository,
+ drop_ref: &'a str,
+ ) -> impl Iterator<Item = Result<Record>> + 'a {
+ _records(repo, drop_ref, false)
+ }
+
+ /// All records on `drop_ref`, oldest first.
+ pub fn records_rev<'a>(
+ repo: &'a git2::Repository,
+ drop_ref: &'a str,
+ ) -> impl Iterator<Item = Result<Record>> + 'a {
+ _records(repo, drop_ref, true)
+ }
+
+ fn _records<'a>(
+ repo: &'a git2::Repository,
+ drop_ref: &'a str,
+ rev: bool,
+ ) -> impl Iterator<Item = Result<Record>> + 'a {
+ let record = move |oid| -> Result<Option<Record>> {
+ let commit = repo.find_commit(oid)?;
+ match Record::from_commit(repo, &commit) {
+ Ok(r) => Ok(Some(r)),
+ // Commits without a topic trailer are not records; skip
+ // them instead of failing the whole iteration.
+ Err(e) => match e.downcast_ref::<error::NotFound<&str, String>>() {
+ Some(error::NotFound { what: "topic", .. }) => Ok(None),
+ _ => Err(e),
+ },
+ }
+ };
+ let init = move || {
+ let mut walk = repo.revwalk()?;
+ walk.push_ref(drop_ref)?;
+ if rev {
+ walk.set_sorting(git2::Sort::REVERSE)?;
+ }
+ Ok(walk.map(|i| i.map_err(Into::into)))
+ };
+
+ iter::Iter::new(init, Some).filter_map(move |oid| oid.and_then(record).transpose())
+ }
+}
+
+pub mod unbundled {
+ //! Iterators over locally unbundled topics.
+ use super::*;
+
+ /// All topics which have a local topic ref.
+ #[allow(unused)]
+ pub fn topics(repo: &git2::Repository) -> impl Iterator<Item = Result<Topic>> + '_ {
+ iter::Iter::new(
+ move || {
+ let refs = repo.references_glob(GLOB_IT_TOPICS.glob())?;
+ Ok(git::ReferenceNames::new(refs, Topic::from_refname))
+ },
+ Some,
+ )
+ }
+
+ /// All topics which have a local topic ref, paired with a subject
+ /// line derived from the topic's first note.
+ pub fn topics_with_subject(
+ repo: &git2::Repository,
+ ) -> impl Iterator<Item = Result<(Topic, String)>> + '_ {
+ let topic_and_subject = move |refname: &str| -> Result<(Topic, String)> {
+ let topic = Topic::from_refname(refname)?;
+ let subject = find_subject(repo, refname)?;
+ Ok((topic, subject))
+ };
+ iter::Iter::new(
+ move || {
+ let refs = repo.references_glob(GLOB_IT_TOPICS.glob())?;
+ Ok(git::ReferenceNames::new(refs, topic_and_subject))
+ },
+ Some,
+ )
+ }
+
+ // TODO: cache this somewhere
+ /// Subject of the oldest first-parent note on `topic_ref`, or the
+ /// empty string if none can be determined.
+ fn find_subject(repo: &git2::Repository, topic_ref: &str) -> Result<String> {
+ let mut walk = repo.revwalk()?;
+ walk.push_ref(topic_ref)?;
+ walk.simplify_first_parent()?;
+ walk.set_sorting(git2::Sort::TOPOLOGICAL | git2::Sort::REVERSE)?;
+ match walk.next() {
+ None => Ok(String::default()),
+ Some(oid) => {
+ let tree = repo.find_commit(oid?)?.tree()?;
+ let note = notes::Note::from_tree(repo, &tree)?;
+ let subj = match note {
+ // Checkpoints get a fixed subject; other notes use
+ // their own subject line.
+ notes::Note::Simple(n) => n
+ .checkpoint_kind()
+ .map(|k| {
+ match k {
+ notes::CheckpointKind::Merge => "Merges",
+ notes::CheckpointKind::Snapshot => "Snapshots",
+ }
+ .to_owned()
+ })
+ .unwrap_or_else(|| n.subject().unwrap_or_default().to_owned()),
+ _ => String::default(),
+ };
+
+ Ok(subj)
+ },
+ }
+ }
+}
+
+/// Name and email of a commit author or committer.
+#[derive(Eq, PartialEq, serde::Serialize)]
+pub struct Subject {
+ pub name: String,
+ pub email: String,
+}
+
+// Fails iff name or email are not valid UTF-8.
+impl TryFrom<git2::Signature<'_>> for Subject {
+ type Error = std::str::Utf8Error;
+
+ fn try_from(git: git2::Signature<'_>) -> std::result::Result<Self, Self::Error> {
+ let utf8 = |bs| std::str::from_utf8(bs).map(ToOwned::to_owned);
+
+ let name = utf8(git.name_bytes())?;
+ let email = utf8(git.email_bytes())?;
+
+ Ok(Self { name, email })
+ }
+}
+
+/// Envelope of a [`Note`]: provenance and patch attribution.
+#[derive(serde::Serialize)]
+#[serde(rename_all = "kebab-case")]
+pub struct NoteHeader {
+ #[serde(with = "git::serde::oid")]
+ pub id: git2::Oid,
+ pub author: Subject,
+ /// `Some` iff different from `author`
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub committer: Option<Subject>,
+ /// Committer time
+ #[serde(with = "time::serde::rfc3339")]
+ pub time: OffsetDateTime,
+ /// The patch this note belongs to (shared between notes).
+ pub patch: Rc<PatchInfo>,
+ #[serde(
+ with = "git::serde::oid::option",
+ skip_serializing_if = "Option::is_none"
+ )]
+ pub in_reply_to: Option<git2::Oid>,
+}
+
+/// A patch submission: its id and the refs it was unbundled to.
+#[derive(serde::Serialize)]
+pub struct PatchInfo {
+ pub id: Heads,
+ pub tips: BTreeSet<Refname>,
+}
+
+/// A note on a topic, ie. header plus message payload.
+#[derive(serde::Serialize)]
+pub struct Note {
+ pub header: NoteHeader,
+ pub message: notes::Note,
+}
+
+/// The notes of `topic`, in topological (newest-first) order.
+///
+/// The topic ref alternates between "merge" commits (empty tree,
+/// carrying a `Patch:` trailer) announcing a patch submission, and note
+/// commits. Each note is attributed to the most recent patch seen on
+/// the walk. The tip of a topic ref must be a merge commit.
+pub fn topic<'a>(
+ repo: &'a git2::Repository,
+ topic: &'a Topic,
+) -> impl Iterator<Item = Result<Note>> + DoubleEndedIterator + 'a {
+ let init = move || {
+ let topic_ref = topic.as_refname();
+ let mut walk = repo.revwalk()?;
+ walk.push_ref(&topic_ref)?;
+ walk.set_sorting(git2::Sort::TOPOLOGICAL)?;
+
+ // A patch submission is marked by a commit with an empty tree,
+ // whose `Patch:` trailer yields the patch id.
+ fn patch_id(c: &git2::Commit) -> Result<Option<Heads>> {
+ let parse = || Heads::try_from(c);
+ let is_merge = c.tree_id() == *EMPTY_TREE;
+ is_merge.then(parse).transpose()
+ }
+
+ // Collect the refs patch `id` was unbundled to, excluding its
+ // internal 'it' refs.
+ fn patch_info(repo: &git2::Repository, id: Heads) -> Result<PatchInfo> {
+ let prefix = format!("{}/{}", REF_IT_BUNDLES, id);
+ let glob = format!("{prefix}/**");
+ let mut iter = repo.references_glob(&glob)?;
+ let tips = iter
+ .names()
+ .filter_map(|i| match i {
+ Err(e) => Some(Err(e.into())),
+ Ok(name)
+ if name
+ .strip_prefix(&prefix)
+ .expect("glob yields prefix")
+ .starts_with("/it/") =>
+ {
+ None
+ },
+ Ok(name) => Refname::from_str(name)
+ .map_err(Into::into)
+ .map(Some)
+ .transpose(),
+ })
+ .collect::<Result<_>>()?;
+
+ Ok(PatchInfo { id, tips })
+ }
+
+ let mut patches: Vec<Rc<PatchInfo>> = Vec::new();
+ let mut commits: Vec<(git2::Tree<'a>, NoteHeader)> = Vec::new();
+
+ if let Some(tip) = walk.next() {
+ // ensure tip is a merge
+ {
+ let tip = repo.find_commit(tip?)?;
+ let id = patch_id(&tip)?.ok_or_else(|| {
+ anyhow!("invalid topic '{topic_ref}': tip must be a merge commit")
+ })?;
+ let patch = patch_info(repo, id)?;
+ patches.push(Rc::new(patch));
+ }
+
+ for id in walk {
+ let commit = repo.find_commit(id?)?;
+ match patch_id(&commit)? {
+ Some(id) => {
+ let patch = patch_info(repo, id)?;
+ patches.push(Rc::new(patch))
+ },
+ None => {
+ let id = commit.id();
+ let (author, committer) = {
+ let a = commit.author();
+ let c = commit.committer();
+
+ // `committer` must be `Some` iff it differs from the
+ // author in *any* component (see `NoteHeader`); the
+ // previous `&&` collapsed committers sharing only a
+ // name (or only an email) with the author.
+ if a.name_bytes() != c.name_bytes()
+ || a.email_bytes() != c.email_bytes()
+ {
+ let author = Subject::try_from(a)?;
+ let committer = Subject::try_from(c).map(Some)?;
+
+ (author, committer)
+ } else {
+ (Subject::try_from(a)?, None)
+ }
+ };
+ let time = {
+ let t = commit.time();
+ let ofs = UtcOffset::from_whole_seconds(t.offset_minutes() * 60)?;
+ OffsetDateTime::from_unix_timestamp(t.seconds())?.replace_offset(ofs)
+ };
+ let tree = commit.tree()?;
+ // Attribute the note to the most recently seen patch.
+ let patch = Rc::clone(patches.last().expect("tip merge pushed a patch"));
+ let in_reply_to = commit.parent_ids().next();
+
+ let header = NoteHeader {
+ id,
+ author,
+ committer,
+ time,
+ patch,
+ in_reply_to,
+ };
+
+ commits.push((tree, header));
+ },
+ }
+ }
+ }
+
+ // Defer note decoding until the iterator is actually consumed.
+ Ok(commits.into_iter().map(move |(tree, header)| {
+ notes::Note::from_tree(repo, &tree).map(|message| Note { header, message })
+ }))
+ };
+
+ iter::Iter::new(init, Some)
+}
+
+pub mod topic {
+ use crate::git::if_not_found_none;
+
+ use super::*;
+
+ /// Pick the commit a new note on `topic` should reply to when the
+ /// caller didn't specify one.
+ ///
+ /// Walks the topic oldest-first and keeps the last note commit whose
+ /// first parent is either the root of the topic or has not been
+ /// visited yet (ie. presumably the head of the latest top-level
+ /// thread -- TODO confirm against callers).
+ ///
+ /// Returns `None` if the topic ref doesn't exist yet.
+ pub(crate) fn default_reply_to(
+ repo: &git2::Repository,
+ topic: &Topic,
+ ) -> Result<Option<git2::Oid>> {
+ let topic_ref = topic.as_refname();
+ if if_not_found_none(repo.refname_to_id(&topic_ref))?.is_none() {
+ return Ok(None);
+ }
+
+ // Reverse topological order: oldest commit first.
+ let mut walk = repo.revwalk()?;
+ walk.set_sorting(git2::Sort::TOPOLOGICAL | git2::Sort::REVERSE)?;
+ walk.push_ref(&topic_ref)?;
+
+ let first = walk
+ .next()
+ .expect("topic can't be empty, because {topic_ref} exists")?;
+ let mut last = first;
+ let mut seen = BTreeSet::<git2::Oid>::new();
+ for id in walk {
+ let id = id?;
+ let commit = repo.find_commit(id)?;
+ // Skip merge markers (empty tree); only note commits count.
+ if commit.tree_id() != *EMPTY_TREE {
+ let first_parent = commit
+ .parent_ids()
+ .next()
+ .expect("commit {id} must have a parent");
+ if first_parent == first || !seen.contains(&first_parent) {
+ last = id;
+ }
+ seen.insert(id);
+ }
+ }
+
+ Ok(Some(last))
+ }
+}
diff --git a/src/patches/notes.rs b/src/patches/notes.rs
new file mode 100644
index 0000000..b85ca64
--- /dev/null
+++ b/src/patches/notes.rs
@@ -0,0 +1,181 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ cmp,
+ collections::BTreeMap,
+ convert::Infallible,
+ io,
+ ops::Range,
+};
+
+use super::{
+ error,
+ traits::{
+ Blob,
+ BlobData,
+ TreeData,
+ },
+};
+use crate::{
+ bundle::ObjectId,
+ git::Refname,
+};
+
+/// Message payload of a topic note: either a plain JSON document or an
+/// automerge CRDT document.
+#[derive(serde::Serialize)]
+#[serde(untagged)]
+pub enum Note {
+ Simple(Simple),
+ Automerge(Automerge),
+}
+
+impl Note {
+ /// Decode a note from a record tree: try the [`Simple`] blob name
+ /// first, falling back to [`Automerge`] if it is absent.
+ pub fn from_tree<'a>(repo: &'a git2::Repository, tree: &git2::Tree<'a>) -> crate::Result<Self> {
+ Blob::<Simple>::from_tree(repo, tree)
+ .map(|Blob { content, .. }| Self::Simple(content))
+ .or_else(|e| match e {
+ error::FromTree::NotFound { .. } => {
+ let Blob { content, .. } = Blob::<Automerge>::from_tree(repo, tree)?;
+ Ok(Self::Automerge(content))
+ },
+ x => Err(x.into()),
+ })
+ }
+}
+
+/// An automerge document, stored opaquely as raw bytes.
+#[derive(serde::Serialize)]
+pub struct Automerge(Vec<u8>);
+
+impl BlobData for Automerge {
+ type Error = Infallible;
+
+ const MAX_BYTES: usize = 1_000_000;
+
+ fn from_blob(data: &[u8]) -> Result<Self, Self::Error> {
+ Ok(Self(data.to_vec()))
+ }
+
+ fn write_blob<W: io::Write>(&self, mut writer: W) -> io::Result<()> {
+ writer.write_all(&self.0)
+ }
+}
+
+impl TreeData for Automerge {
+ const BLOB_NAME: &'static str = "c";
+}
+
+/// A JSON note: one of the [`Predef`] shapes, or any other JSON object.
+#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
+#[serde(untagged)]
+pub enum Simple {
+ Known(Predef),
+ Unknown(serde_json::Map<String, serde_json::Value>),
+}
+
+impl Simple {
+ /// Alias for [`Simple::basic`].
+ pub fn new(message: String) -> Self {
+ Self::basic(message)
+ }
+
+ /// A plain text message.
+ pub fn basic(message: String) -> Self {
+ Self::Known(Predef::Basic { message })
+ }
+
+ /// A merge or snapshot checkpoint over the given refs.
+ pub fn checkpoint(
+ kind: CheckpointKind,
+ refs: BTreeMap<Refname, ObjectId>,
+ message: Option<String>,
+ ) -> Self {
+ Self::Known(Predef::Checkpoint {
+ kind,
+ refs,
+ message,
+ })
+ }
+
+ /// Decode from the tree of `commit`.
+ pub fn from_commit(repo: &git2::Repository, commit: &git2::Commit) -> crate::Result<Self> {
+ let tree = commit.tree()?;
+ let blob = Blob::from_tree(repo, &tree)?;
+
+ Ok(blob.content)
+ }
+
+ /// Subject line, if this is a known note shape with a message.
+ pub fn subject(&self) -> Option<&str> {
+ match self {
+ Self::Known(k) => k.subject(),
+ _ => None,
+ }
+ }
+
+ pub fn is_checkpoint(&self) -> bool {
+ matches!(self, Self::Known(Predef::Checkpoint { .. }))
+ }
+
+ pub fn checkpoint_kind(&self) -> Option<&CheckpointKind> {
+ match self {
+ Self::Known(Predef::Checkpoint { kind, .. }) => Some(kind),
+ _ => None,
+ }
+ }
+}
+
+impl BlobData for Simple {
+ type Error = serde_json::Error;
+
+ const MAX_BYTES: usize = 1_000_000;
+
+ fn from_blob(data: &[u8]) -> Result<Self, Self::Error> {
+ serde_json::from_slice(data)
+ }
+
+ fn write_blob<W: io::Write>(&self, writer: W) -> io::Result<()> {
+ serde_json::to_writer_pretty(writer, self).map_err(Into::into)
+ }
+}
+
+impl TreeData for Simple {
+ const BLOB_NAME: &'static str = "m";
+}
+
+/// Predefined note shapes, tagged by a `_type` field in the JSON.
+#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
+#[serde(tag = "_type")]
+pub enum Predef {
+ #[serde(rename = "eagain.io/it/notes/basic")]
+ Basic { message: String },
+ #[serde(rename = "eagain.io/it/notes/code-comment")]
+ CodeComment { loc: SourceLoc, message: String },
+ #[serde(rename = "eagain.io/it/notes/checkpoint")]
+ Checkpoint {
+ kind: CheckpointKind,
+ refs: BTreeMap<Refname, ObjectId>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ message: Option<String>,
+ },
+}
+
+impl Predef {
+ /// First line of the note's message, truncated to at most 72 bytes,
+ /// for use as a subject; `None` if there is no non-empty message.
+ pub fn subject(&self) -> Option<&str> {
+ let msg = match self {
+ Self::Basic { message } | Self::CodeComment { message, .. } => Some(message),
+ Self::Checkpoint { message, .. } => message.as_ref(),
+ }?;
+ let line = msg.lines().next()?;
+ // Back the cut-off up to a character boundary: slicing blindly at
+ // byte 72 panics if it lands inside a multi-byte UTF-8 sequence.
+ let mut end = cmp::min(72, line.len());
+ while !line.is_char_boundary(end) {
+ end -= 1;
+ }
+ let subj = &line[..end];
+
+ (!subj.is_empty()).then_some(subj)
+ }
+}
+
+/// Location in a source file a code comment refers to.
+#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
+pub struct SourceLoc {
+ /// Blob id of the file being commented on.
+ #[serde(with = "crate::git::serde::oid")]
+ pub file: git2::Oid,
+ /// Optional line range within the file.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub line: Option<Range<usize>>,
+}
+
+/// What a checkpoint note records.
+#[derive(Clone, Copy, Debug, serde::Serialize, serde::Deserialize)]
+#[serde(rename_all = "lowercase")]
+pub enum CheckpointKind {
+ Merge,
+ Snapshot,
+}
diff --git a/src/patches/record.rs b/src/patches/record.rs
new file mode 100644
index 0000000..6a95973
--- /dev/null
+++ b/src/patches/record.rs
@@ -0,0 +1,472 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use core::ops::Deref;
+use std::{
+ collections::{
+ BTreeMap,
+ BTreeSet,
+ },
+ fmt,
+ io::{
+ self,
+ BufRead,
+ },
+ path::{
+ Path,
+ PathBuf,
+ },
+ str::FromStr,
+};
+
+use anyhow::{
+ anyhow,
+ bail,
+ ensure,
+ Context,
+};
+
+use hex::{
+ FromHex,
+ ToHex,
+};
+
+use sha2::{
+ Digest,
+ Sha256,
+};
+use signature::{
+ Signature as _,
+ Verifier,
+};
+
+use super::{
+ traits::{
+ to_tree,
+ BlobData,
+ Foldable,
+ TreeData,
+ },
+ write_sharded,
+ Blob,
+ Bundle,
+ Topic,
+ BLOB_HEADS,
+ BLOB_META,
+ HTTP_HEADER_SIGNATURE,
+ TOPIC_MERGES,
+ TOPIC_SNAPSHOTS,
+};
+use crate::{
+ bundle,
+ error::NotFound,
+ git::{
+ self,
+ Refname,
+ },
+ iter::IteratorExt,
+ metadata::{
+ self,
+ identity,
+ ContentHash,
+ },
+};
+
+/// Identifier of a patch: the SHA-256 digest over the sorted set of the
+/// bundle's reference tips (hex-encoded in JSON).
+#[derive(Clone, Copy, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
+pub struct Heads(#[serde(with = "hex::serde")] [u8; 32]);
+
+impl Heads {
+ /// Commit message trailer carrying the patch id.
+ const TRAILER_PREFIX: &str = "Patch:";
+
+ /// Extract the patch id from the first `Patch:` trailer of `commit`,
+ /// if present.
+ pub fn from_commit(commit: &git2::Commit) -> crate::Result<Option<Self>> {
+ commit.message_raw_bytes().lines().try_find_map(|line| {
+ line?
+ .strip_prefix(Self::TRAILER_PREFIX)
+ .map(|s| Self::from_str(s.trim()).map_err(crate::Error::from))
+ .transpose()
+ })
+ }
+
+ /// Render `self` as a commit message trailer line.
+ pub fn as_trailer(&self) -> String {
+ format!("{} {}", Self::TRAILER_PREFIX, self)
+ }
+}
+
+impl Deref for Heads {
+ type Target = [u8; 32];
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+impl AsRef<[u8]> for Heads {
+ fn as_ref(&self) -> &[u8] {
+ &self.0
+ }
+}
+
+impl From<&bundle::Header> for Heads {
+ fn from(h: &bundle::Header) -> Self {
+ // Collect into a `BTreeSet` first, so the digest is independent of
+ // the order (and duplication) of the header's references.
+ let tips = h.references.values().collect::<BTreeSet<_>>();
+ let mut hasher = Sha256::new();
+ for sha in tips {
+ hasher.update(sha.as_bytes());
+ }
+ Self(hasher.finalize().into())
+ }
+}
+
+// Like `from_commit`, but a missing trailer is an error.
+impl TryFrom<&git2::Commit<'_>> for Heads {
+ type Error = crate::Error;
+
+ fn try_from(commit: &git2::Commit) -> Result<Self, Self::Error> {
+ Self::from_commit(commit)?.ok_or_else(|| {
+ anyhow!(NotFound {
+ what: "patch trailer",
+ whence: format!("commit {}", commit.id()),
+ })
+ })
+ }
+}
+
+impl FromStr for Heads {
+ type Err = hex::FromHexError;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ Self::from_hex(s)
+ }
+}
+
+impl FromHex for Heads {
+ type Error = hex::FromHexError;
+
+ fn from_hex<T: AsRef<[u8]>>(hex: T) -> Result<Self, Self::Error> {
+ <[u8; 32]>::from_hex(hex).map(Self)
+ }
+}
+
+// `Display` and `Debug` both render the lowercase hex digest.
+impl fmt::Display for Heads {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&hex::encode(self.0))
+ }
+}
+
+impl fmt::Debug for Heads {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&hex::encode(self.0))
+ }
+}
+
+// Stored in record trees as a hex-encoded blob named `BLOB_HEADS`.
+impl BlobData for Heads {
+ type Error = <[u8; 32] as FromHex>::Error;
+
+ const MAX_BYTES: usize = 64;
+
+ fn from_blob(data: &[u8]) -> Result<Self, Self::Error> {
+ Self::from_hex(data)
+ }
+
+ fn write_blob<W: io::Write>(&self, mut writer: W) -> io::Result<()> {
+ writer.write_all(self.encode_hex::<String>().as_bytes())
+ }
+}
+
+impl TreeData for Heads {
+ const BLOB_NAME: &'static str = BLOB_HEADS;
+}
+
+impl Foldable for Heads {
+ fn folded_name(&self) -> String {
+ self.encode_hex()
+ }
+}
+
+/// Signature over a patch's [`Heads`], together with the content hashes
+/// identifying the signer.
+#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
+pub struct Signature {
+ pub signer: metadata::ContentHash,
+ pub signature: metadata::Signature,
+}
+
+// Rendered as the `X-it-Signature` HTTP header:
+// `s1=<sha1>; s2=<sha2>; sd=<signature>`, all values hex-encoded.
+impl From<Signature> for tiny_http::Header {
+ fn from(s: Signature) -> Self {
+ let value = format!(
+ "s1={}; s2={}; sd={}",
+ hex::encode(s.signer.sha1),
+ hex::encode(s.signer.sha2),
+ hex::encode(s.signature.as_ref())
+ );
+
+ // Header name and hex value are always valid header bytes.
+ Self::from_bytes(HTTP_HEADER_SIGNATURE.as_bytes(), value).unwrap()
+ }
+}
+
+impl TryFrom<&tiny_http::Header> for Signature {
+ type Error = crate::Error;
+
+ /// Parse a [`Signature`] from an `X-it-Signature` header of the form
+ /// `s1=<sha1>; s2=<sha2>; sd=<signature>` (all values hex-encoded).
+ fn try_from(hdr: &tiny_http::Header) -> Result<Self, Self::Error> {
+ ensure!(
+ hdr.field.equiv(HTTP_HEADER_SIGNATURE),
+ "not a {HTTP_HEADER_SIGNATURE} header"
+ );
+
+ let mut sha1: Option<[u8; 20]> = None;
+ let mut sha2: Option<[u8; 32]> = None;
+ let mut signature = None;
+ for part in hdr.value.as_str().split(';') {
+ // `split_once` (rather than `split_at(2)`) strips the '='
+ // separator from the value -- which `from_hex` would reject --
+ // and cannot panic on segments shorter than two bytes.
+ match part.trim().split_once('=') {
+ Some(("s1", val)) => {
+ let bytes = <[u8; 20]>::from_hex(val)?;
+ sha1 = Some(bytes);
+ },
+ Some(("s2", val)) => {
+ let bytes = <[u8; 32]>::from_hex(val)?;
+ sha2 = Some(bytes);
+ },
+ Some(("sd", val)) => {
+ let bytes = hex::decode(val)?;
+ signature = Some(metadata::Signature::from_bytes(&bytes)?);
+ },
+
+ _ => continue,
+ }
+ }
+
+ let sha1 = sha1.ok_or_else(|| anyhow!("missing sha1 identity content hash"))?;
+ let sha2 = sha2.ok_or_else(|| anyhow!("missing sha2 identity content hash"))?;
+ let signature = signature.ok_or_else(|| anyhow!("missing signature bytes"))?;
+
+ Ok(Self {
+ signer: metadata::ContentHash { sha1, sha2 },
+ signature,
+ })
+ }
+}
+
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
+pub struct Meta {
+ pub bundle: BundleInfo,
+ pub signature: Signature,
+}
+
+impl BlobData for Meta {
+ type Error = serde_json::Error;
+
+ const MAX_BYTES: usize = 100_000;
+
+ fn from_blob(data: &[u8]) -> Result<Self, Self::Error> {
+ serde_json::from_slice(data)
+ }
+
+ fn write_blob<W: io::Write>(&self, writer: W) -> io::Result<()> {
+ serde_json::to_writer_pretty(writer, self).map_err(Into::into)
+ }
+}
+
+impl TreeData for Meta {
+ const BLOB_NAME: &'static str = BLOB_META;
+}
+
/// Encryption scheme a bundle payload may be wrapped in.
///
/// Serialized in lowercase ("age" / "gpg"), matching [`Encryption::as_str`].
#[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Encryption {
    Age,
    Gpg,
}
+
+impl Encryption {
+ pub fn as_str(&self) -> &str {
+ match self {
+ Self::Age => "age",
+ Self::Gpg => "gpg",
+ }
+ }
+}
+
+impl FromStr for Encryption {
+ type Err = serde_json::Error;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ serde_json::from_str(s)
+ }
+}
+
/// Description of a patch bundle as recorded in [`Meta`]: the bundle's
/// hashes and checksum info, its prerequisite objects and carried refs, and
/// the encryption scheme, if any.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct BundleInfo {
    #[serde(flatten)]
    pub info: bundle::Info,
    pub prerequisites: BTreeSet<bundle::ObjectId>,
    pub references: BTreeMap<Refname, bundle::ObjectId>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encryption: Option<Encryption>,
}
+
+impl BundleInfo {
+ pub fn as_expect(&self) -> bundle::Expect {
+ bundle::Expect::from(&self.info)
+ }
+}
+
+impl From<&Bundle> for BundleInfo {
+ fn from(bundle: &Bundle) -> Self {
+ let (prerequisites, references) = {
+ let h = bundle.header();
+ (h.prerequisites.clone(), h.references.clone())
+ };
+ Self {
+ info: bundle.info().clone(),
+ prerequisites,
+ references,
+ encryption: bundle.encryption(),
+ }
+ }
+}
+
/// Log record of a patch submission
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct Record {
    /// Topic the submission belongs to (carried as a commit-message trailer).
    pub topic: Topic,
    /// Hash over the bundle heads; this is the signed part of the record.
    pub heads: Heads,
    /// Bundle description and submitter signature.
    pub meta: Meta,
}
+
impl Record {
    /// Reconstruct a record from a commit on the drop history.
    ///
    /// The topic is read from the commit message trailer, the heads and
    /// meta blobs from the commit's tree. Errors if any of the three is
    /// missing.
    pub fn from_commit<'a>(
        repo: &'a git2::Repository,
        commit: &git2::Commit<'a>,
    ) -> crate::Result<Self> {
        let topic = Topic::from_commit(commit)?.ok_or_else(|| crate::error::NotFound {
            what: "topic",
            whence: format!("message of commit {}", commit.id()),
        })?;

        let tree = commit.tree()?;

        let mut heads: Option<Heads> = None;
        let mut meta: Option<Meta> = None;

        // Scan the top-level tree entries for the two well-known blobs.
        for entry in &tree {
            match entry.name() {
                Some(BLOB_HEADS) => {
                    heads = Some(Blob::<Heads>::from_entry(repo, entry)?.content);
                },
                Some(BLOB_META) => {
                    meta = Some(Blob::<Meta>::from_entry(repo, entry)?.content);
                },

                None | Some(_) => continue,
            }
        }

        let whence = || format!("tree {}", tree.id());
        let heads = heads.ok_or_else(|| crate::error::NotFound {
            what: BLOB_HEADS,
            whence: whence(),
        })?;
        let meta = meta.ok_or_else(|| crate::error::NotFound {
            what: BLOB_META,
            whence: whence(),
        })?;

        Ok(Self { topic, heads, meta })
    }

    /// Write this record as a signed commit onto the drop history.
    ///
    /// The commit tree carries the `ids` identity tree plus the heads and
    /// meta blobs. If `seen` is given, the heads blob is also registered in
    /// the sharded "seen" tree so duplicate submissions can be detected.
    pub fn commit<S>(
        &self,
        signer: &mut S,
        repo: &git2::Repository,
        ids: &git2::Tree,
        parent: Option<&git2::Commit>,
        seen: Option<&mut git2::TreeBuilder>,
    ) -> crate::Result<git2::Oid>
    where
        S: crate::keys::Signer,
    {
        let tree = {
            // Base the new tree on the parent's, if any, so unrelated
            // entries are carried forward.
            let mut tb = repo.treebuilder(parent.map(|p| p.tree()).transpose()?.as_ref())?;
            tb.insert("ids", ids.id(), git2::FileMode::Tree.into())?;
            to_tree(repo, &mut tb, &self.heads)?;
            to_tree(repo, &mut tb, &self.meta)?;
            repo.find_tree(tb.write()?)?
        };
        let oid = git::commit_signed(
            signer,
            repo,
            self.topic.as_trailer(),
            &tree,
            &parent.into_iter().collect::<Vec<_>>(),
        )?;

        if let Some(seen) = seen {
            write_sharded(
                repo,
                seen,
                &self.heads,
                tree.get_name(Heads::BLOB_NAME)
                    .expect("heads blob written above")
                    .id(),
            )?;
        }

        Ok(oid)
    }

    /// The bytes covered by the submitter's signature: the heads hash.
    pub fn signed_part(&self) -> [u8; 32] {
        *self.heads
    }

    /// Verify the record's signature against the signer identity resolved
    /// via `find_id`; succeeds if any key of that identity verifies.
    pub fn verify_signature<F>(&self, mut find_id: F) -> crate::Result<()>
    where
        F: FnMut(&ContentHash) -> crate::Result<identity::Verified>,
    {
        let signed_data = self.signed_part();
        let addr = &self.meta.signature.signer;
        let signature = &self.meta.signature.signature;
        let id =
            find_id(addr).with_context(|| format!("invalid or non-existent id at {:?}", addr))?;
        for key in id.identity().keys.values() {
            if key.verify(&signed_data, signature).is_ok() {
                return Ok(());
            }
        }
        bail!("signature key not in id at {:?}", addr);
    }

    pub fn bundle_info(&self) -> &BundleInfo {
        &self.meta.bundle
    }

    pub fn bundle_hash(&self) -> &bundle::Hash {
        &self.meta.bundle.info.hash
    }

    /// Path of the bundle file under `prefix`: `<prefix>/<hash>.<ext>`.
    pub fn bundle_path(&self, prefix: &Path) -> PathBuf {
        let mut p = prefix.join(self.bundle_hash().to_string());
        p.set_extension(bundle::FILE_EXTENSION);
        p
    }

    pub fn is_encrypted(&self) -> bool {
        self.meta.bundle.encryption.is_some()
    }

    /// Whether this record is on the well-known snapshots topic.
    pub fn is_snapshot(&self) -> bool {
        self.topic == *TOPIC_SNAPSHOTS
    }

    /// Whether this record is on the well-known merges topic.
    pub fn is_mergepoint(&self) -> bool {
        self.topic == *TOPIC_MERGES
    }

    /// Remove traces of a record from the given tree
    pub(crate) fn remove_from(tree: &mut git2::TreeBuilder) -> crate::Result<()> {
        if tree.get(Heads::BLOB_NAME)?.is_some() {
            tree.remove(Heads::BLOB_NAME)?;
        }
        if tree.get(Meta::BLOB_NAME)?.is_some() {
            tree.remove(Meta::BLOB_NAME)?;
        }

        Ok(())
    }
}
diff --git a/src/patches/state.rs b/src/patches/state.rs
new file mode 100644
index 0000000..220971d
--- /dev/null
+++ b/src/patches/state.rs
@@ -0,0 +1,231 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ io,
+ ops::Range,
+};
+
+use anyhow::{
+ anyhow,
+ ensure,
+ Context,
+};
+use log::warn;
+
+use super::{
+ Record,
+ TrackingBranch,
+};
+use crate::{
+ git::{
+ self,
+ if_not_found_none,
+ refs::{
+ self,
+ LockedRef,
+ },
+ Refname,
+ },
+ keys::VerificationKey,
+ metadata::{
+ self,
+ git::FromGit,
+ identity,
+ },
+ Result,
+};
+
/// Somewhat ad-hoc view of the tip of a drop
pub struct DropHead<'a> {
    /// The drop history ref itself.
    pub tip: git2::Reference<'a>,
    /// The `ids` subtree holding the signer identities.
    pub ids: git2::Tree<'a>,
    /// The verified drop metadata at the tip.
    pub meta: metadata::drop::Verified,
}
+
impl<'a> DropHead<'a> {
    /// Load the drop head pointed at by ref `name`: the ref, its `ids`
    /// identity tree, and the drop metadata verified against those ids.
    pub fn from_refname<S: AsRef<str>>(repo: &'a git2::Repository, name: S) -> crate::Result<Self> {
        let tip = repo.find_reference(name.as_ref())?;
        let root = tip.peel_to_tree()?;
        let ids = root
            .get_name("ids")
            .ok_or_else(|| anyhow!("invalid drop: 'ids' tree not found"))?
            .to_object(repo)?
            .into_tree()
            .map_err(|_| anyhow!("invalid drop: 'ids' tree is not a tree"))?;
        // Verify the drop metadata, resolving signer key sets from the
        // `ids` tree.
        let meta = metadata::Drop::from_tree(repo, &root)
            .context("error loading drop metadata")?
            .verified(metadata::git::find_parent(repo), |id| {
                metadata::identity::find_in_tree(repo, &ids, id)
                    .map(|verified| verified.into_parts().1.keys)
                    .map_err(|e| io::Error::new(io::ErrorKind::Other, e))
            })?;

        Ok(Self { tip, ids, meta })
    }
}
+
/// Queue ref updates on `tx` storing the refs carried by `record`'s bundle
/// under `<ref_prefix>/<heads>/...`, after checking each target object
/// actually exists in `odb`.
///
/// Returns the (sandboxed refname, target) pairs that were queued.
pub fn unbundle(
    odb: &git2::Odb,
    tx: &mut refs::Transaction,
    ref_prefix: &str,
    record: &Record,
) -> Result<Vec<(Refname, git2::Oid)>> {
    let reflog = format!("it: storing head from {}", record.bundle_hash());

    let mut updated = Vec::with_capacity(record.meta.bundle.references.len());
    for (name, oid) in &record.meta.bundle.references {
        let oid = git2::Oid::try_from(oid)?;
        ensure!(odb.exists(oid), "ref not actually in bundle: {oid} {name}");

        let by_heads = unbundled_ref(ref_prefix, record, name)?;
        tx.lock_ref(by_heads.clone())?
            .set_target(oid, reflog.clone());
        updated.push((by_heads, oid));
    }

    Ok(updated)
}
+
+pub fn unbundled_ref(prefix: &str, record: &Record, name: &Refname) -> Result<Refname> {
+ format!(
+ "{}/{}/{}",
+ prefix.trim_matches('/'),
+ record.heads,
+ name.trim_start_matches("refs/")
+ )
+ .try_into()
+ .map_err(Into::into)
+}
+
/// Merge the topic notes commit carried by `record` into the local topic
/// ref (`topics_ref`).
///
/// If the local ref does not exist yet, it is created on top of the
/// submitted commit. Otherwise an auto-merge commit is made, after
/// verifying that every new commit between the merge base and theirs is
/// signed by one of `submitter`'s keys.
pub fn merge_notes(
    repo: &git2::Repository,
    submitter: &identity::Verified,
    topics_ref: &LockedRef,
    record: &Record,
) -> Result<()> {
    let theirs: git2::Oid = record
        .meta
        .bundle
        .references
        .get(topics_ref.name())
        .ok_or_else(|| anyhow!("invalid record: missing '{topics_ref}'"))?
        .try_into()?;

    // Topic commits carry no content of their own; the payload is the
    // commit message (trailer), so an empty tree suffices.
    let tree = git::empty_tree(repo)?;
    let usr = repo.signature()?;
    let theirs_commit = repo.find_commit(theirs)?;
    match if_not_found_none(repo.find_reference(topics_ref.name()))? {
        None => {
            let msg = format!(
                "Create topic from '{theirs}'\n\n{}",
                record.heads.as_trailer()
            );
            let oid = repo.commit(None, &usr, &usr, &msg, &tree, &[&theirs_commit])?;
            topics_ref.set_target(oid, "it: create topic");
        },
        Some(ours_ref) => {
            let ours_commit = ours_ref.peel_to_commit()?;
            let ours = ours_commit.id();

            ensure!(ours != theirs, "illegal state: theirs equals ours ({ours})");

            // A missing merge base means the submitted history is
            // unrelated; reject rather than merge.
            let base = repo
                .merge_base(ours, theirs)
                .with_context(|| format!("{topics_ref}: {theirs} diverges from {ours}"))?;
            let theirs_commit = repo.find_commit(theirs)?;

            verify_commit_range(repo, submitter, theirs_commit.id()..base)?;

            let msg = format!(
                "Merge '{theirs}' into {}\n\n{}",
                record.topic,
                record.heads.as_trailer()
            );
            let oid = repo.commit(
                None,
                &usr,
                &usr,
                &msg,
                &tree,
                &[&ours_commit, &theirs_commit],
            )?;
            let reflog = format!("it: auto-merge from {theirs}");
            topics_ref.set_target(oid, reflog);
        },
    }

    Ok(())
}
+
/// Fast-forward the sandboxed tracking branches for which `submitter` holds
/// the branch role to the tips carried by `record`.
///
/// Divergent (non-descendant) updates are rejected. In a bare repo, the
/// actual branch is additionally pointed at the sandboxed one via a symref.
pub fn update_branches(
    repo: &git2::Repository,
    tx: &mut refs::Transaction,
    submitter: &identity::Verified,
    meta: &metadata::drop::Verified,
    record: &Record,
) -> Result<()> {
    // Only branches whose role lists the submitter's id are considered.
    let branches = meta
        .roles
        .branches
        .iter()
        .filter_map(|(name, role)| role.role.ids.contains(submitter.id()).then_some(name));
    for branch in branches {
        let sandboxed = match TrackingBranch::try_from(branch) {
            Ok(tracking) => tracking.into_refname(),
            Err(e) => {
                warn!("Skipping invalid branch {branch}: {e}");
                continue;
            },
        };

        if let Some(target) = record.meta.bundle.references.get(branch) {
            let target = git2::Oid::try_from(target)?;
            let locked = tx.lock_ref(sandboxed.clone())?;
            let reflog = format!(
                "it: update tip from {} by {}",
                record.bundle_hash(),
                submitter.id()
            );
            match if_not_found_none(repo.refname_to_id(&sandboxed))? {
                Some(ours) => {
                    // Fast-forward only.
                    ensure!(
                        repo.graph_descendant_of(target, ours)?,
                        "checkpoint branch {branch} diverges from previously recorded tip {target}"
                    );
                    locked.set_target(target, reflog);
                },
                None => locked.set_target(target, reflog),
            }

            if repo.is_bare() {
                tx.lock_ref(branch.clone())?
                    .set_symbolic_target(sandboxed, "it: symref auto-updated branch".to_owned());
            }
        }
    }

    Ok(())
}
+
/// Ensure every commit in `start..end` (walking first-parent history from
/// `start`, hiding `end`) carries a valid signature made by one of
/// `allowed`'s current keys.
fn verify_commit_range(
    repo: &git2::Repository,
    allowed: &identity::Verified,
    Range { start, end }: Range<git2::Oid>,
) -> Result<()> {
    let mut walk = repo.revwalk()?;
    walk.push(start)?;
    walk.hide(end)?;
    walk.simplify_first_parent()?;
    walk.set_sorting(git2::Sort::TOPOLOGICAL)?;
    for id in walk {
        // A good signature by a key outside the identity is still a reject.
        let pk = git::verify_commit_signature(repo, &id?)?;
        let keyid = VerificationKey::from(pk).keyid();
        ensure!(
            allowed.identity().keys.contains_key(&keyid),
            "good signature by unknown signer"
        );
    }

    Ok(())
}
diff --git a/src/patches/submit.rs b/src/patches/submit.rs
new file mode 100644
index 0000000..bca428b
--- /dev/null
+++ b/src/patches/submit.rs
@@ -0,0 +1,574 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ path::{
+ Path,
+ PathBuf,
+ },
+ str::FromStr,
+};
+
+use anyhow::{
+ anyhow,
+ bail,
+ ensure,
+ Context,
+};
+use globset::{
+ Glob,
+ GlobBuilder,
+ GlobSet,
+ GlobSetBuilder,
+};
+use log::info;
+use once_cell::sync::Lazy;
+use thiserror::Error;
+use tiny_http::Request;
+use url::Url;
+
+use super::{
+ bundle::Bundle,
+ record::{
+ self,
+ Heads,
+ Signature,
+ },
+ state,
+ Record,
+ Seen,
+ Topic,
+ HTTP_HEADER_SIGNATURE,
+ MAX_LEN_BUNDLE,
+ REF_IT_BUNDLES,
+ REF_IT_TOPICS,
+ TOPIC_MERGES,
+};
+use crate::{
+ bundle,
+ git::{
+ self,
+ if_not_found_none,
+ refs,
+ },
+ metadata::{
+ self,
+ git::{
+ FromGit,
+ GitMeta,
+ META_FILE_ID,
+ },
+ identity,
+ ContentHash,
+ Signed,
+ Verified,
+ },
+ Result,
+};
+
// Glob patterns classifying the ref names a patch bundle may carry.
pub static GLOB_HEADS: Lazy<Glob> = Lazy::new(|| Glob::new("refs/heads/**").unwrap());
pub static GLOB_TAGS: Lazy<Glob> = Lazy::new(|| Glob::new("refs/tags/**").unwrap());
pub static GLOB_NOTES: Lazy<Glob> = Lazy::new(|| Glob::new("refs/notes/**").unwrap());

// Topic refs: exactly one path segment below REF_IT_TOPICS
// (literal_separator makes '*' stop at '/').
pub static GLOB_IT_TOPICS: Lazy<Glob> = Lazy::new(|| {
    GlobBuilder::new(&format!("{}/*", REF_IT_TOPICS))
        .literal_separator(true)
        .build()
        .unwrap()
});
// Identity refs: exactly one path segment below refs/it/ids.
pub static GLOB_IT_IDS: Lazy<Glob> = Lazy::new(|| {
    GlobBuilder::new("refs/it/ids/*")
        .literal_separator(true)
        .build()
        .unwrap()
});
// Sandboxed refs of previously accepted bundles.
pub static GLOB_IT_BUNDLES: Lazy<Glob> =
    Lazy::new(|| Glob::new(&format!("{}/**", REF_IT_BUNDLES)).unwrap());

// Default set of ref patterns a submission is allowed to carry; see
// `AcceptOptions::allowed_refs`.
pub static ALLOWED_REFS: Lazy<GlobSet> = Lazy::new(|| {
    GlobSetBuilder::new()
        .add(GLOB_HEADS.clone())
        .add(GLOB_TAGS.clone())
        .add(GLOB_NOTES.clone())
        .add(GLOB_IT_TOPICS.clone())
        .add(GLOB_IT_IDS.clone())
        .build()
        .unwrap()
});
+
/// Arguments to [`Submission::try_accept`].
pub struct AcceptArgs<'a, S> {
    /// The prefix under which to store the refs contained in the bundle
    pub unbundle_prefix: &'a str,
    /// The refname of the drop history
    pub drop_ref: &'a str,
    /// The refname anchoring the seen objects tree
    pub seen_ref: &'a str,
    /// The repo to operate on
    pub repo: &'a git2::Repository,
    /// The signer for the drop history
    pub signer: &'a mut S,
    /// IPFS API address
    pub ipfs_api: Option<&'a Url>,
    /// Options
    pub options: AcceptOptions,
}
+
/// Policy knobs for [`Submission::try_accept`]; see [`AcceptOptions::default`].
pub struct AcceptOptions {
    /// Allow bundles to convey "fat" packs, ie. packs which do not have any
    /// prerequisites
    ///
    /// Default: false
    pub allow_fat_pack: bool,
    /// Allow encrypted bundles
    ///
    /// Default: false
    pub allow_encrypted: bool,
    /// Allowed ref name patterns
    ///
    /// Default:
    ///
    /// - refs/heads/**
    /// - refs/tags/**
    /// - refs/notes/**
    /// - refs/it/topics/*
    /// - refs/it/ids/*
    pub allowed_refs: GlobSet,
    /// Maximum number of branches the bundle is allowed to carry
    ///
    /// A branch is a ref which starts with `refs/heads/`.
    ///
    /// Default: 1
    pub max_branches: usize,
    /// Maximum number of tags the bundle is allowed to carry
    ///
    /// A tag is a ref which starts with `refs/tags/`.
    ///
    /// Default: 1
    pub max_tags: usize,
    /// Maximum number of git notes refs the bundle is allowed to carry
    ///
    /// A notes ref is a ref which starts with `refs/notes/`.
    ///
    /// Default: 1
    pub max_notes: usize,
    /// Maximum number of refs in the bundle, considering all refs
    ///
    /// Default: 10
    pub max_refs: usize,
    /// Maximum number of commits a bundle ref can have
    ///
    /// Default: 20
    pub max_commits: usize,
}
+
+impl Default for AcceptOptions {
+ fn default() -> Self {
+ Self {
+ allow_fat_pack: false,
+ allow_encrypted: false,
+ allowed_refs: ALLOWED_REFS.clone(),
+ max_branches: 1,
+ max_tags: 1,
+ max_notes: 1,
+ max_refs: 10,
+ max_commits: 20,
+ }
+ }
+}
+
/// A patch submission: the signature over the bundle heads plus the bundle
/// itself, as received over HTTP or about to be sent.
pub struct Submission {
    pub signature: Signature,
    pub bundle: Bundle,
}
+
+impl Submission {
+ pub fn from_http<P>(bundle_dir: P, req: &mut Request) -> Result<Self>
+ where
+ P: AsRef<Path>,
+ {
+ let len = req
+ .body_length()
+ .ok_or_else(|| anyhow!("chunked body not permitted"))?;
+ ensure!(
+ len <= MAX_LEN_BUNDLE,
+ "submitted patch bundle exceeds {MAX_LEN_BUNDLE}",
+ );
+
+ let mut signature = None;
+
+ for hdr in req.headers() {
+ if hdr.field.equiv(HTTP_HEADER_SIGNATURE) {
+ let sig = Signature::try_from(hdr)?;
+ signature = Some(sig);
+ break;
+ }
+ }
+
+ #[derive(Debug, Error)]
+ #[error("missing header {0}")]
+ struct Missing(&'static str);
+
+ let signature = signature.ok_or(Missing(HTTP_HEADER_SIGNATURE))?;
+ let bundle = Bundle::copy(req.as_reader(), bundle_dir)?;
+
+ Ok(Self { signature, bundle })
+ }
+
+ pub fn submit(self, mut base_url: Url) -> Result<Record> {
+ base_url
+ .path_segments_mut()
+ .map_err(|()| anyhow!("invalid url"))?
+ .push("patches");
+ let tiny_http::Header {
+ field: sig_hdr,
+ value: sig,
+ } = self.signature.into();
+ let req = ureq::request_url("POST", &base_url)
+ .set("Content-Length", &self.bundle.info.len.to_string())
+ .set(sig_hdr.as_str().as_str(), sig.as_str());
+ let res = req.send(self.bundle.reader()?)?;
+
+ Ok(res.into_json()?)
+ }
+
+ pub fn try_accept<S>(
+ &mut self,
+ AcceptArgs {
+ unbundle_prefix,
+ drop_ref,
+ seen_ref,
+ repo,
+ signer,
+ ipfs_api,
+ options,
+ }: AcceptArgs<S>,
+ ) -> Result<Record>
+ where
+ S: crate::keys::Signer,
+ {
+ ensure!(
+ unbundle_prefix.starts_with("refs/"),
+ "prefix must start with 'refs/'"
+ );
+ ensure!(
+ !self.bundle.is_encrypted() || options.allow_encrypted,
+ "encrypted bundle rejected"
+ );
+
+ let header = &self.bundle.header;
+
+ ensure!(
+ matches!(header.object_format, bundle::ObjectFormat::Sha1),
+ "object-format {} not (yet) supported",
+ header.object_format
+ );
+ ensure!(
+ !header.prerequisites.is_empty() || options.allow_fat_pack,
+ "thin pack required"
+ );
+ ensure!(
+ header.references.len() <= options.max_refs,
+ "max number of refs exceeded"
+ );
+ let topic = {
+ let mut topic: Option<Topic> = None;
+
+ let mut heads = 0;
+ let mut tags = 0;
+ let mut notes = 0;
+ static GIT_IT: Lazy<GlobSet> = Lazy::new(|| {
+ GlobSetBuilder::new()
+ .add(GLOB_HEADS.clone())
+ .add(GLOB_TAGS.clone())
+ .add(GLOB_NOTES.clone())
+ .add(GLOB_IT_TOPICS.clone())
+ .build()
+ .unwrap()
+ });
+ let mut matches = Vec::with_capacity(1);
+ for r in header.references.keys() {
+ let cand = globset::Candidate::new(r);
+ ensure!(
+ options.allowed_refs.is_match_candidate(&cand),
+ "unconventional ref rejected: {r}"
+ );
+ GIT_IT.matches_candidate_into(&cand, &mut matches);
+ match &matches[..] {
+ [] => {},
+ [0] => heads += 1,
+ [1] => tags += 1,
+ [2] => notes += 1,
+ [3] => {
+ ensure!(topic.is_none(), "more than one topic");
+ match r.split('/').next_back() {
+ None => bail!("invalid notes '{r}': missing topic"),
+ Some(s) => {
+ let t = Topic::from_str(s).context("invalid topic")?;
+ topic = Some(t);
+ },
+ }
+ },
+ x => unreachable!("impossible match: {x:?}"),
+ }
+ }
+ ensure!(
+ heads <= options.max_branches,
+ "max number of git branches exceeded"
+ );
+ ensure!(tags <= options.max_tags, "max number of git tags exceeded");
+ ensure!(
+ notes <= options.max_notes,
+ "max number of git notes exceeded"
+ );
+
+ topic.ok_or_else(|| anyhow!("missing '{}'", GLOB_IT_TOPICS.glob()))?
+ };
+ let heads = Heads::from(header);
+
+ let mut tx = refs::Transaction::new(repo)?;
+ let seen_ref = tx.lock_ref(seen_ref.parse()?)?;
+ let seen_tree = match if_not_found_none(repo.find_reference(seen_ref.name()))? {
+ Some(seen) => seen.peel_to_tree()?,
+ None => git::empty_tree(repo)?,
+ };
+ ensure!(!heads.in_tree(&seen_tree)?, "submission already exists");
+
+ // In a bare drop, indexing the pack is enough to detect missing
+ // prerequisites (ie. delta bases). Otherwise, or if the bundle is
+ // encrypted, we need to look for merge bases from the previously
+ // accepted patches.
+ if !repo.is_bare() || self.bundle.is_encrypted() {
+ let mut prereqs = header
+ .prerequisites
+ .iter()
+ .map(git2::Oid::try_from)
+ .collect::<std::result::Result<Vec<_>, _>>()?;
+
+ for r in repo.references_glob(GLOB_IT_BUNDLES.glob())? {
+ let commit = r?.peel_to_commit()?.id();
+ for (i, id) in prereqs.clone().into_iter().enumerate() {
+ if if_not_found_none(repo.merge_base(commit, id))?.is_some() {
+ prereqs.swap_remove(i);
+ }
+ }
+ if prereqs.is_empty() {
+ break;
+ }
+ }
+
+ ensure!(
+ prereqs.is_empty(),
+ "prerequisite commits not found, try checkpointing a branch or \
+ base the patch on a previous one: {}",
+ prereqs
+ .iter()
+ .map(ToString::to_string)
+ .collect::<Vec<_>>()
+ .join(", ")
+ );
+ }
+
+ let odb = repo.odb()?;
+ if !self.bundle.is_encrypted() {
+ let mut pack = self.bundle.packdata()?;
+ pack.index(&odb)?;
+
+ let prereqs = header
+ .prerequisites
+ .iter()
+ .map(git2::Oid::try_from)
+ .collect::<std::result::Result<Vec<_>, _>>()?;
+ let mut walk = repo.revwalk()?;
+ for (name, oid) in &header.references {
+ walk.push(oid.try_into()?)?;
+ for hide in &prereqs {
+ walk.hide(*hide)?;
+ }
+ let mut cnt = 0;
+ for x in &mut walk {
+ let _ = x?;
+ cnt += 1;
+ ensure!(
+ cnt <= options.max_commits,
+ "{name} exceeds configured max number of commits ({})",
+ options.max_commits
+ );
+ }
+ walk.reset()?;
+ }
+ }
+
+ if let Some(url) = ipfs_api {
+ let ipfs = self.bundle.ipfs_add(url)?;
+ info!("Published bundle to IPFS as {ipfs}");
+ }
+
+ let record = Record {
+ topic,
+ heads,
+ meta: record::Meta {
+ bundle: record::BundleInfo::from(&self.bundle),
+ signature: self.signature.clone(),
+ },
+ };
+
+ let drop_ref = tx.lock_ref(drop_ref.parse()?)?;
+ let mut drop = state::DropHead::from_refname(repo, drop_ref.name())?;
+ ensure!(
+ drop.meta.roles.snapshot.threshold.get() == 1,
+ "threshold signatures for drop snapshots not yet supported"
+ );
+ ensure!(
+ is_signer_eligible(signer, repo, &drop.ids, &drop.meta)?,
+ "supplied signer does not have the 'snapshot' role needed to record patches"
+ );
+
+ let submitter = {
+ let mut id = Identity::find(repo, &drop.ids, &self.signature.signer)?;
+ id.verify_signature(&record.signed_part(), &self.signature)?;
+ if let Some(updated) = id.update(repo, &drop.ids)? {
+ drop.ids = updated;
+ }
+ id.verified
+ };
+
+ let mut seen = repo.treebuilder(Some(&seen_tree))?;
+ let new_head = record.commit(
+ signer,
+ repo,
+ &drop.ids,
+ Some(&drop.tip.peel_to_commit()?),
+ Some(&mut seen),
+ )?;
+ drop_ref.set_target(new_head, format!("commit: {}", record.topic));
+ seen_ref.set_target(seen.write()?, format!("it: update to record {}", new_head));
+
+ if !self.bundle.is_encrypted() {
+ state::unbundle(&odb, &mut tx, unbundle_prefix, &record)?;
+ let topic_ref = tx.lock_ref(record.topic.as_refname())?;
+ state::merge_notes(repo, &submitter, &topic_ref, &record)?;
+ if record.topic == *TOPIC_MERGES {
+ state::update_branches(repo, &mut tx, &submitter, &drop.meta, &record)?;
+ }
+ }
+
+ tx.commit()?;
+
+ Ok(record)
+ }
+}
+
/// Whether `signer`'s key belongs to any identity holding the drop's
/// 'snapshot' role.
fn is_signer_eligible<S>(
    signer: &S,
    repo: &git2::Repository,
    ids: &git2::Tree,
    meta: &Verified<metadata::Drop>,
) -> Result<bool>
where
    S: crate::keys::Signer,
{
    let signer_id = metadata::KeyId::from(signer.ident());
    for id in &meta.roles.snapshot.ids {
        let s = metadata::identity::find_in_tree(repo, ids, id)?;
        if s.identity().keys.contains_key(&signer_id) {
            return Ok(true);
        }
    }

    Ok(false)
}
+
/// A submitter identity resolved against the drop's ids tree.
struct Identity {
    /// The verified (and newest known) identity metadata.
    verified: identity::Verified,
    /// The signed metadata to fold into the ids tree, if the submitted
    /// version is newer than the one in the tree.
    to_update: Option<Signed<metadata::Identity>>,
}
+
impl Identity {
    /// Resolve the identity metadata at content hash `hash` and reconcile
    /// it with what the drop's `ids` tree already contains.
    ///
    /// Whichever of the two is newer (by metadata ancestry) wins; if the
    /// submitted one is newer, it is remembered for a later [`Self::update`].
    /// Diverged (neither-is-ancestor) versions are an error.
    fn find(repo: &git2::Repository, ids: &git2::Tree, hash: &ContentHash) -> Result<Self> {
        let find_parent = metadata::git::find_parent(repo);

        let (theirs_hash, theirs_signed, theirs) = metadata::Identity::from_content_hash(
            repo, hash,
        )
        .and_then(|GitMeta { hash, signed }| {
            // Keep an unverified copy around: it may need to be written
            // back into the ids tree.
            let signed_dup = signed.clone();
            let verified = signed.verified(&find_parent)?;
            Ok((hash, signed_dup, verified))
        })?;

        let tree_path = PathBuf::from(theirs.id().to_string()).join(META_FILE_ID);
        let newer = match if_not_found_none(ids.get_path(&tree_path))? {
            // Not in the tree yet: the submitted version is to be added.
            None => Self {
                verified: theirs,
                to_update: Some(theirs_signed),
            },
            // Identical content: nothing to do.
            Some(in_tree) if theirs_hash == in_tree.id() => Self {
                verified: theirs,
                to_update: None,
            },
            // Differing content: decide by ancestry.
            Some(in_tree) => {
                let (ours_hash, ours) = metadata::Identity::from_blob(
                    &repo.find_blob(in_tree.id())?,
                )
                .and_then(|GitMeta { hash, signed }| {
                    let ours = signed.verified(&find_parent)?;
                    Ok((hash, ours))
                })?;

                if ours.identity().has_ancestor(&theirs_hash, &find_parent)? {
                    Self {
                        verified: ours,
                        to_update: None,
                    }
                } else if theirs.identity().has_ancestor(&ours_hash, &find_parent)? {
                    Self {
                        verified: theirs,
                        to_update: Some(theirs_signed),
                    }
                } else {
                    bail!(
                        "provided signer id at {} diverges from known id at {}",
                        theirs_hash,
                        ours_hash,
                    );
                }
            },
        };

        Ok(newer)
    }

    /// Ensure `sig` over `msg` verifies with one of the identity's current
    /// keys.
    fn verify_signature(&self, msg: &[u8], sig: &Signature) -> Result<()> {
        ensure!(
            self.verified.did_sign(msg, &sig.signature),
            "signature not valid for current keys in id {}, provided signer at {}",
            self.verified.id(),
            sig.signer
        );
        Ok(())
    }

    /// If a newer identity version was submitted, fold it into `root` and
    /// return the rewritten tree; otherwise return `None`.
    fn update<'a>(
        &mut self,
        repo: &'a git2::Repository,
        root: &git2::Tree,
    ) -> Result<Option<git2::Tree<'a>>> {
        if let Some(meta) = self.to_update.take() {
            let mut new_root = repo.treebuilder(Some(root))?;
            let mut id_tree = repo.treebuilder(None)?;
            metadata::identity::fold_to_tree(repo, &mut id_tree, meta)?;
            new_root.insert(
                self.verified.id().to_string().as_str(),
                id_tree.write()?,
                git2::FileMode::Tree.into(),
            )?;

            let oid = new_root.write()?;
            let tree = repo.find_tree(oid).map(Some)?;

            return Ok(tree);
        }

        Ok(None)
    }
}
diff --git a/src/patches/traits.rs b/src/patches/traits.rs
new file mode 100644
index 0000000..ef9ae61
--- /dev/null
+++ b/src/patches/traits.rs
@@ -0,0 +1,165 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ io,
+ path::{
+ Path,
+ PathBuf,
+ },
+};
+
+use super::error;
+use crate::git::{
+ self,
+ if_not_found_none,
+};
+
/// A type which can be (de)serialized as a single git blob.
pub trait BlobData: Sized {
    type Error;

    /// Maximum blob size accepted when reading; larger blobs are rejected.
    const MAX_BYTES: usize;

    fn from_blob(data: &[u8]) -> Result<Self, Self::Error>;
    fn write_blob<W: io::Write>(&self, writer: W) -> io::Result<()>;
}
+
/// A [`BlobData`] which lives at a well-known entry name inside a tree.
pub trait TreeData: BlobData {
    const BLOB_NAME: &'static str;
}
+
/// A decoded blob together with the oid it was read from.
pub struct Blob<T> {
    pub oid: git2::Oid,
    pub content: T,
}
+
impl<T> Blob<T>
where
    T: TreeData,
    T::Error: Into<crate::Error>,
{
    /// Read and decode `T` from its well-known entry in `tree`.
    pub fn from_tree<'a>(
        repo: &'a git2::Repository,
        tree: &git2::Tree<'a>,
    ) -> Result<Blob<T>, error::FromTree> {
        use error::FromTree::NotFound;

        let entry = tree
            .get_name(T::BLOB_NAME)
            .ok_or(NotFound { name: T::BLOB_NAME })?;
        Self::from_entry(repo, entry)
    }

    /// Decode `T` from a tree entry, enforcing that the entry is a blob no
    /// larger than [`BlobData::MAX_BYTES`].
    pub fn from_entry<'a>(
        repo: &'a git2::Repository,
        entry: git2::TreeEntry<'a>,
    ) -> Result<Self, error::FromTree> {
        use error::FromTree::{
            BlobSize,
            TypeConversion,
            TypeMismatch,
        };

        let blob = entry
            .to_object(repo)?
            .into_blob()
            .map_err(|obj| TypeMismatch {
                name: T::BLOB_NAME,
                kind: obj.kind(),
            })?;
        // Check the size before attempting deserialization.
        let sz = blob.size();
        if sz > T::MAX_BYTES {
            return Err(BlobSize {
                max: T::MAX_BYTES,
                found: sz,
            });
        }
        let content = T::from_blob(blob.content())
            .map_err(Into::into)
            .map_err(TypeConversion)?;

        Ok(Self {
            oid: entry.id(),
            content,
        })
    }
}
+
/// A value with a stable name under which it is folded into a sharded tree
/// (see [`write_sharded`] / [`shard_path`]).
pub trait Foldable {
    fn folded_name(&self) -> String;
}
+
/// Presence checks for a value previously recorded as a blob.
pub trait Seen {
    /// Whether the value's blob exists in the object database.
    fn in_odb(&self, odb: &git2::Odb) -> git::Result<bool>;
    /// Whether the value is present in a sharded "seen" tree.
    fn in_tree(&self, tree: &git2::Tree) -> git::Result<bool>;
}
+
// Any foldable blob value can be checked for presence: by its blob hash in
// the odb, or by its sharded path in a seen tree.
impl<T> Seen for T
where
    T: BlobData + Foldable,
{
    fn in_odb(&self, odb: &git2::Odb) -> git::Result<bool> {
        let hash = blob_hash(self)?;
        Ok(odb.exists(hash))
    }

    fn in_tree(&self, tree: &git2::Tree) -> git::Result<bool> {
        let path = shard_path(&self.folded_name());
        Ok(if_not_found_none(tree.get_path(&path))?.is_some())
    }
}
+
+pub fn to_tree<T: TreeData>(
+ repo: &git2::Repository,
+ tree: &mut git2::TreeBuilder,
+ data: &T,
+) -> git::Result<()> {
+ tree.insert(
+ T::BLOB_NAME,
+ to_blob(repo, data)?,
+ git2::FileMode::Blob.into(),
+ )?;
+ Ok(())
+}
+
/// Serialize `data` into a new blob in `repo`, returning its oid.
pub fn to_blob<T: BlobData>(repo: &git2::Repository, data: &T) -> git::Result<git2::Oid> {
    let mut writer = repo.blob_writer(None)?;
    // Map serialization failures into a git2 error so the signature stays
    // within `git::Result`.
    data.write_blob(&mut writer).map_err(|e| {
        git2::Error::new(
            git2::ErrorCode::GenericError,
            git2::ErrorClass::Object,
            e.to_string(),
        )
    })?;
    writer.commit()
}
+
/// Compute the git blob hash of `data` without writing it to a repository.
pub fn blob_hash<T: BlobData>(data: &T) -> git::Result<git2::Oid> {
    let mut buf = Vec::new();
    // Writing to a Vec cannot raise io errors; the unwrap assumes
    // `write_blob` has no other failure mode for the type at hand (a
    // serializer error would still panic here — see `to_blob` for the
    // non-panicking path).
    data.write_blob(&mut buf).unwrap();
    git::blob_hash(&buf)
}
+
/// Insert `blob` into the sharded tree `root` under `item`'s folded name:
/// the first two characters name the subtree, the rest the blob entry.
pub fn write_sharded<F: Foldable>(
    repo: &git2::Repository,
    root: &mut git2::TreeBuilder,
    item: &F,
    blob: git2::Oid,
) -> git::Result<()> {
    let name = item.folded_name();
    let (pre, suf) = name.split_at(2);
    // Load the existing shard subtree, if any, so siblings are preserved.
    let shard = root
        .get(pre)?
        .map(|entry| entry.to_object(repo))
        .transpose()?;
    let mut sub = repo.treebuilder(shard.as_ref().and_then(git2::Object::as_tree))?;
    sub.insert(suf, blob, git2::FileMode::Blob.into())?;
    root.insert(pre, sub.write()?, git2::FileMode::Tree.into())?;

    Ok(())
}
+
/// Sharded path of a folded name: `abcdef…` becomes `ab/cdef…`.
///
/// NOTE(review): `split_at(2)` panics for names shorter than two bytes or
/// when byte 2 is not a char boundary — callers pass fixed-width hex names.
pub fn shard_path(name: &str) -> PathBuf {
    let (dir, rest) = name.split_at(2);
    PathBuf::from(dir).join(rest)
}
diff --git a/src/serde.rs b/src/serde.rs
new file mode 100644
index 0000000..cbbf6a9
--- /dev/null
+++ b/src/serde.rs
@@ -0,0 +1,28 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
pub mod display {
    //! Serde helpers to (de)serialize values through their
    //! `Display`/`FromStr` string form (for use with `#[serde(with)]`).

    use std::{
        fmt,
        str::FromStr,
    };

    /// Serialize any `ToString` value as a string.
    pub fn serialize<T, S>(v: &T, serializer: S) -> Result<S::Ok, S::Error>
    where
        T: ToString,
        S: serde::Serializer,
    {
        serializer.serialize_str(&v.to_string())
    }

    /// Deserialize via `FromStr`.
    ///
    /// NOTE(review): deserializes into a borrowed `&str`, which fails for
    /// inputs the format cannot lend verbatim (e.g. JSON strings containing
    /// escapes) — confirm this is acceptable for the intended call sites.
    #[allow(unused)]
    pub fn deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
    where
        T: FromStr,
        T::Err: fmt::Display,
        D: serde::Deserializer<'de>,
    {
        let s: &str = serde::Deserialize::deserialize(deserializer)?;
        s.parse().map_err(serde::de::Error::custom)
    }
}
diff --git a/src/ssh.rs b/src/ssh.rs
new file mode 100644
index 0000000..3019d45
--- /dev/null
+++ b/src/ssh.rs
@@ -0,0 +1,5 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+pub mod agent;
+pub use ssh_key::*;
diff --git a/src/ssh/agent.rs b/src/ssh/agent.rs
new file mode 100644
index 0000000..c29ad62
--- /dev/null
+++ b/src/ssh/agent.rs
@@ -0,0 +1,279 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use std::{
+ env,
+ io::{
+ self,
+ ErrorKind::*,
+ },
+};
+
+use anyhow::Context;
+use ssh_encoding::{
+ CheckedSum,
+ Decode,
+ Encode,
+ Reader,
+ Writer,
+};
+use ssh_key::{
+ public::KeyData,
+ Algorithm,
+ HashAlg,
+ PublicKey,
+ Signature,
+};
+
#[cfg(unix)]
pub use std::os::unix::net::UnixStream;
// Fixed typo: the `uds_windows` crate exports `UnixStream`, not `UnixStram`;
// as written the windows build could not compile.
#[cfg(windows)]
pub use uds_windows::UnixStream;
+
/// Environment variable naming the agent's unix socket path.
const SSH_AUTH_SOCK: &str = "SSH_AUTH_SOCK";

/// Upper bound on an agent reply we are willing to buffer — a defensive
/// limit against a misbehaving agent (matches OpenSSH's reply limit).
const MAX_AGENT_REPLY_LEN: usize = 256 * 1024;

// ssh-agent protocol message numbers and signature flag bits, per
// draft-miller-ssh-agent.
const SSH_AGENTC_REQUEST_IDENTITIES: u8 = 11;
const SSH_AGENTC_SIGN_REQUEST: u8 = 13;
const SSH_AGENT_FAILURE: u8 = 5;
const SSH_AGENT_IDENTITIES_ANSWER: u8 = 12;
const SSH_AGENT_RSA_SHA2_256: u32 = 2;
const SSH_AGENT_RSA_SHA2_512: u32 = 4;
const SSH_AGENT_SIGN_RESPONSE: u8 = 14;
+
/// A minimal ssh-agent client, generic over the transport (any
/// `io::Read + io::Write`; in practice a unix domain socket, cf.
/// `Client::from_env`).
pub struct Client<T> {
    conn: T,
}
+
+impl Client<UnixStream> {
+ pub fn from_env() -> io::Result<Self> {
+ let path = env::var_os(SSH_AUTH_SOCK).ok_or_else(|| {
+ io::Error::new(
+ io::ErrorKind::AddrNotAvailable,
+ "SSH_AUTH_SOCK environment variable not set",
+ )
+ })?;
+ UnixStream::connect(path).map(Self::from)
+ }
+}
+
// Wrap an owned socket as a client.
impl From<UnixStream> for Client<UnixStream> {
    fn from(conn: UnixStream) -> Self {
        Self { conn }
    }
}

// Wrap a borrowed socket as a client (allows reusing a connection without
// taking ownership).
impl<'a> From<&'a UnixStream> for Client<&'a UnixStream> {
    fn from(conn: &'a UnixStream) -> Self {
        Self { conn }
    }
}
+
impl<T> Client<T>
where
    T: io::Read + io::Write,
{
    /// Asks the agent to sign `msg` with the private key corresponding to
    /// the given public `key`.
    pub fn sign(&mut self, key: &PublicKey, msg: impl AsRef<[u8]>) -> io::Result<Signature> {
        request(
            &mut self.conn,
            SignRequest {
                key,
                msg: msg.as_ref(),
            },
        )
        .map(|SignResponse { sig }| sig)
    }

    /// Lists the public keys currently held by the agent.
    pub fn list_keys(&mut self) -> io::Result<Vec<PublicKey>> {
        request(&mut self.conn, RequestIdentities).map(|IdentitiesAnswer { keys }| keys)
    }
}
+
/// An agent request message, tied to the response type it elicits.
trait Request: Encode<Error = crate::Error> {
    type Response: Response;
}

/// An agent response message, tagged with the protocol message number that
/// signals success for the corresponding request.
trait Response: Decode<Error = crate::Error> {
    const SUCCESS: u8;
}
+
/// Sends `req` over `io` and decodes the reply.
///
/// The first byte of the reply is its message number: the response type's
/// `SUCCESS` tag yields a decoded `T::Response`, `SSH_AGENT_FAILURE` becomes
/// an error, and any other tag is reported as an unknown response.
fn request<I, T>(mut io: I, req: T) -> io::Result<T::Response>
where
    I: io::Read + io::Write,
    T: Request,
{
    send(&mut io, req)?;
    let resp = recv(&mut io)?;
    let mut reader = resp.as_slice();
    match u8::decode(&mut reader).map_err(|_| unknown_response())? {
        x if x == T::Response::SUCCESS => T::Response::decode(&mut reader).map_err(decode),
        SSH_AGENT_FAILURE => Err(agent_error()),
        _ => Err(unknown_response()),
    }
}
+
+fn send<W, T>(mut io: W, req: T) -> io::Result<()>
+where
+ W: io::Write,
+ T: Encode<Error = crate::Error>,
+{
+ let len = req.encoded_len_prefixed().map_err(encode)?;
+ let mut buf = Vec::with_capacity(len);
+ req.encode_prefixed(&mut buf).map_err(encode)?;
+
+ io.write_all(&buf)?;
+ io.flush()?;
+
+ Ok(())
+}
+
+fn recv<R: io::Read>(mut io: R) -> io::Result<Vec<u8>> {
+ let want = {
+ let mut buf = [0; 4];
+ io.read_exact(&mut buf)?;
+ u32::from_be_bytes(buf) as usize
+ };
+
+ if want < 1 {
+ return Err(incomplete_response());
+ }
+ if want > MAX_AGENT_REPLY_LEN {
+ return Err(reponse_too_large());
+ }
+
+ let mut buf = vec![0; want];
+ io.read_exact(&mut buf)?;
+
+ Ok(buf)
+}
+
/// SSH_AGENTC_SIGN_REQUEST: ask the agent to sign `msg` with the private
/// key matching `key`.
struct SignRequest<'a> {
    key: &'a PublicKey,
    msg: &'a [u8],
}

impl Request for SignRequest<'_> {
    type Response = SignResponse;
}
+
impl Encode for SignRequest<'_> {
    type Error = crate::Error;

    // NOTE: the components summed here must stay in sync with the fields
    // written by `encode` below.
    fn encoded_len(&self) -> Result<usize, Self::Error> {
        Ok([
            self.key.key_data().encoded_len_prefixed()?,
            self.msg.encoded_len()?,
            SSH_AGENTC_SIGN_REQUEST.encoded_len()?,
            4, // flags
        ]
        .checked_sum()?)
    }

    fn encode(&self, writer: &mut impl Writer) -> Result<(), Self::Error> {
        // Wire layout: message number, length-prefixed key blob, message to
        // sign, then a u32 of signature flags.
        SSH_AGENTC_SIGN_REQUEST.encode(writer)?;
        self.key.key_data().encode_prefixed(writer)?;
        self.msg.encode(writer)?;
        // For RSA keys, request a SHA-2 based signature via the flag bits;
        // all other key types take no flags.
        let flags = match self.key.algorithm() {
            Algorithm::Rsa { hash } => match hash {
                Some(HashAlg::Sha256) => SSH_AGENT_RSA_SHA2_256,
                _ => SSH_AGENT_RSA_SHA2_512, // sane default
            },
            _ => 0,
        };
        flags.encode(writer)?;
        Ok(())
    }
}
+
/// SSH_AGENT_SIGN_RESPONSE: the signature produced for a `SignRequest`.
struct SignResponse {
    sig: Signature,
}

impl Response for SignResponse {
    const SUCCESS: u8 = SSH_AGENT_SIGN_RESPONSE;
}

impl Decode for SignResponse {
    type Error = crate::Error;

    fn decode(reader: &mut impl Reader) -> Result<Self, Self::Error> {
        // The signature blob is length-prefixed on the wire.
        let sig = reader.read_prefixed(Signature::decode)?;
        Ok(Self { sig })
    }
}
+
/// SSH_AGENTC_REQUEST_IDENTITIES: ask the agent for the keys it holds.
/// Carries no payload — the encoding is the bare message number.
struct RequestIdentities;

impl Request for RequestIdentities {
    type Response = IdentitiesAnswer;
}

impl Encode for RequestIdentities {
    type Error = crate::Error;

    fn encoded_len(&self) -> Result<usize, Self::Error> {
        Ok(SSH_AGENTC_REQUEST_IDENTITIES.encoded_len()?)
    }

    fn encode(&self, writer: &mut impl Writer) -> Result<(), Self::Error> {
        Ok(SSH_AGENTC_REQUEST_IDENTITIES.encode(writer)?)
    }
}
+
/// SSH_AGENT_IDENTITIES_ANSWER: the public keys held by the agent.
struct IdentitiesAnswer {
    keys: Vec<PublicKey>,
}

impl Response for IdentitiesAnswer {
    const SUCCESS: u8 = SSH_AGENT_IDENTITIES_ANSWER;
}
+
+impl Decode for IdentitiesAnswer {
+ type Error = crate::Error;
+
+ fn decode(reader: &mut impl Reader) -> Result<Self, Self::Error> {
+ let nkeys = usize::decode(reader).context("nkeys")?;
+ let mut keys = Vec::with_capacity(nkeys);
+
+ for _ in 0..nkeys {
+ let key_data = reader.read_prefixed(KeyData::decode).context("key data")?;
+ let comment = String::decode(reader).context("comment")?;
+ keys.push(PublicKey::new(key_data, comment));
+ }
+
+ Ok(Self { keys })
+ }
+}
+
// Small constructors for the `io::Error`s this module reports.

// From a static message.
fn e(kind: io::ErrorKind, msg: &str) -> io::Error {
    io::Error::new(kind, msg)
}

// From an underlying `crate::Error`, preserving it as the source.
fn ee(kind: io::ErrorKind, e: crate::Error) -> io::Error {
    io::Error::new(kind, e)
}

fn incomplete_response() -> io::Error {
    e(UnexpectedEof, "incomplete response")
}

// NOTE(review): name has a typo ("reponse"); it is private to this module,
// so renaming it and its call site is a mechanical follow-up.
fn reponse_too_large() -> io::Error {
    e(Unsupported, "response payload too large")
}

fn encode(e: crate::Error) -> io::Error {
    ee(InvalidData, e.context("failed to encode request"))
}

fn decode(e: crate::Error) -> io::Error {
    ee(InvalidData, e.context("failed to decode response"))
}

fn agent_error() -> io::Error {
    e(Other, "error response from agent")
}

fn unknown_response() -> io::Error {
    e(Unsupported, "unknown response")
}
diff --git a/src/str.rs b/src/str.rs
new file mode 100644
index 0000000..1825e06
--- /dev/null
+++ b/src/str.rs
@@ -0,0 +1,94 @@
+// Copyright © 2022 Kim Altintop <kim@eagain.io>
+// SPDX-License-Identifier: GPL-2.0-only WITH openvpn-openssl-exception
+
+use core::fmt;
+use std::{
+ ops::Deref,
+ str::FromStr,
+};
+
+use anyhow::ensure;
+
/// A variable-length string type whose length may not exceed `N` bytes.
///
/// The bound is enforced on construction (`TryFrom` / `FromStr`) and on
/// deserialization; `Serialize` is derived since serializing cannot violate
/// the invariant.
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, serde::Serialize)]
pub struct Varchar<T, const N: usize>(T);
+
+impl<T, const N: usize> Varchar<T, N>
+where
+ T: AsRef<str>,
+{
+ pub fn len(&self) -> usize {
+ self.0.as_ref().len()
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.0.as_ref().is_empty()
+ }
+
+ fn try_from_t(t: T) -> crate::Result<Self> {
+ let len = t.as_ref().len();
+ ensure!(len <= N, "string length exceeds {N}: {len}");
+ Ok(Self(t))
+ }
+}
+
impl<const N: usize> Varchar<String, N> {
    /// Creates an empty `Varchar`. Infallible: the empty string satisfies
    /// any bound `N`.
    pub const fn new() -> Self {
        Self(String::new())
    }
}
+
// Owned construction: take the `String` and enforce the length bound.
impl<const N: usize> TryFrom<String> for Varchar<String, N> {
    type Error = crate::Error;

    fn try_from(s: String) -> Result<Self, Self::Error> {
        Self::try_from_t(s)
    }
}
+
+impl<const N: usize> FromStr for Varchar<String, N> {
+ type Err = crate::Error;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ Self::try_from(s.to_owned())
+ }
+}
+
// Borrowed construction: wrap the `&str` without allocating, enforcing the
// length bound.
impl<'a, const N: usize> TryFrom<&'a str> for Varchar<&'a str, N> {
    type Error = crate::Error;

    fn try_from(s: &'a str) -> Result<Self, Self::Error> {
        Self::try_from_t(s)
    }
}
+
// Read-only access to the inner value. Only `Deref` (not `DerefMut`) is
// provided, so the length invariant cannot be broken through the reference.
impl<T, const N: usize> Deref for Varchar<T, N> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
+
// Displays exactly the contained string, with no quoting or padding.
impl<T, const N: usize> fmt::Display for Varchar<T, N>
where
    T: AsRef<str>,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.0.as_ref())
    }
}
+
// Validating deserialization: deserialize the inner representation first,
// then run it through `TryInto` so over-long values are rejected with a
// serde error rather than silently accepted.
impl<'de, T, const N: usize> serde::Deserialize<'de> for Varchar<T, N>
where
    T: serde::Deserialize<'de> + TryInto<Self>,
    <T as TryInto<Self>>::Error: fmt::Display,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let t = T::deserialize(deserializer)?;
        t.try_into().map_err(serde::de::Error::custom)
    }
}