diff --git a/.github/workflows/typos.yml b/.github/workflows/typos.yml index b3ee311b..375600ed 100644 --- a/.github/workflows/typos.yml +++ b/.github/workflows/typos.yml @@ -23,4 +23,4 @@ jobs: # When this version is updated, do not forget to update this in `.pre-commit-config.yaml` too - name: Spell Check Repo - uses: crate-ci/typos@cf5f1c29a8ac336af8568821ec41919923b05a83 # v1.45.1 + uses: crate-ci/typos@7c572958218557a3272c2d6719629443b5cc26fd # v1.45.2 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b16ae4c6..f10cef65 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,7 +18,7 @@ repos: # When this version is updated, do not forget to update this in `.github/workflows/typos.yaml` too - repo: https://github.com/crate-ci/typos - rev: cf5f1c29a8ac336af8568821ec41919923b05a83 # v1.45.1 + rev: 7c572958218557a3272c2d6719629443b5cc26fd # v1.45.2 hooks: - id: typos diff --git a/.typos.toml b/.typos.toml index 59f6d7d6..87c0c4a6 100644 --- a/.typos.toml +++ b/.typos.toml @@ -23,4 +23,6 @@ extend-ignore-re = [ # https://github.com/bitwarden/server/blob/dff9f1cf538198819911cf2c20f8cda3307701c5/src/Notifications/HubHelpers.cs#L86 # https://github.com/bitwarden/clients/blob/9612a4ac45063e372a6fbe87eb253c7cb3c588fb/libs/common/src/auth/services/anonymous-hub.service.ts#L45 "AuthRequestResponseRecieved", + # Ignore Punycode/IDN tests + "xn--.+" ] diff --git a/Cargo.lock b/Cargo.lock index 298a8d80..9a44e0b2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8,17 +8,6 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" -[[package]] -name = "aes" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" -dependencies = [ - "cfg-if", - "cipher", - "cpufeatures 0.2.17", -] - [[package]] name = "ahash" version = "0.8.12" @@ 
-152,9 +141,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.41" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0f9ee0f6e02ffd7ad5816e9464499fba7b3effd01123b515c41d1697c43dad1" +checksum = "e79b3f8a79cccc2898f31920fc69f304859b3bd567490f75ebf51ae1c792a9ac" dependencies = [ "compression-codecs", "compression-core", @@ -670,17 +659,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "backon" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cffb0e931875b666fc4fcb20fee52e9bbd1ef836fd9e9e04ec21555f9f85f7ef" -dependencies = [ - "fastrand", - "gloo-timers", - "tokio", -] - [[package]] name = "base16ct" version = "0.2.0" @@ -717,9 +695,9 @@ checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" [[package]] name = "base64urlsafedata" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42f7f6be94fa637132933fd0a68b9140bcb60e3d46164cb68e82a2bb8d102b3a" +checksum = "b08e33815c87d8cadcddb1e74ac307368a3751fbe40c961538afa21a1899f21c" dependencies = [ "base64 0.21.7", "pastey 0.1.1", @@ -778,15 +756,6 @@ dependencies = [ "hybrid-array", ] -[[package]] -name = "block-padding" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93" -dependencies = [ - "generic-array", -] - [[package]] name = "blocking" version = "1.6.2" @@ -892,20 +861,11 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ade8366b8bd5ba243f0a58f036cc0ca8a2f069cff1a2351ef1cac6b083e16fc0" -[[package]] -name = "cbc" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6" -dependencies = [ - "cipher", -] - [[package]] name = "cc" -version = 
"1.2.60" +version = "1.2.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43c5703da9466b66a946814e1adf53ea2c90f10063b86290cc9eb67ce3478a20" +checksum = "d16d90359e986641506914ba71350897565610e87ce0ad9e6f28569db3dd5c6d" dependencies = [ "find-msvc-tools", "jobserver", @@ -919,12 +879,6 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" -[[package]] -name = "cfg_aliases" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" - [[package]] name = "chacha20" version = "0.10.0" @@ -960,16 +914,6 @@ dependencies = [ "phf 0.12.1", ] -[[package]] -name = "cipher" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" -dependencies = [ - "crypto-common 0.1.6", - "inout", -] - [[package]] name = "cmov" version = "0.5.3" @@ -994,9 +938,9 @@ dependencies = [ [[package]] name = "compression-codecs" -version = "0.4.37" +version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb7b51a7d9c967fc26773061ba86150f19c50c0d65c887cb1fbe295fd16619b7" +checksum = "ce2548391e9c1929c21bf6aa2680af86fe4c1b33e6cea9ac1cfeec0bd11218cf" dependencies = [ "brotli", "compression-core", @@ -1008,9 +952,9 @@ dependencies = [ [[package]] name = "compression-core" -version = "0.4.31" +version = "0.4.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75984efb6ed102a0d42db99afb6c1948f0380d1d91808d5529916e6c08b49d8d" +checksum = "cc14f565cf027a105f7a44ccf9e5b424348421a1d8952a8fc9d499d313107789" [[package]] name = "concurrent-queue" @@ -1387,9 +1331,9 @@ dependencies = [ [[package]] name = "data-encoding" -version = "2.10.0" +version = "2.11.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" +checksum = "a4ae5f15dda3c708c0ade84bfee31ccab44a3da4f88015ed22f63732abe300c8" [[package]] name = "data-url" @@ -1521,9 +1465,9 @@ dependencies = [ [[package]] name = "diesel" -version = "2.3.7" +version = "2.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4ae09a41a4b89f94ec1e053623da8340d996bc32c6517d325a9daad9b239358" +checksum = "9940fb8467a0a06312218ed384185cb8536aa10d8ec017d0ce7fad2c1bd882d5" dependencies = [ "bigdecimal", "bitflags", @@ -1558,9 +1502,9 @@ dependencies = [ [[package]] name = "diesel_derives" -version = "2.3.7" +version = "2.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47618bf0fac06bb670c036e48404c26a865e6a71af4114dfd97dfe89936e404e" +checksum = "d1817b7f4279b947fc4cafddec12b0e5f8727141706561ce3ac94a60bddd1cf5" dependencies = [ "diesel_table_macro_syntax", "dsl_auto_type", @@ -1571,9 +1515,9 @@ dependencies = [ [[package]] name = "diesel_migrations" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "745fd255645f0f1135f9ec55c7b00e0882192af9683ab4731e4bba3da82b8f9c" +checksum = "28d0f4a98124ba6d4ca75da535f65984badec16a003b6e2f94a01e31a79490b8" dependencies = [ "diesel", "migrations_internals", @@ -2262,9 +2206,9 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "hickory-net" -version = "0.26.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c61c8db47fae51ba9f8f2a2748bd87542acfbe22f2ec9cf9c8ec72d1ee6e9a6" +checksum = "e2295ed2f9c31e471e1428a8f88a3f0e1f4b27c15049592138d1eebe9c35b183" dependencies = [ "async-trait", "cfg-if", @@ -2286,9 +2230,9 @@ dependencies = [ [[package]] name = "hickory-proto" -version = "0.26.0" +version = "0.26.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a916d0494600d99ecb15aadfab677ad97c4de559e8f1af0c129353a733ac1fcc" +checksum = "0bab31817bfb44672a252e97fe81cd0c18d1b2cf892108922f6818820df8c643" dependencies = [ "data-encoding", "idna", @@ -2306,9 +2250,9 @@ dependencies = [ [[package]] name = "hickory-resolver" -version = "0.26.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a10bd64d950b4d38ca21e25c8ae230712e4955fb8290cfcb29a5e5dc6017e544" +checksum = "f0d58d28879ceecde6607729660c2667a081ccdc082e082675042793960f178c" dependencies = [ "cfg-if", "futures-util", @@ -2357,15 +2301,6 @@ dependencies = [ "digest 0.11.2", ] -[[package]] -name = "home" -version = "0.5.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d" -dependencies = [ - "windows-sys 0.61.2", -] - [[package]] name = "hostname" version = "0.4.2" @@ -2455,9 +2390,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hybrid-array" -version = "0.4.10" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3944cf8cf766b40e2a1a333ee5e9b563f854d5fa49d6a8ca2764e97c6eddb214" +checksum = "08d46837a0ed51fe95bd3b05de33cd64a1ee88fc797477ca48446872504507c5" dependencies = [ "typenum", ] @@ -2515,12 +2450,10 @@ dependencies = [ "http 1.4.0", "hyper 1.9.0", "hyper-util", - "rustls 0.23.38", - "rustls-native-certs", + "rustls 0.23.40", "tokio", "tokio-rustls 0.26.4", "tower-service", - "webpki-roots", ] [[package]] @@ -2679,9 +2612,9 @@ dependencies = [ [[package]] name = "idna_adapter" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +checksum = "cb68373c0d6620ef8105e855e7745e18b0d00d3bdb07fb532e434244cdb9a714" dependencies = [ 
"icu_normalizer", "icu_properties", @@ -2716,16 +2649,6 @@ version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb" -[[package]] -name = "inout" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" -dependencies = [ - "block-padding", - "generic-array", -] - [[package]] name = "ipconfig" version = "0.3.4" @@ -2792,24 +2715,26 @@ checksum = "47f142fe24a9c9944451e8349de0a56af5f3e7226dc46f3ed4d4ecc0b85af75e" [[package]] name = "jiff" -version = "0.2.23" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a3546dc96b6d42c5f24902af9e2538e82e39ad350b0c766eb3fbf2d8f3d8359" +checksum = "f00b5dbd620d61dfdcb6007c9c1f6054ebd75319f163d886a9055cec1155073d" dependencies = [ "jiff-static", "jiff-tzdb-platform", + "js-sys", "log", "portable-atomic", "portable-atomic-util", "serde_core", + "wasm-bindgen", "windows-sys 0.61.2", ] [[package]] name = "jiff-static" -version = "0.2.23" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a8c8b344124222efd714b73bb41f8b5120b27a7cc1c75593a6ff768d9d05aa4" +checksum = "e000de030ff8022ea1da3f466fbb0f3a809f5e51ed31f6dd931c35181ad8e6d7" dependencies = [ "proc-macro2", "quote", @@ -2903,9 +2828,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.95" +version = "0.3.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2964e92d1d9dc3364cae4d718d93f227e3abb088e747d92e0395bfdedf1c12ca" +checksum = "a1840c94c045fbcf8ba2812c95db44499f7c64910a912551aaaa541decebcacf" dependencies = [ "cfg-if", "futures-util", @@ -2913,21 +2838,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "jsonwebtoken" -version = "9.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde" -dependencies = [ - "base64 0.22.1", - "js-sys", - "pem", - "ring", - "serde", - "serde_json", - "simple_asn1", -] - [[package]] name = "jsonwebtoken" version = "10.3.0" @@ -3005,7 +2915,7 @@ dependencies = [ "nom 8.0.0", "percent-encoding", "quoted_printable", - "rustls 0.23.38", + "rustls 0.23.40", "rustls-native-certs", "serde", "socket2 0.6.3", @@ -3017,9 +2927,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.185" +version = "0.2.186" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52ff2c0fe9bc6cb6b14a0592c2ff4fa9ceb83eea9db979b0487cd054946a2b8f" +checksum = "68ab91017fe16c622486840e4c83c9a37afeff978bd239b5293d61ece587de66" [[package]] name = "libm" @@ -3038,9 +2948,9 @@ dependencies = [ [[package]] name = "libsqlite3-sys" -version = "0.36.0" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95b4103cffefa72eb8428cb6b47d6627161e51c2739fc5e3b734584157bc642a" +checksum = "b1f111c8c41e7c61a49cd34e44c7619462967221a6443b0ec299e0ac30cfb9b1" dependencies = [ "cc", "pkg-config", @@ -3098,12 +3008,6 @@ dependencies = [ "tracing-subscriber", ] -[[package]] -name = "lru-slab" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" - [[package]] name = "macros" version = "0.1.0" @@ -3131,6 +3035,15 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "mea" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6747f54621d156e1b47eb6b25f39a941b9fc347f98f67d25d8881ff99e8ed832" +dependencies = [ + "slab", +] + [[package]] name = "memchr" version = "2.8.0" @@ -3408,7 +3321,6 @@ dependencies = [ "getrandom 0.2.17", "http 1.4.0", "rand 0.8.6", - "reqwest", "serde", "serde_json", "serde_path_to_error", @@ -3438,31 +3350,76 @@ dependencies = [ [[package]] name = 
"opendal" -version = "0.55.0" +version = "0.56.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d075ab8a203a6ab4bc1bce0a4b9fe486a72bf8b939037f4b78d95386384bc80a" +checksum = "97b31d3d8e99a85d83b73ec26647f5607b80578ed9375810b6e44ffa3590a236" +dependencies = [ + "opendal-core", + "opendal-service-fs", + "opendal-service-s3", +] + +[[package]] +name = "opendal-core" +version = "0.56.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1849dd2687e173e776d3af5fce1ba3ae47b9dd37a09d1c4deba850ef45fe00ca" dependencies = [ "anyhow", - "backon", "base64 0.22.1", "bytes", - "crc32c", "futures", - "getrandom 0.2.17", "http 1.4.0", "http-body 1.0.1", "jiff", "log", "md-5", + "mea", "percent-encoding", "quick-xml 0.38.4", - "reqsign", + "reqsign-core", "reqwest", "serde", "serde_json", "tokio", "url", "uuid", + "web-time", +] + +[[package]] +name = "opendal-service-fs" +version = "0.56.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf0be0417abeeb0053376d816b90fceb9ca98f20dfb54ebf1f2a282729f83663" +dependencies = [ + "bytes", + "log", + "opendal-core", + "serde", + "tokio", + "xattr", +] + +[[package]] +name = "opendal-service-s3" +version = "0.56.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dadddeb9bb50b0d30927dd914c298c4ddca47e4c1cfa7674d311f0cf9b051c8" +dependencies = [ + "base64 0.22.1", + "bytes", + "crc32c", + "http 1.4.0", + "log", + "md-5", + "opendal-core", + "quick-xml 0.38.4", + "reqsign-aws-v4", + "reqsign-core", + "reqsign-file-read-tokio", + "serde", + "url", ] [[package]] @@ -3647,19 +3604,9 @@ checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec" [[package]] name = "pastey" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b867cad97c0791bbd3aaa6472142568c6c9e8f71937e98379f584cfb0cf35bec" - -[[package]] -name = "pbkdf2" -version = "0.12.2" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" -dependencies = [ - "digest 0.10.7", - "hmac 0.12.1", -] +checksum = "c5a797f0e07bdf071d15742978fc3128ec6c22891c31a3a931513263904c982a" [[package]] name = "pear" @@ -3852,21 +3799,6 @@ dependencies = [ "spki", ] -[[package]] -name = "pkcs5" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e847e2c91a18bfa887dd028ec33f2fe6f25db77db3619024764914affe8b69a6" -dependencies = [ - "aes", - "cbc", - "der", - "pbkdf2", - "scrypt", - "sha2 0.10.9", - "spki", -] - [[package]] name = "pkcs8" version = "0.10.2" @@ -3874,8 +3806,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ "der", - "pkcs5", - "rand_core 0.6.4", "spki", ] @@ -4038,16 +3968,6 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" -[[package]] -name = "quick-xml" -version = "0.37.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "331e97a1af0bf59823e6eadffe373d7b27f485be8748f71471c662c1f269b7fb" -dependencies = [ - "memchr", - "serde", -] - [[package]] name = "quick-xml" version = "0.38.4" @@ -4059,58 +3979,13 @@ dependencies = [ ] [[package]] -name = "quinn" -version = "0.11.9" +name = "quick-xml" +version = "0.39.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" +checksum = "cdcc8dd4e2f670d309a5f0e83fe36dfdc05af317008fea29144da1a2ac858e5e" dependencies = [ - "bytes", - "cfg_aliases", - "pin-project-lite", - "quinn-proto", - "quinn-udp", - "rustc-hash", - "rustls 0.23.38", - "socket2 0.6.3", - "thiserror 2.0.18", - "tokio", - "tracing", - "web-time", -] - -[[package]] -name = 
"quinn-proto" -version = "0.11.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "434b42fec591c96ef50e21e886936e66d3cc3f737104fdb9b737c40ffb94c098" -dependencies = [ - "bytes", - "getrandom 0.3.4", - "lru-slab", - "rand 0.9.4", - "ring", - "rustc-hash", - "rustls 0.23.38", - "rustls-pki-types", - "slab", - "thiserror 2.0.18", - "tinyvec", - "tracing", - "web-time", -] - -[[package]] -name = "quinn-udp" -version = "0.5.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" -dependencies = [ - "cfg_aliases", - "libc", - "once_cell", - "socket2 0.6.3", - "tracing", - "windows-sys 0.60.2", + "memchr", + "serde", ] [[package]] @@ -4312,43 +4187,64 @@ dependencies = [ ] [[package]] -name = "reqsign" -version = "0.16.5" +name = "reqsign-aws-v4" +version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43451dbf3590a7590684c25fb8d12ecdcc90ed3ac123433e500447c7d77ed701" +checksum = "44eaca382e94505a49f1a4849658d153aebf79d9c1a58e5dd3b10361511e9f43" dependencies = [ "anyhow", - "async-trait", - "base64 0.22.1", - "chrono", + "bytes", "form_urlencoded", - "getrandom 0.2.17", - "hex", - "hmac 0.12.1", - "home", "http 1.4.0", - "jsonwebtoken 9.3.1", "log", - "once_cell", "percent-encoding", - "quick-xml 0.37.5", - "rand 0.8.6", - "reqwest", - "rsa", + "quick-xml 0.39.4", + "reqsign-core", "rust-ini", "serde", "serde_json", + "serde_urlencoded", + "sha1", +] + +[[package]] +name = "reqsign-core" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b10302cf0a7d7e7352ba211fc92c3c5bebf1286153e49cc5aa87348078a8e102" +dependencies = [ + "anyhow", + "base64 0.22.1", + "bytes", + "form_urlencoded", + "futures", + "hex", + "hmac 0.12.1", + "http 1.4.0", + "jiff", + "log", + "percent-encoding", "sha1", "sha2 0.10.9", + "windows-sys 0.61.2", +] + +[[package]] +name = 
"reqsign-file-read-tokio" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2d89295b3d17abea31851cc8de55d843d89c52132c864963c38d41920613dc5" +dependencies = [ + "anyhow", + "reqsign-core", "tokio", - "toml 0.8.23", ] [[package]] name = "reqwest" -version = "0.12.28" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" +checksum = "62e0021ea2c22aed41653bc7e1419abb2c97e038ff2c33d0e1309e49a97deec0" dependencies = [ "base64 0.22.1", "bytes", @@ -4370,10 +4266,9 @@ dependencies = [ "mime", "percent-encoding", "pin-project-lite", - "quinn", - "rustls 0.23.38", - "rustls-native-certs", + "rustls 0.23.40", "rustls-pki-types", + "rustls-platform-verifier", "serde", "serde_json", "serde_urlencoded", @@ -4389,7 +4284,6 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots", ] [[package]] @@ -4537,13 +4431,13 @@ dependencies = [ [[package]] name = "rpassword" -version = "7.4.0" +version = "7.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66d4c8b64f049c6721ec8ccec37ddfc3d641c4a7fca57e8f2a89de509c73df39" +checksum = "2501c67132bd19c3005b0111fba298907ef002c8c1cf68e25634707e38bf66fe" dependencies = [ "libc", "rtoolbox", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -4560,7 +4454,6 @@ dependencies = [ "pkcs1", "pkcs8", "rand_core 0.6.4", - "sha2 0.10.9", "signature", "spki", "subtle", @@ -4597,12 +4490,6 @@ dependencies = [ "ordered-multimap", ] -[[package]] -name = "rustc-hash" -version = "2.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94300abf3f1ae2e2b8ffb7b58043de3d399c73fa6f4b73826402a5c457614dbe" - [[package]] name = "rustc_version" version = "0.4.1" @@ -4648,9 +4535,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.38" +version = "0.23.40" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "69f9466fb2c14ea04357e91413efb882e2a6d4a406e625449bc0a5d360d53a21" +checksum = "ef86cd5876211988985292b91c96a8f2d298df24e75989a43a3c73f2d4d8168b" dependencies = [ "log", "once_cell", @@ -4684,14 +4571,40 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.14.0" +version = "1.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" +checksum = "30a7197ae7eb376e574fe940d068c30fe0462554a3ddbe4eca7838e049c937a9" dependencies = [ - "web-time", "zeroize", ] +[[package]] +name = "rustls-platform-verifier" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d1e2536ce4f35f4846aa13bff16bd0ff40157cdb14cc056c7b14ba41233ba0" +dependencies = [ + "core-foundation 0.10.1", + "core-foundation-sys", + "jni", + "log", + "once_cell", + "rustls 0.23.40", + "rustls-native-certs", + "rustls-platform-verifier-android", + "rustls-webpki 0.103.13", + "security-framework", + "security-framework-sys", + "webpki-root-certs", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustls-platform-verifier-android" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" + [[package]] name = "rustls-webpki" version = "0.101.7" @@ -4725,15 +4638,6 @@ version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" -[[package]] -name = "salsa20" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213" -dependencies = [ - "cipher", -] - [[package]] name = "same-file" version = "1.0.6" @@ -4797,17 +4701,6 @@ version = "1.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" -[[package]] -name = "scrypt" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0516a385866c09368f0b5bcd1caff3366aace790fcd46e2bb032697bb172fd1f" -dependencies = [ - "pbkdf2", - "salsa20", - "sha2 0.10.9", -] - [[package]] name = "sct" version = "0.7.1" @@ -5493,7 +5386,7 @@ version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" dependencies = [ - "rustls 0.23.38", + "rustls 0.23.40", "tokio", ] @@ -5869,7 +5762,6 @@ checksum = "7ba6f5989077681266825251a52748b8c1d8a4ad098cc37e440103d0ea717fc0" name = "vaultwarden" version = "1.0.0" dependencies = [ - "anyhow", "argon2", "aws-config", "aws-credential-types", @@ -5899,7 +5791,7 @@ dependencies = [ "html5gum", "http 1.4.0", "job_scheduler_ng", - "jsonwebtoken 10.3.0", + "jsonwebtoken", "lettre", "libsqlite3-sys", "log", @@ -5911,18 +5803,20 @@ dependencies = [ "opendal", "openidconnect", "openssl", - "pastey 0.2.1", + "pastey 0.2.2", "percent-encoding", "pico-args", "rand 0.10.1", "regex", - "reqsign", + "reqsign-aws-v4", + "reqsign-core", "reqwest", "ring", "rmpv", "rocket", "rocket_ws", "rpassword", + "rustls 0.23.40", "semver", "serde", "serde_json", @@ -6006,9 +5900,9 @@ dependencies = [ [[package]] name = "wasm-bindgen" -version = "0.2.118" +version = "0.2.120" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf938a0bacb0469e83c1e148908bd7d5a6010354cf4fb73279b7447422e3a89" +checksum = "df52b6d9b87e0c74c9edfa1eb2d9bf85e5d63515474513aa50fa181b3c4f5db1" dependencies = [ "cfg-if", "once_cell", @@ -6019,9 +5913,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.68" +version = "0.4.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f371d383f2fb139252e0bfac3b81b265689bf45b6874af544ffa4c975ac1ebf8" +checksum = "af934872acec734c2d80e6617bbb5ff4f12b052dd8e6332b0817bce889516084" dependencies = [ "js-sys", "wasm-bindgen", @@ -6029,9 +5923,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.118" +version = "0.2.120" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eeff24f84126c0ec2db7a449f0c2ec963c6a49efe0698c4242929da037ca28ed" +checksum = "78b1041f495fb322e64aca85f5756b2172e35cd459376e67f2a6c9dffcedb103" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -6039,9 +5933,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.118" +version = "0.2.120" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d08065faf983b2b80a79fd87d8254c409281cf7de75fc4b773019824196c904" +checksum = "9dcd0ff20416988a18ac686d4d4d0f6aae9ebf08a389ff5d29012b05af2a1b41" dependencies = [ "bumpalo", "proc-macro2", @@ -6052,9 +5946,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.118" +version = "0.2.120" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fd04d9e306f1907bd13c6361b5c6bfc7b3b3c095ed3f8a9246390f8dbdee129" +checksum = "49757b3c82ebf16c57d69365a142940b384176c24df52a087fb748e2085359ea" dependencies = [ "unicode-ident", ] @@ -6083,9 +5977,9 @@ dependencies = [ [[package]] name = "wasm-streams" -version = "0.4.2" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +checksum = "9d1ec4f6517c9e11ae630e200b2b65d193279042e28edd4a2cda233e46670bbb" dependencies = [ "futures-util", "js-sys", @@ -6108,9 +6002,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.95" +version = "0.3.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f2dfbb17949fa2088e5d39408c48368947b86f7834484e87b73de55bc14d97d" 
+checksum = "2eadbac71025cd7b0834f20d1fe8472e8495821b4e9801eb0a60bd1f19827602" dependencies = [ "js-sys", "wasm-bindgen", @@ -6128,9 +6022,9 @@ dependencies = [ [[package]] name = "webauthn-attestation-ca" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fafcf13f7dc1fb292ed4aea22cdd3757c285d7559e9748950ee390249da4da6b" +checksum = "6475c0bbd1a3f04afaa3e98880408c5be61680c5e6bd3c6f8c250990d5d3e18e" dependencies = [ "base64urlsafedata", "openssl", @@ -6142,9 +6036,9 @@ dependencies = [ [[package]] name = "webauthn-rs" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b24d082d3360258fefb6ffe56123beef7d6868c765c779f97b7a2fcf06727f8" +checksum = "6c548915e0e92ee946bbf2aecf01ea21bef53d974b0793cc6732ba81a03fc422" dependencies = [ "base64urlsafedata", "serde", @@ -6156,9 +6050,9 @@ dependencies = [ [[package]] name = "webauthn-rs-core" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15784340a24c170ce60567282fb956a0938742dbfbf9eff5df793a686a009b8b" +checksum = "296d2d501feb715d80b8e186fb88bab1073bca17f460303a1013d17b673bea6a" dependencies = [ "base64 0.21.7", "base64urlsafedata", @@ -6183,9 +6077,9 @@ dependencies = [ [[package]] name = "webauthn-rs-proto" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16a1fb2580ce73baa42d3011a24de2ceab0d428de1879ece06e02e8c416e497c" +checksum = "c37393beac9c1ed1ca6dbb30b1e01783fb316ab3a45d90ecd48c99052dd7ef1e" dependencies = [ "base64 0.21.7", "base64urlsafedata", @@ -6195,10 +6089,10 @@ dependencies = [ ] [[package]] -name = "webpki-roots" +name = "webpki-root-certs" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52f5ee44c96cf55f1b349600768e3ece3a8f26010c05265ab73f945bb1a2eb9d" +checksum = 
"f31141ce3fc3e300ae89b78c0dd67f9708061d1d2eda54b8209346fd6be9a92c" dependencies = [ "rustls-pki-types", ] @@ -6346,15 +6240,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "windows-sys" -version = "0.60.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" -dependencies = [ - "windows-targets 0.53.5", -] - [[package]] name = "windows-sys" version = "0.61.2" @@ -6388,30 +6273,13 @@ dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm 0.52.6", + "windows_i686_gnullvm", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", "windows_x86_64_msvc 0.52.6", ] -[[package]] -name = "windows-targets" -version = "0.53.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" -dependencies = [ - "windows-link", - "windows_aarch64_gnullvm 0.53.1", - "windows_aarch64_msvc 0.53.1", - "windows_i686_gnu 0.53.1", - "windows_i686_gnullvm 0.53.1", - "windows_i686_msvc 0.53.1", - "windows_x86_64_gnu 0.53.1", - "windows_x86_64_gnullvm 0.53.1", - "windows_x86_64_msvc 0.53.1", -] - [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" @@ -6424,12 +6292,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" - [[package]] name = "windows_aarch64_msvc" version = "0.48.5" @@ -6442,12 +6304,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" -[[package]] -name = "windows_aarch64_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" - [[package]] name = "windows_i686_gnu" version = "0.48.5" @@ -6460,24 +6316,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" -[[package]] -name = "windows_i686_gnu" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" - [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" -[[package]] -name = "windows_i686_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" - [[package]] name = "windows_i686_msvc" version = "0.48.5" @@ -6490,12 +6334,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" -[[package]] -name = "windows_i686_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" - [[package]] name = "windows_x86_64_gnu" version = "0.48.5" @@ -6508,12 +6346,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" -[[package]] -name = "windows_x86_64_gnu" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" - [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" @@ -6526,12 +6358,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" - [[package]] name = "windows_x86_64_msvc" version = "0.48.5" @@ -6544,12 +6370,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" -[[package]] -name = "windows_x86_64_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" - [[package]] name = "winnow" version = "0.6.26" @@ -6691,6 +6511,16 @@ dependencies = [ "time", ] +[[package]] +name = "xattr" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156" +dependencies = [ + "libc", + "rustix", +] + [[package]] name = "xml" version = "1.2.1" @@ -6737,9 +6567,9 @@ dependencies = [ [[package]] name = "yubico_ng" -version = "0.14.1" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "929981f5b46b8fb8ee54b144de6b55c3a94fbe26635ee25b0e126e184250867c" +checksum = "228e2862e3c66f3224102d9a00d9d3646b271a05cc6c4819fea195fa8b5c00e0" dependencies = [ "base64 0.22.1", "form_urlencoded", diff --git a/Cargo.toml b/Cargo.toml index 79eebec0..271d102d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -23,21 +23,23 @@ publish.workspace = true [features] default = [ - # "sqlite", + # "sqlite" or "sqlite_system", # "mysql", # "postgresql", ] # 
Empty to keep compatibility, prefer to set USE_SYSLOG=true enable_syslog = [] +# Please enable at least one of these DB backends. mysql = ["diesel/mysql", "diesel_migrations/mysql"] postgresql = ["diesel/postgres", "diesel_migrations/postgres"] -sqlite = ["diesel/sqlite", "diesel_migrations/sqlite", "dep:libsqlite3-sys"] +sqlite_system = ["diesel/sqlite", "diesel_migrations/sqlite"] +sqlite = ["sqlite_system", "libsqlite3-sys/bundled"] # Alternative to the above, statically linked SQLite into the binary instead of dynamically. # Enable to use a vendored and statically linked openssl vendored_openssl = ["openssl/vendored"] # Enable MiMalloc memory allocator to replace the default malloc # This can improve performance for Alpine builds enable_mimalloc = ["dep:mimalloc"] -s3 = ["opendal/services-s3", "dep:aws-config", "dep:aws-credential-types", "dep:aws-smithy-runtime-api", "dep:anyhow", "dep:http", "dep:reqsign"] +s3 = ["opendal/services-s3", "dep:aws-config", "dep:aws-credential-types", "dep:aws-smithy-runtime-api", "dep:http", "dep:reqsign-aws-v4", "dep:reqsign-core"] # OIDC specific features oidc-accept-rfc3339-timestamps = ["openidconnect/accept-rfc3339-timestamps"] @@ -88,18 +90,19 @@ serde_json = "1.0.149" # A safe, extensible ORM and Query builder # Currently pinned diesel to v2.3.3 as newer version break MySQL/MariaDB compatibility -diesel = { version = "2.3.7", features = ["chrono", "r2d2", "numeric"] } -diesel_migrations = "2.3.1" +diesel = { version = "2.3.9", features = ["chrono", "r2d2", "numeric"] } +diesel_migrations = "2.3.2" derive_more = { version = "2.1.1", features = ["from", "into", "as_ref", "deref", "display"] } diesel-derive-newtype = "2.1.2" -# Bundled/Static SQLite -libsqlite3-sys = { version = "0.36.0", features = ["bundled"], optional = true } +# SQLite, statically bundled unless the `sqlite_system` feature is enabled +libsqlite3-sys = { version = "0.37.0", optional = true } # Crypto-related libraries rand = "0.10.1" ring = "0.17.14" 
+rustls = { version = "0.23.40", features = ["ring", "std"], default-features = false } subtle = "2.6.1" # UUID generation @@ -114,7 +117,7 @@ time = "0.3.47" job_scheduler_ng = "2.4.0" # Data encoding library Hex/Base32/Base64 -data-encoding = "2.10.0" +data-encoding = "2.11.0" # JWT library jsonwebtoken = { version = "10.3.0", features = ["use_pem", "rust_crypto"], default-features = false } @@ -123,14 +126,14 @@ jsonwebtoken = { version = "10.3.0", features = ["use_pem", "rust_crypto"], defa totp-lite = "2.0.1" # Yubico Library -yubico = { package = "yubico_ng", version = "0.14.1", features = ["online-tokio"], default-features = false } +yubico = { package = "yubico_ng", version = "0.15.0", features = ["online-tokio"], default-features = false } # WebAuthn libraries # danger-allow-state-serialisation is needed to save the state in the db # danger-credential-internals is needed to support U2F to Webauthn migration -webauthn-rs = { version = "0.5.4", features = ["danger-allow-state-serialisation", "danger-credential-internals"] } -webauthn-rs-proto = "0.5.4" -webauthn-rs-core = "0.5.4" +webauthn-rs = { version = "0.5.5", features = ["danger-allow-state-serialisation", "danger-credential-internals"] } +webauthn-rs-proto = "0.5.5" +webauthn-rs-core = "0.5.5" # Handling of URL's for WebAuthn and favicons url = "2.5.8" @@ -144,8 +147,8 @@ email_address = "0.2.9" handlebars = { version = "6.4.0", features = ["dir_source"] } # HTTP client (Used for favicons, version check, DUO and HIBP API) -reqwest = { version = "0.12.28", features = ["rustls-tls", "rustls-tls-native-roots", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false} -hickory-resolver = "0.26.0" +reqwest = { version = "0.13.3", features = ["rustls-no-provider", "stream", "json", "form", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false} +hickory-resolver = 
"0.26.1" # Favicon extraction libraries html5gum = "0.8.3" @@ -168,11 +171,11 @@ openssl = "0.10.78" pico-args = "0.5.0" # Macro ident concatenation -pastey = "0.2.1" +pastey = "0.2.2" governor = "0.10.4" # OIDC for SSO -openidconnect = { version = "4.0.1", features = ["reqwest", "rustls-tls"] } +openidconnect = { version = "4.0.1", default-features = false } moka = { version = "0.12.15", features = ["future"] } # Check client versions for specific features. @@ -188,21 +191,21 @@ which = "8.0.2" argon2 = "0.5.3" # Reading a password from the cli for generating the Argon2id ADMIN_TOKEN -rpassword = "7.4.0" +rpassword = "7.5.1" # Loading a dynamic CSS Stylesheet grass_compiler = { version = "0.13.4", default-features = false } # File are accessed through Apache OpenDAL -opendal = { version = "0.55.0", features = ["services-fs"], default-features = false } +opendal = { version = "0.56.0", features = ["services-fs"], default-features = false } # For retrieving AWS credentials, including temporary SSO credentials -anyhow = { version = "1.0.102", optional = true } aws-config = { version = "1.8.16", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true } aws-credential-types = { version = "1.2.14", optional = true } aws-smithy-runtime-api = { version = "1.12.0", optional = true } http = { version = "1.4.0", optional = true } -reqsign = { version = "0.16.5", optional = true } +reqsign-aws-v4 = { version = "3.0.0", optional = true } +reqsign-core = { version = "3.0.0", optional = true } # Strip debuginfo from the release builds # The debug symbols are to provide better panic traces @@ -301,6 +304,7 @@ branches_sharing_code = "deny" case_sensitive_file_extension_comparisons = "deny" cast_lossless = "deny" clone_on_ref_ptr = "deny" +duration_suboptimal_units = "deny" equatable_if_let = "deny" excessive_precision = "deny" filter_map_next = "deny" @@ -322,6 +326,7 @@ needless_continue = "deny" 
needless_lifetimes = "deny" option_option = "deny" redundant_clone = "deny" +ref_option = "deny" string_add_assign = "deny" unnecessary_join = "deny" unnecessary_self_imports = "deny" diff --git a/README.md b/README.md index c84a9c40..0b24ba69 100644 --- a/README.md +++ b/README.md @@ -59,8 +59,9 @@ A nearly complete implementation of the Bitwarden Client API is provided, includ ## Usage > [!IMPORTANT] -> The web-vault requires the use a secure context for the [Web Crypto API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Crypto_API). -> That means it will only work via `http://localhost:8000` (using the port from the example below) or if you [enable HTTPS](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS). +> The web-vault requires the use of HTTPS and a secure context for the [Web Crypto API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Crypto_API).
+> That means it will only work if you [enable HTTPS](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS).
+> We also suggest to use a [reverse proxy](https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples). The recommended way to install and use Vaultwarden is via our container images which are published to [ghcr.io](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden), [docker.io](https://hub.docker.com/r/vaultwarden/server) and [quay.io](https://quay.io/repository/vaultwarden/server). See [which container image to use](https://github.com/dani-garcia/vaultwarden/wiki/Which-container-image-to-use) for an explanation of the provided tags. diff --git a/build.rs b/build.rs index 4a831737..2d1106c2 100644 --- a/build.rs +++ b/build.rs @@ -2,21 +2,21 @@ use std::env; use std::process::Command; fn main() { - // This allow using #[cfg(sqlite)] instead of #[cfg(feature = "sqlite")], which helps when trying to add them through macros - #[cfg(feature = "sqlite")] + // These allow using e.g. #[cfg(mysql)] instead of #[cfg(feature = "mysql")], which helps when trying to add them through macros + #[cfg(feature = "sqlite_system")] // The `sqlite` feature implies this one. println!("cargo:rustc-cfg=sqlite"); #[cfg(feature = "mysql")] println!("cargo:rustc-cfg=mysql"); #[cfg(feature = "postgresql")] println!("cargo:rustc-cfg=postgresql"); - #[cfg(feature = "s3")] - println!("cargo:rustc-cfg=s3"); - - #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))] + #[cfg(not(any(feature = "sqlite_system", feature = "mysql", feature = "postgresql")))] compile_error!( "You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite" ); + #[cfg(feature = "s3")] + println!("cargo:rustc-cfg=s3"); + // Use check-cfg to let cargo know which cfg's we define, // and avoid warnings when they are used in the code. 
println!("cargo::rustc-check-cfg=cfg(sqlite)"); diff --git a/docker/DockerSettings.yaml b/docker/DockerSettings.yaml index 9a5d7f02..9d4a563a 100644 --- a/docker/DockerSettings.yaml +++ b/docker/DockerSettings.yaml @@ -1,6 +1,6 @@ --- -vault_version: "v2026.3.1" -vault_image_digest: "sha256:c1b1f212333f95bff4ef8d00e8e3589c4ae8eda018691f28f8bddc7e971dd767" +vault_version: "v2026.4.1" +vault_image_digest: "sha256:ca2a4251c4e63c9ad428262b4dd452789a1b9f6fce71da351e93dceed0d2edbe" # Cross Compile Docker Helper Scripts v1.9.0 # We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts # https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags diff --git a/docker/Dockerfile.alpine b/docker/Dockerfile.alpine index 44517aa3..cbb18e2b 100644 --- a/docker/Dockerfile.alpine +++ b/docker/Dockerfile.alpine @@ -19,15 +19,15 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull docker.io/vaultwarden/web-vault:v2026.3.1 -# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2026.3.1 -# [docker.io/vaultwarden/web-vault@sha256:c1b1f212333f95bff4ef8d00e8e3589c4ae8eda018691f28f8bddc7e971dd767] +# $ docker pull docker.io/vaultwarden/web-vault:v2026.4.1 +# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2026.4.1 +# [docker.io/vaultwarden/web-vault@sha256:ca2a4251c4e63c9ad428262b4dd452789a1b9f6fce71da351e93dceed0d2edbe] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:c1b1f212333f95bff4ef8d00e8e3589c4ae8eda018691f28f8bddc7e971dd767 -# [docker.io/vaultwarden/web-vault:v2026.3.1] +# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:ca2a4251c4e63c9ad428262b4dd452789a1b9f6fce71da351e93dceed0d2edbe +# [docker.io/vaultwarden/web-vault:v2026.4.1] # -FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:c1b1f212333f95bff4ef8d00e8e3589c4ae8eda018691f28f8bddc7e971dd767 AS vault +FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:ca2a4251c4e63c9ad428262b4dd452789a1b9f6fce71da351e93dceed0d2edbe AS vault ########################## ALPINE BUILD IMAGES ########################## ## NOTE: The Alpine Base Images do not support other platforms then linux/amd64 and linux/arm64 diff --git a/docker/Dockerfile.debian b/docker/Dockerfile.debian index d472cbb6..829f59d2 100644 --- a/docker/Dockerfile.debian +++ b/docker/Dockerfile.debian @@ -19,15 +19,15 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull docker.io/vaultwarden/web-vault:v2026.3.1 -# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2026.3.1 -# [docker.io/vaultwarden/web-vault@sha256:c1b1f212333f95bff4ef8d00e8e3589c4ae8eda018691f28f8bddc7e971dd767] +# $ docker pull docker.io/vaultwarden/web-vault:v2026.4.1 +# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2026.4.1 +# [docker.io/vaultwarden/web-vault@sha256:ca2a4251c4e63c9ad428262b4dd452789a1b9f6fce71da351e93dceed0d2edbe] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:c1b1f212333f95bff4ef8d00e8e3589c4ae8eda018691f28f8bddc7e971dd767 -# [docker.io/vaultwarden/web-vault:v2026.3.1] +# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:ca2a4251c4e63c9ad428262b4dd452789a1b9f6fce71da351e93dceed0d2edbe +# [docker.io/vaultwarden/web-vault:v2026.4.1] # -FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:c1b1f212333f95bff4ef8d00e8e3589c4ae8eda018691f28f8bddc7e971dd767 AS vault +FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:ca2a4251c4e63c9ad428262b4dd452789a1b9f6fce71da351e93dceed0d2edbe AS vault ########################## Cross Compile Docker Helper Scripts ########################## ## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts diff --git a/migrations/mysql/2026-03-09-005927_add_archives/down.sql b/migrations/mysql/2026-03-09-005927_add_archives/down.sql new file mode 100644 index 00000000..a3ef20c3 --- /dev/null +++ b/migrations/mysql/2026-03-09-005927_add_archives/down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS archives; diff --git a/migrations/mysql/2026-03-09-005927_add_archives/up.sql b/migrations/mysql/2026-03-09-005927_add_archives/up.sql new file mode 100644 index 00000000..6d7a7024 --- /dev/null +++ 
b/migrations/mysql/2026-03-09-005927_add_archives/up.sql @@ -0,0 +1,10 @@ +DROP TABLE IF EXISTS archives; + +CREATE TABLE archives ( + user_uuid CHAR(36) NOT NULL, + cipher_uuid CHAR(36) NOT NULL, + archived_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (user_uuid, cipher_uuid), + FOREIGN KEY (user_uuid) REFERENCES users (uuid) ON DELETE CASCADE, + FOREIGN KEY (cipher_uuid) REFERENCES ciphers (uuid) ON DELETE CASCADE +); diff --git a/migrations/mysql/2026-04-25-120000_sso_auth_binding/down.sql b/migrations/mysql/2026-04-25-120000_sso_auth_binding/down.sql new file mode 100644 index 00000000..17e3d8c7 --- /dev/null +++ b/migrations/mysql/2026-04-25-120000_sso_auth_binding/down.sql @@ -0,0 +1 @@ +ALTER TABLE sso_auth DROP COLUMN binding_hash; diff --git a/migrations/mysql/2026-04-25-120000_sso_auth_binding/up.sql b/migrations/mysql/2026-04-25-120000_sso_auth_binding/up.sql new file mode 100644 index 00000000..53ee8063 --- /dev/null +++ b/migrations/mysql/2026-04-25-120000_sso_auth_binding/up.sql @@ -0,0 +1 @@ +ALTER TABLE sso_auth ADD COLUMN binding_hash TEXT; diff --git a/migrations/postgresql/2026-03-09-005927_add_archives/down.sql b/migrations/postgresql/2026-03-09-005927_add_archives/down.sql new file mode 100644 index 00000000..a3ef20c3 --- /dev/null +++ b/migrations/postgresql/2026-03-09-005927_add_archives/down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS archives; diff --git a/migrations/postgresql/2026-03-09-005927_add_archives/up.sql b/migrations/postgresql/2026-03-09-005927_add_archives/up.sql new file mode 100644 index 00000000..c56d01a0 --- /dev/null +++ b/migrations/postgresql/2026-03-09-005927_add_archives/up.sql @@ -0,0 +1,8 @@ +DROP TABLE IF EXISTS archives; + +CREATE TABLE archives ( + user_uuid CHAR(36) NOT NULL REFERENCES users (uuid) ON DELETE CASCADE, + cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid) ON DELETE CASCADE, + archived_at TIMESTAMP NOT NULL DEFAULT now(), + PRIMARY KEY (user_uuid, cipher_uuid) +); diff --git 
a/migrations/postgresql/2026-04-25-120000_sso_auth_binding/down.sql b/migrations/postgresql/2026-04-25-120000_sso_auth_binding/down.sql new file mode 100644 index 00000000..17e3d8c7 --- /dev/null +++ b/migrations/postgresql/2026-04-25-120000_sso_auth_binding/down.sql @@ -0,0 +1 @@ +ALTER TABLE sso_auth DROP COLUMN binding_hash; diff --git a/migrations/postgresql/2026-04-25-120000_sso_auth_binding/up.sql b/migrations/postgresql/2026-04-25-120000_sso_auth_binding/up.sql new file mode 100644 index 00000000..53ee8063 --- /dev/null +++ b/migrations/postgresql/2026-04-25-120000_sso_auth_binding/up.sql @@ -0,0 +1 @@ +ALTER TABLE sso_auth ADD COLUMN binding_hash TEXT; diff --git a/migrations/sqlite/2026-03-09-005927_add_archives/down.sql b/migrations/sqlite/2026-03-09-005927_add_archives/down.sql new file mode 100644 index 00000000..a3ef20c3 --- /dev/null +++ b/migrations/sqlite/2026-03-09-005927_add_archives/down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS archives; diff --git a/migrations/sqlite/2026-03-09-005927_add_archives/up.sql b/migrations/sqlite/2026-03-09-005927_add_archives/up.sql new file mode 100644 index 00000000..d624f57b --- /dev/null +++ b/migrations/sqlite/2026-03-09-005927_add_archives/up.sql @@ -0,0 +1,8 @@ +DROP TABLE IF EXISTS archives; + +CREATE TABLE archives ( + user_uuid CHAR(36) NOT NULL REFERENCES users (uuid) ON DELETE CASCADE, + cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid) ON DELETE CASCADE, + archived_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (user_uuid, cipher_uuid) +); diff --git a/migrations/sqlite/2026-04-25-120000_sso_auth_binding/down.sql b/migrations/sqlite/2026-04-25-120000_sso_auth_binding/down.sql new file mode 100644 index 00000000..17e3d8c7 --- /dev/null +++ b/migrations/sqlite/2026-04-25-120000_sso_auth_binding/down.sql @@ -0,0 +1 @@ +ALTER TABLE sso_auth DROP COLUMN binding_hash; diff --git a/migrations/sqlite/2026-04-25-120000_sso_auth_binding/up.sql 
b/migrations/sqlite/2026-04-25-120000_sso_auth_binding/up.sql new file mode 100644 index 00000000..53ee8063 --- /dev/null +++ b/migrations/sqlite/2026-04-25-120000_sso_auth_binding/up.sql @@ -0,0 +1 @@ +ALTER TABLE sso_auth ADD COLUMN binding_hash TEXT; diff --git a/src/api/admin.rs b/src/api/admin.rs index 9a782046..02c976cc 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -469,7 +469,7 @@ async fn deauth_user(user_id: UserId, _token: AdminToken, conn: DbConn, nt: Noti if CONFIG.push_enabled() { for device in Device::find_push_devices_by_user(&user.uuid, &conn).await { - match unregister_push_device(&device.push_uuid).await { + match unregister_push_device(device.push_uuid.as_ref()).await { Ok(r) => r, Err(e) => error!("Unable to unregister devices from Bitwarden server: {e}"), }; diff --git a/src/api/core/accounts.rs b/src/api/core/accounts.rs index fa6a3fd2..a8f9768e 100644 --- a/src/api/core/accounts.rs +++ b/src/api/core/accounts.rs @@ -137,7 +137,7 @@ struct KeysData { } /// Trims whitespace from password hints, and converts blank password hints to `None`. -fn clean_password_hint(password_hint: &Option) -> Option { +fn clean_password_hint(password_hint: Option<&String>) -> Option { match password_hint { None => None, Some(h) => match h.trim() { @@ -147,7 +147,7 @@ fn clean_password_hint(password_hint: &Option) -> Option { } } -fn enforce_password_hint_setting(password_hint: &Option) -> EmptyResult { +fn enforce_password_hint_setting(password_hint: Option<&String>) -> EmptyResult { if password_hint.is_some() && !CONFIG.password_hints_allowed() { err!("Password hints have been disabled by the administrator. Remove the hint and try again."); } @@ -245,8 +245,8 @@ pub async fn _register(data: Json, email_verification: bool, conn: // Check against the password hint setting here so if it fails, the user // can retry without losing their invitation below. 
- let password_hint = clean_password_hint(&data.master_password_hint); - enforce_password_hint_setting(&password_hint)?; + let password_hint = clean_password_hint(data.master_password_hint.as_ref()); + enforce_password_hint_setting(password_hint.as_ref())?; let mut user = match User::find_by_mail(&email, &conn).await { Some(user) => { @@ -353,8 +353,8 @@ async fn post_set_password(data: Json, headers: Headers, conn: // Check against the password hint setting here so if it fails, // the user can retry without losing their invitation below. - let password_hint = clean_password_hint(&data.master_password_hint); - enforce_password_hint_setting(&password_hint)?; + let password_hint = clean_password_hint(data.master_password_hint.as_ref()); + enforce_password_hint_setting(password_hint.as_ref())?; set_kdf_data(&mut user, &data.kdf)?; @@ -515,8 +515,8 @@ async fn post_password(data: Json, headers: Headers, conn: DbCon err!("Invalid password") } - user.password_hint = clean_password_hint(&data.master_password_hint); - enforce_password_hint_setting(&user.password_hint)?; + user.password_hint = clean_password_hint(data.master_password_hint.as_ref()); + enforce_password_hint_setting(user.password_hint.as_ref())?; log_user_event(EventType::UserChangedPassword as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &conn) .await; @@ -1438,7 +1438,7 @@ async fn put_clear_device_token(device_id: DeviceId, conn: DbConn) -> EmptyResul if let Some(device) = Device::find_by_uuid(&device_id, &conn).await { Device::clear_push_token_by_uuid(&device_id, &conn).await?; - unregister_push_device(&device.push_uuid).await?; + unregister_push_device(device.push_uuid.as_ref()).await?; } Ok(()) diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs index 6d4e1f41..43e555e2 100644 --- a/src/api/core/ciphers.rs +++ b/src/api/core/ciphers.rs @@ -19,9 +19,9 @@ use crate::{ crypto, db::{ models::{ - Attachment, AttachmentId, Cipher, CipherId, Collection, CollectionCipher, CollectionGroup, 
CollectionId, - CollectionUser, EventType, Favorite, Folder, FolderCipher, FolderId, Group, Membership, MembershipType, - OrgPolicy, OrgPolicyType, OrganizationId, RepromptType, Send, UserId, + Archive, Attachment, AttachmentId, Cipher, CipherId, Collection, CollectionCipher, CollectionGroup, + CollectionId, CollectionUser, EventType, Favorite, Folder, FolderCipher, FolderId, Group, Membership, + MembershipType, OrgPolicy, OrgPolicyType, OrganizationId, RepromptType, Send, UserId, }, DbConn, DbPool, }, @@ -96,6 +96,10 @@ pub fn routes() -> Vec { post_collections_update, post_collections_admin, put_collections_admin, + archive_cipher_put, + archive_cipher_selected, + unarchive_cipher_put, + unarchive_cipher_selected, ] } @@ -293,6 +297,7 @@ pub struct CipherData { // when using older client versions, or if the operation doesn't involve // updating an existing cipher. last_known_revision_date: Option, + archived_date: Option, } #[derive(Debug, Deserialize)] @@ -534,6 +539,13 @@ pub async fn update_cipher_from_data( cipher.move_to_folder(data.folder_id, &headers.user.uuid, conn).await?; cipher.set_favorite(data.favorite, &headers.user.uuid, conn).await?; + if let Some(dt_str) = data.archived_date { + match NaiveDateTime::parse_from_str(&dt_str, "%+") { + Ok(dt) => cipher.set_archived_at(dt, &headers.user.uuid, conn).await?, + Err(err) => warn!("Error parsing ArchivedDate '{dt_str}': {err}"), + } + } + if ut != UpdateType::None { // Only log events for organizational ciphers if let Some(org_id) = &cipher.organization_uuid { @@ -630,7 +642,7 @@ async fn post_ciphers_import(data: Json, headers: Headers, conn: DbC let mut user = headers.user; user.update_revision(&conn).await?; - nt.send_user_update(UpdateType::SyncVault, &user, &headers.device.push_uuid, &conn).await; + nt.send_user_update(UpdateType::SyncVault, &user, headers.device.push_uuid.as_ref(), &conn).await; Ok(()) } @@ -802,12 +814,16 @@ async fn post_collections_update( err!("Collection cannot be changed") } + 
let Some(ref org_uuid) = cipher.organization_uuid else { + err!("Cipher is not owned by an organization") + }; + let posted_collections = HashSet::::from_iter(data.collection_ids); let current_collections = HashSet::::from_iter(cipher.get_collections(headers.user.uuid.clone(), &conn).await); for collection in posted_collections.symmetric_difference(¤t_collections) { - match Collection::find_by_uuid_and_org(collection, cipher.organization_uuid.as_ref().unwrap(), &conn).await { + match Collection::find_by_uuid_and_org(collection, org_uuid, &conn).await { None => err!("Invalid collection ID provided"), Some(collection) => { if collection.is_writable_by_user(&headers.user.uuid, &conn).await { @@ -838,7 +854,7 @@ async fn post_collections_update( log_event( EventType::CipherUpdatedCollections as i32, &cipher.uuid, - &cipher.organization_uuid.clone().unwrap(), + org_uuid, &headers.user.uuid, headers.device.atype, &headers.ip.ip, @@ -878,12 +894,16 @@ async fn post_collections_admin( err!("Collection cannot be changed") } + let Some(ref org_uuid) = cipher.organization_uuid else { + err!("Cipher is not owned by an organization") + }; + let posted_collections = HashSet::::from_iter(data.collection_ids); let current_collections = HashSet::::from_iter(cipher.get_admin_collections(headers.user.uuid.clone(), &conn).await); for collection in posted_collections.symmetric_difference(¤t_collections) { - match Collection::find_by_uuid_and_org(collection, cipher.organization_uuid.as_ref().unwrap(), &conn).await { + match Collection::find_by_uuid_and_org(collection, org_uuid, &conn).await { None => err!("Invalid collection ID provided"), Some(collection) => { if collection.is_writable_by_user(&headers.user.uuid, &conn).await { @@ -914,7 +934,7 @@ async fn post_collections_admin( log_event( EventType::CipherUpdatedCollections as i32, &cipher.uuid, - &cipher.organization_uuid.unwrap(), + org_uuid, &headers.user.uuid, headers.device.atype, &headers.ip.ip, @@ -1005,7 +1025,7 @@ async fn 
put_cipher_share_selected( } // Multi share actions do not send out a push for each cipher, we need to send a general sync here - nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, &conn).await; + nt.send_user_update(UpdateType::SyncCiphers, &headers.user, headers.device.push_uuid.as_ref(), &conn).await; Ok(()) } @@ -1618,7 +1638,7 @@ async fn move_cipher_selected( .await; } else { // Multi move actions do not send out a push for each cipher, we need to send a general sync here - nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, &conn).await; + nt.send_user_update(UpdateType::SyncCiphers, &headers.user, headers.device.push_uuid.as_ref(), &conn).await; } if cipher_count != accessible_ciphers_count { @@ -1670,7 +1690,7 @@ async fn purge_org_vault( match Membership::find_confirmed_by_user_and_org(&user.uuid, &organization.org_id, &conn).await { Some(member) if member.atype == MembershipType::Owner => { Cipher::delete_all_by_organization(&organization.org_id, &conn).await?; - nt.send_user_update(UpdateType::SyncVault, &user, &headers.device.push_uuid, &conn).await; + nt.send_user_update(UpdateType::SyncVault, &user, headers.device.push_uuid.as_ref(), &conn).await; log_event( EventType::OrganizationPurgedVault as i32, @@ -1710,11 +1730,41 @@ async fn purge_personal_vault( } user.update_revision(&conn).await?; - nt.send_user_update(UpdateType::SyncVault, &user, &headers.device.push_uuid, &conn).await; + nt.send_user_update(UpdateType::SyncVault, &user, headers.device.push_uuid.as_ref(), &conn).await; Ok(()) } +#[put("/ciphers//archive")] +async fn archive_cipher_put(cipher_id: CipherId, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { + archive_cipher(&cipher_id, &headers, false, &conn, &nt).await +} + +#[put("/ciphers/archive", data = "")] +async fn archive_cipher_selected( + data: Json, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> JsonResult { + 
archive_multiple_ciphers(data, &headers, &conn, &nt).await +} + +#[put("/ciphers//unarchive")] +async fn unarchive_cipher_put(cipher_id: CipherId, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { + unarchive_cipher(&cipher_id, &headers, false, &conn, &nt).await +} + +#[put("/ciphers/unarchive", data = "")] +async fn unarchive_cipher_selected( + data: Json, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> JsonResult { + unarchive_multiple_ciphers(data, &headers, &conn, &nt).await +} + #[derive(PartialEq)] pub enum CipherDeleteOptions { SoftSingle, @@ -1805,7 +1855,7 @@ async fn _delete_multiple_ciphers( } // Multi delete actions do not send out a push for each cipher, we need to send a general sync here - nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, &conn).await; + nt.send_user_update(UpdateType::SyncCiphers, &headers.user, headers.device.push_uuid.as_ref(), &conn).await; Ok(()) } @@ -1873,7 +1923,7 @@ async fn _restore_multiple_ciphers( } // Multi move actions do not send out a push for each cipher, we need to send a general sync here - nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, conn).await; + nt.send_user_update(UpdateType::SyncCiphers, &headers.user, headers.device.push_uuid.as_ref(), conn).await; Ok(Json(json!({ "data": ciphers, @@ -1933,6 +1983,122 @@ async fn _delete_cipher_attachment_by_id( Ok(Json(json!({"cipher":cipher_json}))) } +async fn archive_cipher( + cipher_id: &CipherId, + headers: &Headers, + multi_archive: bool, + conn: &DbConn, + nt: &Notify<'_>, +) -> JsonResult { + let Some(cipher) = Cipher::find_by_uuid(cipher_id, conn).await else { + err!("Cipher doesn't exist") + }; + + if !cipher.is_accessible_to_user(&headers.user.uuid, conn).await { + err!("Cipher is not accessible for the current user") + } + + cipher.set_archived_at(Utc::now().naive_utc(), &headers.user.uuid, conn).await?; + + if !multi_archive { + nt.send_cipher_update( + 
UpdateType::SyncCipherUpdate, + &cipher, + &cipher.update_users_revision(conn).await, + &headers.device, + None, + conn, + ) + .await; + } + + Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await?)) +} + +async fn unarchive_cipher( + cipher_id: &CipherId, + headers: &Headers, + multi_unarchive: bool, + conn: &DbConn, + nt: &Notify<'_>, +) -> JsonResult { + let Some(cipher) = Cipher::find_by_uuid(cipher_id, conn).await else { + err!("Cipher doesn't exist") + }; + + if !cipher.is_accessible_to_user(&headers.user.uuid, conn).await { + err!("Cipher is not accessible for the current user") + } + + cipher.unarchive(&headers.user.uuid, conn).await?; + + if !multi_unarchive { + nt.send_cipher_update( + UpdateType::SyncCipherUpdate, + &cipher, + &cipher.update_users_revision(conn).await, + &headers.device, + None, + conn, + ) + .await; + } + + Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await?)) +} + +async fn archive_multiple_ciphers( + data: Json, + headers: &Headers, + conn: &DbConn, + nt: &Notify<'_>, +) -> JsonResult { + let data = data.into_inner(); + + let mut ciphers: Vec = Vec::new(); + for cipher_id in data.ids { + match archive_cipher(&cipher_id, headers, true, conn, nt).await { + Ok(json) => ciphers.push(json.into_inner()), + err => return err, + } + } + + // Multi archive does not send out a push for each cipher, we need to send a general sync here + nt.send_user_update(UpdateType::SyncCiphers, &headers.user, headers.device.push_uuid.as_ref(), conn).await; + + Ok(Json(json!({ + "data": ciphers, + "object": "list", + "continuationToken": null + }))) +} + +async fn unarchive_multiple_ciphers( + data: Json, + headers: &Headers, + conn: &DbConn, + nt: &Notify<'_>, +) -> JsonResult { + let data = data.into_inner(); + + let mut ciphers: Vec = Vec::new(); + for cipher_id in data.ids { + match unarchive_cipher(&cipher_id, headers, true, conn, nt).await { + Ok(json) => 
ciphers.push(json.into_inner()), + err => return err, + } + } + + // Multi unarchive does not send out a push for each cipher, we need to send a general sync here + nt.send_user_update(UpdateType::SyncCiphers, &headers.user, headers.device.push_uuid.as_ref(), conn).await; + + Ok(Json(json!({ + "data": ciphers, + "object": "list", + "continuationToken": null + }))) +} + /// This will hold all the necessary data to improve a full sync of all the ciphers /// It can be used during the `Cipher::to_json()` call. /// It will prevent the so called N+1 SQL issue by running just a few queries which will hold all the data needed. @@ -1942,6 +2108,7 @@ pub struct CipherSyncData { pub cipher_folders: HashMap, pub cipher_favorites: HashSet, pub cipher_collections: HashMap>, + pub cipher_archives: HashMap, pub members: HashMap, pub user_collections: HashMap, pub user_collections_groups: HashMap, @@ -1958,20 +2125,25 @@ impl CipherSyncData { pub async fn new(user_id: &UserId, sync_type: CipherSyncType, conn: &DbConn) -> Self { let cipher_folders: HashMap; let cipher_favorites: HashSet; + let cipher_archives: HashMap; match sync_type { - // User Sync supports Folders and Favorites + // User Sync supports Folders, Favorites, and Archives CipherSyncType::User => { // Generate a HashMap with the Cipher UUID as key and the Folder UUID as value cipher_folders = FolderCipher::find_by_user(user_id, conn).await.into_iter().collect(); // Generate a HashSet of all the Cipher UUID's which are marked as favorite cipher_favorites = Favorite::get_all_cipher_uuid_by_user(user_id, conn).await.into_iter().collect(); + + // Generate a HashMap with the Cipher UUID as key and the archived date time as value + cipher_archives = Archive::find_by_user(user_id, conn).await.into_iter().collect(); } - // Organization Sync does not support Folders and Favorites. + // Organization Sync does not support Folders, Favorites, or Archives. // If these are set, it will cause issues in the web-vault. 
CipherSyncType::Organization => { cipher_folders = HashMap::with_capacity(0); cipher_favorites = HashSet::with_capacity(0); + cipher_archives = HashMap::with_capacity(0); } } @@ -2034,6 +2206,7 @@ impl CipherSyncData { }; Self { + cipher_archives, cipher_attachments, cipher_folders, cipher_favorites, diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs index 038b9a6d..ad9002fd 100644 --- a/src/api/core/mod.rs +++ b/src/api/core/mod.rs @@ -124,7 +124,7 @@ async fn post_eq_domains(data: Json, headers: Headers, conn: Db user.save(&conn).await?; - nt.send_user_update(UpdateType::SyncSettings, &user, &headers.device.push_uuid, &conn).await; + nt.send_user_update(UpdateType::SyncSettings, &user, headers.device.push_uuid.as_ref(), &conn).await; Ok(Json(json!({}))) } @@ -204,11 +204,11 @@ fn config() -> Json { // Client (v2026.2.1): https://github.com/bitwarden/clients/blob/f96380c3138291a028bdd2c7a5fee540d5c98ba5/libs/common/src/enums/feature-flag.enum.ts#L12 // Android (v2026.2.1): https://github.com/bitwarden/android/blob/6902c19c0093fa476bbf74ccaa70c9f14afbb82f/core/src/main/kotlin/com/bitwarden/core/data/manager/model/FlagKey.kt#L31 // iOS (v2026.2.1): https://github.com/bitwarden/ios/blob/cdd9ba1770ca2ffc098d02d12cc3208e3a830454/BitwardenShared/Core/Platform/Models/Enum/FeatureFlag.swift#L7 - let feature_states = parse_experimental_client_feature_flags( + let mut feature_states = parse_experimental_client_feature_flags( &CONFIG.experimental_client_feature_flags(), FeatureFlagFilter::ValidOnly, ); - // Add default feature_states here if needed, currently no features are needed by default. 
+ feature_states.insert("pm-19148-innovation-archive".to_string(), true); Json(json!({ // Note: The clients use this version to handle backwards compatibility concerns diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs index 318001dc..31311a65 100644 --- a/src/api/core/organizations.rs +++ b/src/api/core/organizations.rs @@ -907,36 +907,21 @@ async fn _get_org_details( Ok(json!(ciphers_json)) } -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -struct OrgDomainDetails { - email: String, -} - // Returning a Domain/Organization here allow to prefill it and prevent prompting the user -// So we either return an Org name associated to the user or a dummy value. +// So we return a dummy value, since we only support a single SSO integration, and do not use the response anywhere // In use since `v2025.6.0`, appears to use only the first `organizationIdentifier` -#[post("/organizations/domain/sso/verified", data = "")] -async fn get_org_domain_sso_verified(data: Json, conn: DbConn) -> JsonResult { - let data: OrgDomainDetails = data.into_inner(); - - let identifiers = match Organization::find_org_user_email(&data.email, &conn) - .await - .into_iter() - .map(|o| (o.name, o.uuid.to_string())) - .collect::>() - { - v if !v.is_empty() => v, - _ => vec![(FAKE_SSO_IDENTIFIER.to_string(), FAKE_SSO_IDENTIFIER.to_string())], - }; - +#[post("/organizations/domain/sso/verified")] +fn get_org_domain_sso_verified() -> JsonResult { + // Always return a dummy value, no matter if SSO is enabled or not Ok(Json(json!({ "object": "list", - "data": identifiers.into_iter().map(|(name, identifier)| json!({ - "organizationName": name, // appear unused - "organizationIdentifier": identifier, - "domainName": CONFIG.domain(), // appear unused - })).collect::>() + "data": [{ + "organizationIdentifier": FAKE_SSO_IDENTIFIER, + // These appear to be unused + "organizationName": FAKE_SSO_IDENTIFIER, + "domainName": CONFIG.domain() + }], + "continuationToken": null }))) } 
@@ -1463,7 +1448,7 @@ async fn _confirm_invite( let save_result = member_to_confirm.save(conn).await; if let Some(user) = User::find_by_uuid(&member_to_confirm.user_uuid, conn).await { - nt.send_user_update(UpdateType::SyncOrgKeys, &user, &headers.device.push_uuid, conn).await; + nt.send_user_update(UpdateType::SyncOrgKeys, &user, headers.device.push_uuid.as_ref(), conn).await; } save_result @@ -1721,7 +1706,7 @@ async fn _delete_member( .await; if let Some(user) = User::find_by_uuid(&member_to_delete.user_uuid, conn).await { - nt.send_user_update(UpdateType::SyncOrgKeys, &user, &headers.device.push_uuid, conn).await; + nt.send_user_update(UpdateType::SyncOrgKeys, &user, headers.device.push_uuid.as_ref(), conn).await; } member_to_delete.delete(conn).await @@ -1979,7 +1964,7 @@ async fn list_policies_token(org_id: OrganizationId, token: &str, conn: DbConn) } // Called during the SSO enrollment return the default policy -#[get("/organizations/vaultwarden-dummy-oidc-identifier/policies/master-password", rank = 1)] +#[get("/organizations/00000000-01DC-01DC-01DC-000000000000/policies/master-password", rank = 1)] fn get_dummy_master_password_policy() -> JsonResult { let (enabled, data) = match CONFIG.sso_master_password_policy_value() { Some(policy) if CONFIG.sso_enabled() => (true, policy.to_string()), @@ -3049,10 +3034,7 @@ async fn put_reset_password_enrollment( err!("User to enroll isn't member of required organization", "The user_id and acting user do not match"); } - let Some(mut membership) = Membership::find_confirmed_by_user_and_org(&headers.user.uuid, &org_id, &conn).await - else { - err!("User to enroll isn't member of required organization") - }; + let mut membership = headers.membership; check_reset_password_applicable(&org_id, &conn).await?; diff --git a/src/api/core/sends.rs b/src/api/core/sends.rs index 10bf85be..45ead810 100644 --- a/src/api/core/sends.rs +++ b/src/api/core/sends.rs @@ -568,13 +568,13 @@ async fn post_access_file( async fn 
download_url(host: &Host, send_id: &SendId, file_id: &SendFileId) -> Result { let operator = CONFIG.opendal_operator_for_path_type(&PathType::Sends)?; - if operator.info().scheme() == <&'static str>::from(opendal::Scheme::Fs) { + if crate::storage::is_fs_operator(&operator) { let token_claims = crate::auth::generate_send_claims(send_id, file_id); let token = crate::auth::encode_jwt(&token_claims); Ok(format!("{}/api/sends/{send_id}/{file_id}?t={token}", &host.host)) } else { - Ok(operator.presign_read(&format!("{send_id}/{file_id}"), Duration::from_secs(5 * 60)).await?.uri().to_string()) + Ok(operator.presign_read(&format!("{send_id}/{file_id}"), Duration::from_mins(5)).await?.uri().to_string()) } } diff --git a/src/api/core/two_factor/webauthn.rs b/src/api/core/two_factor/webauthn.rs index 0ec0e30e..ad17ce36 100644 --- a/src/api/core/two_factor/webauthn.rs +++ b/src/api/core/two_factor/webauthn.rs @@ -38,7 +38,7 @@ static WEBAUTHN: LazyLock = LazyLock::new(|| { let webauthn = WebauthnBuilder::new(&rp_id, &rp_origin) .expect("Creating WebauthnBuilder failed") .rp_name(&domain) - .timeout(Duration::from_millis(60000)); + .timeout(Duration::from_mins(1)); webauthn.build().expect("Building Webauthn failed") }); diff --git a/src/api/icons.rs b/src/api/icons.rs index da83d0c4..5c9ed113 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -19,7 +19,7 @@ use svg_hush::{data_url_filter, Filter}; use crate::{ config::PathType, error::Error, - http_client::{get_reqwest_client_builder, should_block_address, CustomHttpClientError}, + http_client::{get_reqwest_client_builder, get_valid_host, should_block_host, CustomHttpClientError}, util::Cached, CONFIG, }; @@ -81,19 +81,19 @@ static ICON_SIZE_REGEX: LazyLock = LazyLock::new(|| Regex::new(r"(?x)(\d+ // The function name `icon_external` is checked in the `on_response` function in `AppHeaders` // It is used to prevent sending a specific header which breaks icon downloads. 
// If this function needs to be renamed, also adjust the code in `util.rs` -#[get("//icon.png")] -fn icon_external(domain: &str) -> Cached> { - if !is_valid_domain(domain) { - warn!("Invalid domain: {domain}"); +#[get("//icon.png")] +fn icon_external(host: &str) -> Cached> { + let Ok(host) = get_valid_host(host) else { + warn!("Invalid host: {host}"); + return Cached::ttl(None, CONFIG.icon_cache_negttl(), true); + }; + + if should_block_host(&host).is_err() { + warn!("Blocked address: {host}"); return Cached::ttl(None, CONFIG.icon_cache_negttl(), true); } - if should_block_address(domain) { - warn!("Blocked address: {domain}"); - return Cached::ttl(None, CONFIG.icon_cache_negttl(), true); - } - - let url = CONFIG._icon_service_url().replace("{}", domain); + let url = CONFIG._icon_service_url().replace("{}", &host.to_string()); let redir = match CONFIG.icon_redirect_code() { 301 => Some(Redirect::moved(url)), // legacy permanent redirect 302 => Some(Redirect::found(url)), // legacy temporary redirect @@ -107,12 +107,21 @@ fn icon_external(domain: &str) -> Cached> { Cached::ttl(redir, CONFIG.icon_cache_ttl(), true) } -#[get("//icon.png")] -async fn icon_internal(domain: &str) -> Cached<(ContentType, Vec)> { +#[get("//icon.png")] +async fn icon_internal(host: &str) -> Cached<(ContentType, Vec)> { const FALLBACK_ICON: &[u8] = include_bytes!("../static/images/fallback-icon.png"); - if !is_valid_domain(domain) { - warn!("Invalid domain: {domain}"); + let Ok(host) = get_valid_host(host) else { + warn!("Invalid host: {host}"); + return Cached::ttl( + (ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), + CONFIG.icon_cache_negttl(), + true, + ); + }; + + if should_block_host(&host).is_err() { + warn!("Blocked address: {host}"); return Cached::ttl( (ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl(), @@ -120,16 +129,7 @@ async fn icon_internal(domain: &str) -> Cached<(ContentType, Vec)> { ); } - if should_block_address(domain) { - 
warn!("Blocked address: {domain}"); - return Cached::ttl( - (ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), - CONFIG.icon_cache_negttl(), - true, - ); - } - - match get_icon(domain).await { + match get_icon(&host.to_string()).await { Some((icon, icon_type)) => { Cached::ttl((ContentType::new("image", icon_type), icon), CONFIG.icon_cache_ttl(), true) } @@ -137,42 +137,6 @@ async fn icon_internal(domain: &str) -> Cached<(ContentType, Vec)> { } } -/// Returns if the domain provided is valid or not. -/// -/// This does some manual checks and makes use of Url to do some basic checking. -/// domains can't be larger then 63 characters (not counting multiple subdomains) according to the RFC's, but we limit the total size to 255. -fn is_valid_domain(domain: &str) -> bool { - const ALLOWED_CHARS: &str = "-."; - - // If parsing the domain fails using Url, it will not work with reqwest. - if let Err(parse_error) = url::Url::parse(format!("https://{domain}").as_str()) { - debug!("Domain parse error: '{domain}' - {parse_error:?}"); - return false; - } else if domain.is_empty() - || domain.contains("..") - || domain.starts_with('.') - || domain.starts_with('-') - || domain.ends_with('-') - { - debug!( - "Domain validation error: '{domain}' is either empty, contains '..', starts with an '.', starts or ends with a '-'" - ); - return false; - } else if domain.len() > 255 { - debug!("Domain validation error: '{domain}' exceeds 255 characters"); - return false; - } - - for c in domain.chars() { - if !c.is_alphanumeric() && !ALLOWED_CHARS.contains(c) { - debug!("Domain validation error: '{domain}' contains an invalid character '{c}'"); - return false; - } - } - - true -} - async fn get_icon(domain: &str) -> Option<(Vec, String)> { let path = format!("{domain}.png"); @@ -367,7 +331,7 @@ async fn get_icon_url(domain: &str) -> Result { tld = domain_parts.next_back().unwrap(), base = domain_parts.next_back().unwrap() ); - if is_valid_domain(&base_domain) { + if 
get_valid_host(&base_domain).is_ok() { let sslbase = format!("https://{base_domain}"); let httpbase = format!("http://{base_domain}"); debug!("[get_icon_url]: Trying without subdomains '{base_domain}'"); @@ -378,7 +342,7 @@ async fn get_icon_url(domain: &str) -> Result { // When the domain is not an IP, and has less then 2 dots, try to add www. infront of it. } else if is_ip.is_err() && domain.matches('.').count() < 2 { let www_domain = format!("www.{domain}"); - if is_valid_domain(&www_domain) { + if get_valid_host(&www_domain).is_ok() { let sslwww = format!("https://{www_domain}"); let httpwww = format!("http://{www_domain}"); debug!("[get_icon_url]: Trying with www. prefix '{www_domain}'"); @@ -532,7 +496,8 @@ async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> { use data_url::DataUrl; - for icon in icon_result.iconlist.iter().take(5) { + let mut icons = icon_result.iconlist.iter().take(5).peekable(); + while let Some(icon) = icons.next() { if icon.href.starts_with("data:image") { let Ok(datauri) = DataUrl::process(&icon.href) else { continue; @@ -560,11 +525,23 @@ async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> { _ => debug!("Extracted icon from data:image uri is invalid"), }; } else { - let res = get_page_with_referer(&icon.href, &icon_result.referer).await?; + debug!("Trying {}", icon.href); + // Make sure all icons are checked before returning error + let res = match get_page_with_referer(&icon.href, &icon_result.referer).await { + Ok(r) => r, + Err(e) if icons.peek().is_none() => return Err(e), + Err(e) if CustomHttpClientError::downcast_ref(&e).is_some() => return Err(e), // If blacklisted stop immediately instead of checking the rest of the icons. 
see explanation and actual handling inside get_icon() + Err(e) => { + warn!("Unable to download icon: {e:?}"); + + // Continue to next icon + continue; + } + }; buffer = stream_to_bytes_limit(res, 5120 * 1024).await?; // 5120KB/5MB for each icon max (Same as icons.bitwarden.net) - // Check if the icon type is allowed, else try an icon from the list. + // Check if the icon type is allowed, else try another icon from the list. icon_type = get_icon_type(&buffer); if icon_type.is_none() { buffer.clear(); @@ -618,14 +595,17 @@ fn get_icon_type(bytes: &[u8]) -> Option<&'static str> { None } + // Some details can be found here: + // - https://www.garykessler.net/library/file_sigs_GCK_latest.html + // - https://en.wikipedia.org/wiki/List_of_file_signatures match bytes { - [137, 80, 78, 71, ..] => Some("png"), - [0, 0, 1, 0, ..] => Some("x-icon"), - [82, 73, 70, 70, ..] => Some("webp"), - [255, 216, 255, ..] => Some("jpeg"), - [71, 73, 70, 56, ..] => Some("gif"), - [66, 77, ..] => Some("bmp"), - [60, 115, 118, 103, ..] => Some("svg+xml"), // Normal svg + [137, 80, 78, 71, 13, 10, 26, 10, ..] => Some("png"), + [0, 0, 1, 0, n1, n2, ..] if u16::from_le_bytes([*n1, *n2]) > 0 => Some("x-icon"), // https://en.wikipedia.org/wiki/ICO_(file_format) + [82, 73, 70, 70, _, _, _, _, 87, 69, 66, 80, ..] => Some("webp"), // Only match WebP Images + [255, 216, 255, b, ..] if *b >= 0xC0 => Some("jpeg"), + [71, 73, 70, 56, 55 | 57, 97, ..] => Some("gif"), + [66, 77, _, _, _, _, 0, 0, 0, 0, ..] => Some("bmp"), // https://en.wikipedia.org/wiki/BMP_file_format + [60, 115, 118, 103, ..] => Some("svg+xml"), // Normal svg [60, 63, 120, 109, 108, ..] 
=> check_svg_after_xml_declaration(bytes), // An svg starting with None, } diff --git a/src/api/identity.rs b/src/api/identity.rs index b6d659c6..9f64e560 100644 --- a/src/api/identity.rs +++ b/src/api/identity.rs @@ -2,6 +2,7 @@ use chrono::Utc; use num_traits::FromPrimitive; use rocket::{ form::{Form, FromForm}, + http::{Cookie, CookieJar, SameSite}, response::Redirect, serde::json::Json, Route, @@ -23,7 +24,8 @@ use crate::{ ApiResult, EmptyResult, JsonResult, }, auth, - auth::{generate_organization_api_key_login_claims, AuthMethod, ClientHeaders, ClientIp, ClientVersion}, + auth::{generate_organization_api_key_login_claims, AuthMethod, ClientHeaders, ClientIp, ClientVersion, Secure}, + crypto, db::{ models::{ AuthRequest, AuthRequestId, Device, DeviceId, EventType, Invitation, OIDCCodeWrapper, OrganizationApiKey, @@ -41,6 +43,7 @@ pub fn routes() -> Vec { routes![ login, prelogin, + prelogin_password, identity_register, register_verification_email, register_finish, @@ -64,43 +67,43 @@ async fn login( let login_result = match data.grant_type.as_ref() { "refresh_token" => { - _check_is_some(&data.refresh_token, "refresh_token cannot be blank")?; + _check_is_some(data.refresh_token.as_ref(), "refresh_token cannot be blank")?; _refresh_login(data, &conn, &client_header.ip).await } "password" if CONFIG.sso_enabled() && CONFIG.sso_only() => err!("SSO sign-in is required"), "password" => { - _check_is_some(&data.client_id, "client_id cannot be blank")?; - _check_is_some(&data.password, "password cannot be blank")?; - _check_is_some(&data.scope, "scope cannot be blank")?; - _check_is_some(&data.username, "username cannot be blank")?; + _check_is_some(data.client_id.as_ref(), "client_id cannot be blank")?; + _check_is_some(data.password.as_ref(), "password cannot be blank")?; + _check_is_some(data.scope.as_ref(), "scope cannot be blank")?; + _check_is_some(data.username.as_ref(), "username cannot be blank")?; - _check_is_some(&data.device_identifier, "device_identifier 
cannot be blank")?; - _check_is_some(&data.device_name, "device_name cannot be blank")?; - _check_is_some(&data.device_type, "device_type cannot be blank")?; + _check_is_some(data.device_identifier.as_ref(), "device_identifier cannot be blank")?; + _check_is_some(data.device_name.as_ref(), "device_name cannot be blank")?; + _check_is_some(data.device_type.as_ref(), "device_type cannot be blank")?; - _password_login(data, &mut user_id, &conn, &client_header.ip, &client_version).await + _password_login(data, &mut user_id, &conn, &client_header.ip, client_version.as_ref()).await } "client_credentials" => { - _check_is_some(&data.client_id, "client_id cannot be blank")?; - _check_is_some(&data.client_secret, "client_secret cannot be blank")?; - _check_is_some(&data.scope, "scope cannot be blank")?; + _check_is_some(data.client_id.as_ref(), "client_id cannot be blank")?; + _check_is_some(data.client_secret.as_ref(), "client_secret cannot be blank")?; + _check_is_some(data.scope.as_ref(), "scope cannot be blank")?; - _check_is_some(&data.device_identifier, "device_identifier cannot be blank")?; - _check_is_some(&data.device_name, "device_name cannot be blank")?; - _check_is_some(&data.device_type, "device_type cannot be blank")?; + _check_is_some(data.device_identifier.as_ref(), "device_identifier cannot be blank")?; + _check_is_some(data.device_name.as_ref(), "device_name cannot be blank")?; + _check_is_some(data.device_type.as_ref(), "device_type cannot be blank")?; _api_key_login(data, &mut user_id, &conn, &client_header.ip).await } "authorization_code" if CONFIG.sso_enabled() => { - _check_is_some(&data.client_id, "client_id cannot be blank")?; - _check_is_some(&data.code, "code cannot be blank")?; - _check_is_some(&data.code_verifier, "code verifier cannot be blank")?; + _check_is_some(data.client_id.as_ref(), "client_id cannot be blank")?; + _check_is_some(data.code.as_ref(), "code cannot be blank")?; + _check_is_some(data.code_verifier.as_ref(), "code verifier 
cannot be blank")?; - _check_is_some(&data.device_identifier, "device_identifier cannot be blank")?; - _check_is_some(&data.device_name, "device_name cannot be blank")?; - _check_is_some(&data.device_type, "device_type cannot be blank")?; + _check_is_some(data.device_identifier.as_ref(), "device_identifier cannot be blank")?; + _check_is_some(data.device_name.as_ref(), "device_name cannot be blank")?; + _check_is_some(data.device_type.as_ref(), "device_type cannot be blank")?; - _sso_login(data, &mut user_id, &conn, &client_header.ip, &client_version).await + _sso_login(data, &mut user_id, &conn, &client_header.ip, client_version.as_ref()).await } "authorization_code" => err!("SSO sign-in is not available"), t => err!("Invalid type", t), @@ -176,7 +179,7 @@ async fn _sso_login( user_id: &mut Option, conn: &DbConn, ip: &ClientIp, - client_version: &Option, + client_version: Option<&ClientVersion>, ) -> JsonResult { AuthMethod::Sso.check_scope(data.scope.as_ref())?; @@ -227,7 +230,33 @@ async fn _sso_login( } ) } - Some((user, None)) => Some((user, None)), + Some((user, None)) => match user_infos.email_verified { + None if !CONFIG.sso_allow_unknown_email_verification() => { + error!( + "Login failure ({}), existing non SSO user ({}) with same email ({}) and email verification status is unknown", + user_infos.identifier, user.uuid, user.email + ); + err_silent!( + "Email verification status is unknown", + ErrorEvent { + event: EventType::UserFailedLogIn + } + ) + } + Some(false) => { + error!( + "Login failure ({}), existing non SSO user ({}) with same email ({}) and email is not verified", + user_infos.identifier, user.uuid, user.email + ); + err_silent!( + "Email is not verified by the SSO provider", + ErrorEvent { + event: EventType::UserFailedLogIn + } + ) + } + _ => Some((user, None)), + }, }, Some((user, sso_user)) => Some((user, Some(sso_user))), }; @@ -319,7 +348,7 @@ async fn _password_login( user_id: &mut Option, conn: &DbConn, ip: &ClientIp, - 
client_version: &Option, + client_version: Option<&ClientVersion>, ) -> JsonResult { // Validate scope AuthMethod::Password.check_scope(data.scope.as_ref())?; @@ -733,7 +762,7 @@ async fn twofactor_auth( data: &ConnectData, device: &mut Device, ip: &ClientIp, - client_version: &Option, + client_version: Option<&ClientVersion>, conn: &DbConn, ) -> ApiResult> { let twofactors = TwoFactor::find_by_user(&user.uuid, conn).await; @@ -878,7 +907,7 @@ async fn _json_err_twofactor( providers: &[i32], user_id: &UserId, data: &ConnectData, - client_version: &Option, + client_version: Option<&ClientVersion>, conn: &DbConn, ) -> ApiResult { let mut result = json!({ @@ -982,6 +1011,11 @@ async fn prelogin(data: Json, conn: DbConn) -> Json { _prelogin(data, conn).await } +#[post("/accounts/prelogin/password", data = "")] +async fn prelogin_password(data: Json, conn: DbConn) -> Json { + _prelogin(data, conn).await +} + #[post("/accounts/register", data = "")] async fn identity_register(data: Json, conn: DbConn) -> JsonResult { _register(data, false, conn).await @@ -1108,7 +1142,7 @@ struct ConnectData { #[field(name = uncased("code_verifier"))] code_verifier: Option, } -fn _check_is_some(value: &Option, msg: &str) -> EmptyResult { +fn _check_is_some(value: Option<&T>, msg: &str) -> EmptyResult { if value.is_none() { err!(msg) } @@ -1127,13 +1161,16 @@ fn prevalidate() -> JsonResult { } } +const SSO_BINDING_COOKIE: &str = "VW_SSO_BINDING"; + #[get("/connect/oidc-signin?&", rank = 1)] -async fn oidcsignin(code: OIDCCode, state: String, mut conn: DbConn) -> ApiResult { +async fn oidcsignin(code: OIDCCode, state: String, cookies: &CookieJar<'_>, mut conn: DbConn) -> ApiResult { _oidcsignin_redirect( state, OIDCCodeWrapper::Ok { code, }, + cookies, &mut conn, ) .await @@ -1146,6 +1183,7 @@ async fn oidcsignin_error( state: String, error: String, error_description: Option, + cookies: &CookieJar<'_>, mut conn: DbConn, ) -> ApiResult { _oidcsignin_redirect( @@ -1154,6 +1192,7 @@ async fn 
oidcsignin_error( error, error_description, }, + cookies, &mut conn, ) .await @@ -1165,6 +1204,7 @@ async fn oidcsignin_error( async fn _oidcsignin_redirect( base64_state: String, code_response: OIDCCodeWrapper, + cookies: &CookieJar<'_>, conn: &mut DbConn, ) -> ApiResult { let state = sso::decode_state(&base64_state)?; @@ -1173,6 +1213,18 @@ async fn _oidcsignin_redirect( None => err!(format!("Cannot retrieve sso_auth for {state}")), Some(sso_auth) => sso_auth, }; + + // Browser-binding check + // The cookie was set on /connect/authorize and must come from the same browser that initiated the flow. + let cookie_value = cookies.get(SSO_BINDING_COOKIE).map(|c| c.value().to_string()); + let provided_hash = cookie_value.as_deref().map(|v| crypto::sha256_hex(v.as_bytes())); + match (sso_auth.binding_hash.as_deref(), provided_hash.as_deref()) { + (Some(expected), Some(actual)) if crypto::ct_eq(expected, actual) => {} + _ => err!(format!("SSO session binding mismatch for {state}")), + } + cookies + .remove(Cookie::build(SSO_BINDING_COOKIE).path(format!("{}/identity/connect/", CONFIG.domain_path())).build()); + sso_auth.code_response = Some(code_response); sso_auth.updated_at = Utc::now().naive_utc(); sso_auth.save(conn).await?; @@ -1219,7 +1271,7 @@ struct AuthorizeData { // The `redirect_uri` will change depending of the client (web, android, ios ..) #[get("/connect/authorize?")] -async fn authorize(data: AuthorizeData, conn: DbConn) -> ApiResult { +async fn authorize(data: AuthorizeData, cookies: &CookieJar<'_>, secure: Secure, conn: DbConn) -> ApiResult { let AuthorizeData { client_id, redirect_uri, @@ -1233,7 +1285,23 @@ async fn authorize(data: AuthorizeData, conn: DbConn) -> ApiResult { err!("Unsupported code challenge method"); } - let auth_url = sso::authorize_url(state, code_challenge, &client_id, &redirect_uri, conn).await?; + // Generate browser-binding token. Stored hashed in DB; raw value handed to the browser as a cookie. 
+ // Validated on /connect/oidc-signin + let binding_token = data_encoding::BASE64URL_NOPAD.encode(&crypto::get_random_bytes::<32>()); + let binding_hash = crypto::sha256_hex(binding_token.as_bytes()); + + let auth_url = + sso::authorize_url(state, code_challenge, &client_id, &redirect_uri, Some(binding_hash), conn).await?; + + cookies.add( + Cookie::build((SSO_BINDING_COOKIE, binding_token)) + .path(format!("{}/identity/connect/", CONFIG.domain_path())) + .max_age(time::Duration::seconds(sso::SSO_AUTH_EXPIRATION.num_seconds())) + .same_site(SameSite::Lax) // Lax is needed because the IdP runs on a different FQDN + .http_only(true) + .secure(secure.https) + .build(), + ); Ok(Redirect::temporary(String::from(auth_url))) } diff --git a/src/api/notifications.rs b/src/api/notifications.rs index 492fdb19..b1d64472 100644 --- a/src/api/notifications.rs +++ b/src/api/notifications.rs @@ -338,7 +338,7 @@ impl WebSocketUsers { } // NOTE: The last modified date needs to be updated before calling these methods - pub async fn send_user_update(&self, ut: UpdateType, user: &User, push_uuid: &Option, conn: &DbConn) { + pub async fn send_user_update(&self, ut: UpdateType, user: &User, push_uuid: Option<&PushId>, conn: &DbConn) { // Skip any processing if both WebSockets and Push are not active if *NOTIFICATIONS_DISABLED { return; diff --git a/src/api/push.rs b/src/api/push.rs index 5000869d..e3ff1383 100644 --- a/src/api/push.rs +++ b/src/api/push.rs @@ -135,7 +135,7 @@ pub async fn register_push_device(device: &mut Device, conn: &DbConn) -> EmptyRe Ok(()) } -pub async fn unregister_push_device(push_id: &Option) -> EmptyResult { +pub async fn unregister_push_device(push_id: Option<&PushId>) -> EmptyResult { if !CONFIG.push_enabled() || push_id.is_none() { return Ok(()); } @@ -206,7 +206,7 @@ pub async fn push_logout(user: &User, acting_device: Option<&Device>, conn: &DbC } } -pub async fn push_user_update(ut: UpdateType, user: &User, push_uuid: &Option, conn: &DbConn) { +pub async 
fn push_user_update(ut: UpdateType, user: &User, push_uuid: Option<&PushId>, conn: &DbConn) { if Device::check_user_has_push_device(&user.uuid, conn).await { tokio::task::spawn(send_to_push_relay(json!({ "userId": user.uuid, diff --git a/src/auth.rs b/src/auth.rs index 43184369..06bd9c22 100644 --- a/src/auth.rs +++ b/src/auth.rs @@ -54,12 +54,8 @@ static PUBLIC_RSA_KEY: OnceLock = OnceLock::new(); pub async fn initialize_keys() -> Result<(), Error> { use std::io::Error; - let rsa_key_filename = std::path::PathBuf::from(CONFIG.private_rsa_key()) - .file_name() - .ok_or_else(|| Error::other("Private RSA key path missing filename"))? - .to_str() - .ok_or_else(|| Error::other("Private RSA key path filename is not valid UTF-8"))? - .to_string(); + let rsa_key_filename = crate::storage::file_name(&CONFIG.private_rsa_key()) + .ok_or_else(|| Error::other("Private RSA key path missing filename"))?; let operator = CONFIG.opendal_operator_for_path_type(&PathType::RsaKey).map_err(Error::other)?; diff --git a/src/config.rs b/src/config.rs index 6ff09467..b6d0ce8a 100644 --- a/src/config.rs +++ b/src/config.rs @@ -14,6 +14,7 @@ use serde::de::{self, Deserialize, Deserializer, MapAccess, Visitor}; use crate::{ error::Error, + storage, util::{ get_active_web_release, get_env, get_env_bool, is_valid_email, parse_experimental_client_feature_flags, FeatureFlagFilter, @@ -22,18 +23,14 @@ use crate::{ static CONFIG_FILE: LazyLock = LazyLock::new(|| { let data_folder = get_env("DATA_FOLDER").unwrap_or_else(|| String::from("data")); - get_env("CONFIG_FILE").unwrap_or_else(|| format!("{data_folder}/config.json")) + get_env("CONFIG_FILE").unwrap_or_else(|| storage::join_path(&data_folder, "config.json")) }); -static CONFIG_FILE_PARENT_DIR: LazyLock = LazyLock::new(|| { - let path = std::path::PathBuf::from(&*CONFIG_FILE); - path.parent().unwrap_or(std::path::Path::new("data")).to_str().unwrap_or("data").to_string() -}); +static CONFIG_FILE_PARENT_DIR: LazyLock = + LazyLock::new(|| 
storage::parent(&CONFIG_FILE).unwrap_or_else(|| "data".to_string())); -static CONFIG_FILENAME: LazyLock = LazyLock::new(|| { - let path = std::path::PathBuf::from(&*CONFIG_FILE); - path.file_name().unwrap_or(std::ffi::OsStr::new("config.json")).to_str().unwrap_or("config.json").to_string() -}); +static CONFIG_FILENAME: LazyLock = + LazyLock::new(|| storage::file_name(&CONFIG_FILE).unwrap_or_else(|| "config.json".to_string())); pub static SKIP_CONFIG_VALIDATION: AtomicBool = AtomicBool::new(false); @@ -263,7 +260,7 @@ macro_rules! make_config { } async fn from_file() -> Result { - let operator = opendal_operator_for_path(&CONFIG_FILE_PARENT_DIR)?; + let operator = storage::operator_for_path(&CONFIG_FILE_PARENT_DIR)?; let config_bytes = operator.read(&CONFIG_FILENAME).await?; println!("[INFO] Using saved config from `{}` for configuration.\n", *CONFIG_FILE); serde_json::from_slice(&config_bytes.to_vec()).map_err(Into::into) @@ -507,19 +504,19 @@ make_config! { /// Data folder |> Main data folder data_folder: String, false, def, "data".to_string(); /// Database URL - database_url: String, false, auto, |c| format!("{}/db.sqlite3", c.data_folder); + database_url: String, false, auto, |c| storage::join_path(&c.data_folder, "db.sqlite3"); /// Icon cache folder - icon_cache_folder: String, false, auto, |c| format!("{}/icon_cache", c.data_folder); + icon_cache_folder: String, false, auto, |c| storage::join_path(&c.data_folder, "icon_cache"); /// Attachments folder - attachments_folder: String, false, auto, |c| format!("{}/attachments", c.data_folder); + attachments_folder: String, false, auto, |c| storage::join_path(&c.data_folder, "attachments"); /// Sends folder - sends_folder: String, false, auto, |c| format!("{}/sends", c.data_folder); + sends_folder: String, false, auto, |c| storage::join_path(&c.data_folder, "sends"); /// Temp folder |> Used for storing temporary file uploads - tmp_folder: String, false, auto, |c| format!("{}/tmp", c.data_folder); + tmp_folder: 
String, false, auto, |c| storage::join_path(&c.data_folder, "tmp"); /// Templates folder - templates_folder: String, false, auto, |c| format!("{}/templates", c.data_folder); + templates_folder: String, false, auto, |c| storage::join_path(&c.data_folder, "templates"); /// Session JWT key - rsa_key_filename: String, false, auto, |c| format!("{}/rsa_key", c.data_folder); + rsa_key_filename: String, false, auto, |c| storage::join_path(&c.data_folder, "rsa_key"); /// Web vault folder web_vault_folder: String, false, def, "web-vault/".to_string(); }, @@ -1076,7 +1073,7 @@ fn validate_config(cfg: &ConfigItems, on_update: bool) -> Result<(), Error> { validate_internal_sso_issuer_url(&cfg.sso_authority)?; validate_internal_sso_redirect_url(&cfg.sso_callback_path)?; - validate_sso_master_password_policy(&cfg.sso_master_password_policy)?; + validate_sso_master_password_policy(cfg.sso_master_password_policy.as_ref())?; } if cfg._enable_yubico { @@ -1271,7 +1268,7 @@ fn validate_internal_sso_redirect_url(sso_callback_path: &String) -> Result, + sso_master_password_policy: Option<&String>, ) -> Result, Error> { let policy = sso_master_password_policy.as_ref().map(|mpp| serde_json::from_str::(mpp)); @@ -1366,90 +1363,6 @@ fn smtp_convert_deprecated_ssl_options(smtp_ssl: Option, smtp_explicit_tls "starttls".to_string() } -fn opendal_operator_for_path(path: &str) -> Result { - // Cache of previously built operators by path - static OPERATORS_BY_PATH: LazyLock> = - LazyLock::new(dashmap::DashMap::new); - - if let Some(operator) = OPERATORS_BY_PATH.get(path) { - return Ok(operator.clone()); - } - - let operator = if path.starts_with("s3://") { - #[cfg(not(s3))] - return Err(opendal::Error::new(opendal::ErrorKind::ConfigInvalid, "S3 support is not enabled").into()); - - #[cfg(s3)] - opendal_s3_operator_for_path(path)? 
- } else { - let builder = opendal::services::Fs::default().root(path); - opendal::Operator::new(builder)?.finish() - }; - - OPERATORS_BY_PATH.insert(path.to_string(), operator.clone()); - - Ok(operator) -} - -#[cfg(s3)] -fn opendal_s3_operator_for_path(path: &str) -> Result { - use crate::http_client::aws::AwsReqwestConnector; - use aws_config::{default_provider::credentials::DefaultCredentialsChain, provider_config::ProviderConfig}; - - // This is a custom AWS credential loader that uses the official AWS Rust - // SDK config crate to load credentials. This ensures maximum compatibility - // with AWS credential configurations. For example, OpenDAL doesn't support - // AWS SSO temporary credentials yet. - struct OpenDALS3CredentialLoader {} - - #[async_trait] - impl reqsign::AwsCredentialLoad for OpenDALS3CredentialLoader { - async fn load_credential(&self, _client: reqwest::Client) -> anyhow::Result> { - use aws_credential_types::provider::ProvideCredentials as _; - use tokio::sync::OnceCell; - - static DEFAULT_CREDENTIAL_CHAIN: OnceCell = OnceCell::const_new(); - - let chain = DEFAULT_CREDENTIAL_CHAIN - .get_or_init(|| { - let reqwest_client = reqwest::Client::builder().build().unwrap(); - let connector = AwsReqwestConnector { - client: reqwest_client, - }; - - let conf = ProviderConfig::default().with_http_client(connector); - - DefaultCredentialsChain::builder().configure(conf).build() - }) - .await; - - let creds = chain.provide_credentials().await?; - - Ok(Some(reqsign::AwsCredential { - access_key_id: creds.access_key_id().to_string(), - secret_access_key: creds.secret_access_key().to_string(), - session_token: creds.session_token().map(|s| s.to_string()), - expires_in: creds.expiry().map(|expiration| expiration.into()), - })) - } - } - - const OPEN_DAL_S3_CREDENTIAL_LOADER: OpenDALS3CredentialLoader = OpenDALS3CredentialLoader {}; - - let url = Url::parse(path).map_err(|e| format!("Invalid path S3 URL path {path:?}: {e}"))?; - - let bucket = 
url.host_str().ok_or_else(|| format!("Missing Bucket name in data folder S3 URL {path:?}"))?; - - let builder = opendal::services::S3::default() - .customized_credential_load(Box::new(OPEN_DAL_S3_CREDENTIAL_LOADER)) - .enable_virtual_host_style() - .bucket(bucket) - .root(url.path()) - .default_storage_class("INTELLIGENT_TIERING"); - - Ok(opendal::Operator::new(builder)?.finish()) -} - pub enum PathType { Data, IconCache, @@ -1547,7 +1460,7 @@ impl Config { } //Save to file - let operator = opendal_operator_for_path(&CONFIG_FILE_PARENT_DIR)?; + let operator = storage::operator_for_path(&CONFIG_FILE_PARENT_DIR)?; operator.write(&CONFIG_FILENAME, config_str).await?; Ok(()) @@ -1612,7 +1525,7 @@ impl Config { } pub async fn delete_user_config(&self) -> Result<(), Error> { - let operator = opendal_operator_for_path(&CONFIG_FILE_PARENT_DIR)?; + let operator = storage::operator_for_path(&CONFIG_FILE_PARENT_DIR)?; operator.delete(&CONFIG_FILENAME).await?; // Empty user config @@ -1636,7 +1549,7 @@ impl Config { } pub fn private_rsa_key(&self) -> String { - format!("{}.pem", self.rsa_key_filename()) + storage::with_extension(&self.rsa_key_filename(), "pem") } pub fn mail_enabled(&self) -> bool { let inner = &self.inner.read().unwrap().config; @@ -1677,15 +1590,11 @@ impl Config { PathType::IconCache => self.icon_cache_folder(), PathType::Attachments => self.attachments_folder(), PathType::Sends => self.sends_folder(), - PathType::RsaKey => std::path::Path::new(&self.rsa_key_filename()) - .parent() - .ok_or_else(|| std::io::Error::other("Failed to get directory of RSA key file"))? - .to_str() - .ok_or_else(|| std::io::Error::other("Failed to convert RSA key file directory to UTF-8 string"))? 
- .to_string(), + PathType::RsaKey => storage::parent(&self.private_rsa_key()) + .ok_or_else(|| std::io::Error::other("Failed to get directory of RSA key file"))?, }; - opendal_operator_for_path(&path) + storage::operator_for_path(&path) } pub fn render_template(&self, name: &str, data: &T) -> Result { @@ -1725,7 +1634,7 @@ impl Config { } pub fn sso_master_password_policy_value(&self) -> Option { - validate_sso_master_password_policy(&self.sso_master_password_policy()).ok().flatten() + validate_sso_master_password_policy(self.sso_master_password_policy().as_ref()).ok().flatten() } pub fn sso_scopes_vec(&self) -> Vec { diff --git a/src/crypto.rs b/src/crypto.rs index 1930f380..46d305a5 100644 --- a/src/crypto.rs +++ b/src/crypto.rs @@ -113,3 +113,10 @@ pub fn ct_eq, U: AsRef<[u8]>>(a: T, b: U) -> bool { use subtle::ConstantTimeEq; a.as_ref().ct_eq(b.as_ref()).into() } + +// +// SHA256 +// +pub fn sha256_hex(data: &[u8]) -> String { + HEXLOWER.encode(digest::digest(&digest::SHA256, data).as_ref()) +} diff --git a/src/db/models/archive.rs b/src/db/models/archive.rs new file mode 100644 index 00000000..f576e7ed --- /dev/null +++ b/src/db/models/archive.rs @@ -0,0 +1,91 @@ +use chrono::NaiveDateTime; +use diesel::prelude::*; + +use super::{CipherId, User, UserId}; +use crate::api::EmptyResult; +use crate::db::schema::archives; +use crate::db::DbConn; +use crate::error::MapResult; + +#[derive(Identifiable, Queryable, Insertable)] +#[diesel(table_name = archives)] +#[diesel(primary_key(user_uuid, cipher_uuid))] +pub struct Archive { + pub user_uuid: UserId, + pub cipher_uuid: CipherId, + pub archived_at: NaiveDateTime, +} + +impl Archive { + // Returns the date the specified cipher was archived + pub async fn get_archived_at(cipher_uuid: &CipherId, user_uuid: &UserId, conn: &DbConn) -> Option { + db_run! 
{ conn: { + archives::table + .filter(archives::cipher_uuid.eq(cipher_uuid)) + .filter(archives::user_uuid.eq(user_uuid)) + .select(archives::archived_at) + .first::(conn).ok() + }} + } + + // Saves (inserts or updates) an archive record with the provided timestamp + pub async fn save( + user_uuid: &UserId, + cipher_uuid: &CipherId, + archived_at: NaiveDateTime, + conn: &DbConn, + ) -> EmptyResult { + User::update_uuid_revision(user_uuid, conn).await; + db_run! { conn: + sqlite, mysql { + diesel::replace_into(archives::table) + .values(( + archives::user_uuid.eq(user_uuid), + archives::cipher_uuid.eq(cipher_uuid), + archives::archived_at.eq(archived_at), + )) + .execute(conn) + .map_res("Error saving archive") + } + postgresql { + diesel::insert_into(archives::table) + .values(( + archives::user_uuid.eq(user_uuid), + archives::cipher_uuid.eq(cipher_uuid), + archives::archived_at.eq(archived_at), + )) + .on_conflict((archives::user_uuid, archives::cipher_uuid)) + .do_update() + .set(archives::archived_at.eq(archived_at)) + .execute(conn) + .map_res("Error saving archive") + } + } + } + + // Deletes an archive record for a specific cipher + pub async fn delete_by_cipher(user_uuid: &UserId, cipher_uuid: &CipherId, conn: &DbConn) -> EmptyResult { + User::update_uuid_revision(user_uuid, conn).await; + db_run! { conn: { + diesel::delete( + archives::table + .filter(archives::user_uuid.eq(user_uuid)) + .filter(archives::cipher_uuid.eq(cipher_uuid)) + ) + .execute(conn) + .map_res("Error deleting archive") + }} + } + + /// Return a vec with (cipher_uuid, archived_at) + /// This is used during a full sync so we only need one query for all archive matches + pub async fn find_by_user(user_uuid: &UserId, conn: &DbConn) -> Vec<(CipherId, NaiveDateTime)> { + db_run! 
{ conn: { + archives::table + .filter(archives::user_uuid.eq(user_uuid)) + .select((archives::cipher_uuid, archives::archived_at)) + .load::<(CipherId, NaiveDateTime)>(conn) + .unwrap_or_default() + }} + } +} diff --git a/src/db/models/attachment.rs b/src/db/models/attachment.rs index 4273c22a..dad081bd 100644 --- a/src/db/models/attachment.rs +++ b/src/db/models/attachment.rs @@ -46,11 +46,11 @@ impl Attachment { pub async fn get_url(&self, host: &str) -> Result { let operator = CONFIG.opendal_operator_for_path_type(&PathType::Attachments)?; - if operator.info().scheme() == <&'static str>::from(opendal::Scheme::Fs) { + if crate::storage::is_fs_operator(&operator) { let token = encode_jwt(&generate_file_download_claims(self.cipher_uuid.clone(), self.id.clone())); Ok(format!("{host}/attachments/{}/{}?token={token}", self.cipher_uuid, self.id)) } else { - Ok(operator.presign_read(&self.get_file_path(), Duration::from_secs(5 * 60)).await?.uri().to_string()) + Ok(operator.presign_read(&self.get_file_path(), Duration::from_mins(5)).await?.uri().to_string()) } } diff --git a/src/db/models/cipher.rs b/src/db/models/cipher.rs index edc5f8c9..db906179 100644 --- a/src/db/models/cipher.rs +++ b/src/db/models/cipher.rs @@ -10,8 +10,8 @@ use diesel::prelude::*; use serde_json::Value; use super::{ - Attachment, CollectionCipher, CollectionId, Favorite, FolderCipher, FolderId, Group, Membership, MembershipStatus, - MembershipType, OrganizationId, User, UserId, + Archive, Attachment, CollectionCipher, CollectionId, Favorite, FolderCipher, FolderId, Group, Membership, + MembershipStatus, MembershipType, OrganizationId, User, UserId, }; use crate::api::core::{CipherData, CipherSyncData, CipherSyncType}; use macros::UuidFromParam; @@ -380,6 +380,11 @@ impl Cipher { } else { self.is_favorite(user_uuid, conn).await }); + json_object["archivedDate"] = json!(if let Some(cipher_sync_data) = cipher_sync_data { + cipher_sync_data.cipher_archives.get(&self.uuid).map_or(Value::Null, |d| 
Value::String(format_date(d))) + } else { + self.get_archived_at(user_uuid, conn).await.map_or(Value::Null, |d| Value::String(format_date(&d))) + }); // These values are true by default, but can be false if the // cipher belongs to a collection or group where the org owner has enabled // the "Read Only" or "Hide Passwords" restrictions for the user. @@ -398,7 +403,7 @@ impl Cipher { 3 => "card", 4 => "identity", 5 => "sshKey", - _ => panic!("Wrong type"), + _ => err!(format!("Cipher {} has an invalid type {}", self.uuid, self.atype)), }; json_object[key] = type_data_json; @@ -742,6 +747,18 @@ impl Cipher { } } + pub async fn get_archived_at(&self, user_uuid: &UserId, conn: &DbConn) -> Option { + Archive::get_archived_at(&self.uuid, user_uuid, conn).await + } + + pub async fn set_archived_at(&self, archived_at: NaiveDateTime, user_uuid: &UserId, conn: &DbConn) -> EmptyResult { + Archive::save(user_uuid, &self.uuid, archived_at, conn).await + } + + pub async fn unarchive(&self, user_uuid: &UserId, conn: &DbConn) -> EmptyResult { + Archive::delete_by_cipher(user_uuid, &self.uuid, conn).await + } + pub async fn get_folder_uuid(&self, user_uuid: &UserId, conn: &DbConn) -> Option { db_run! 
{ conn: { folders_ciphers::table diff --git a/src/db/models/device.rs b/src/db/models/device.rs index 1026574c..7364a2ec 100644 --- a/src/db/models/device.rs +++ b/src/db/models/device.rs @@ -25,7 +25,7 @@ pub struct Device { pub user_uuid: UserId, pub name: String, - pub atype: i32, // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/Enums/DeviceType.cs + pub atype: i32, // https://github.com/bitwarden/server/blob/8d547dcc280babab70dd4a3c94ced6a34b12dfbf/src/Core/Enums/DeviceType.cs pub push_uuid: Option, pub push_token: Option, @@ -332,6 +332,8 @@ pub enum DeviceType { MacOsCLI = 24, #[display("Linux CLI")] LinuxCLI = 25, + #[display("DuckDuckGo")] + DuckDuckGoBrowser = 26, } impl DeviceType { @@ -363,6 +365,7 @@ impl DeviceType { 23 => DeviceType::WindowsCLI, 24 => DeviceType::MacOsCLI, 25 => DeviceType::LinuxCLI, + 26 => DeviceType::DuckDuckGoBrowser, _ => DeviceType::UnknownBrowser, } } diff --git a/src/db/models/emergency_access.rs b/src/db/models/emergency_access.rs index cf7f5385..5ea334a4 100644 --- a/src/db/models/emergency_access.rs +++ b/src/db/models/emergency_access.rs @@ -85,7 +85,8 @@ impl EmergencyAccess { pub async fn to_json_grantee_details(&self, conn: &DbConn) -> Option { let grantee_user = if let Some(grantee_uuid) = &self.grantee_uuid { User::find_by_uuid(grantee_uuid, conn).await.expect("Grantee user not found.") - } else if let Some(email) = self.email.as_deref() { + } else { + let email = self.email.as_deref()?; match User::find_by_mail(email, conn).await { Some(user) => user, None => { @@ -94,8 +95,6 @@ impl EmergencyAccess { return None; } } - } else { - return None; }; Some(json!({ diff --git a/src/db/models/mod.rs b/src/db/models/mod.rs index b4fcf658..2d31259c 100644 --- a/src/db/models/mod.rs +++ b/src/db/models/mod.rs @@ -1,3 +1,4 @@ +mod archive; mod attachment; mod auth_request; mod cipher; @@ -17,6 +18,7 @@ mod two_factor_duo_context; mod two_factor_incomplete; mod user; +pub use 
self::archive::Archive; pub use self::attachment::{Attachment, AttachmentId}; pub use self::auth_request::{AuthRequest, AuthRequestId}; pub use self::cipher::{Cipher, CipherId, RepromptType}; diff --git a/src/db/models/send.rs b/src/db/models/send.rs index 84802c54..5b6611fa 100644 --- a/src/db/models/send.rs +++ b/src/db/models/send.rs @@ -237,7 +237,7 @@ impl Send { if self.atype == SendType::File as i32 { let operator = CONFIG.opendal_operator_for_path_type(&PathType::Sends)?; - operator.remove_all(&self.uuid).await.ok(); + operator.delete_with(&self.uuid).recursive(true).await.ok(); } db_run! { conn: { diff --git a/src/db/models/sso_auth.rs b/src/db/models/sso_auth.rs index fec0433a..2c6eec6d 100644 --- a/src/db/models/sso_auth.rs +++ b/src/db/models/sso_auth.rs @@ -54,11 +54,18 @@ pub struct SsoAuth { pub auth_response: Option, pub created_at: NaiveDateTime, pub updated_at: NaiveDateTime, + pub binding_hash: Option, } /// Local methods impl SsoAuth { - pub fn new(state: OIDCState, client_challenge: OIDCCodeChallenge, nonce: String, redirect_uri: String) -> Self { + pub fn new( + state: OIDCState, + client_challenge: OIDCCodeChallenge, + nonce: String, + redirect_uri: String, + binding_hash: Option, + ) -> Self { let now = Utc::now().naive_utc(); SsoAuth { @@ -70,6 +77,7 @@ impl SsoAuth { updated_at: now, code_response: None, auth_response: None, + binding_hash, } } } diff --git a/src/db/schema.rs b/src/db/schema.rs index 914b4fe9..bf79ceac 100644 --- a/src/db/schema.rs +++ b/src/db/schema.rs @@ -265,6 +265,7 @@ table! { auth_response -> Nullable, created_at -> Timestamp, updated_at -> Timestamp, + binding_hash -> Nullable, } } @@ -341,6 +342,16 @@ table! { } } +table! 
{ + archives (user_uuid, cipher_uuid) { + user_uuid -> Text, + cipher_uuid -> Text, + archived_at -> Timestamp, + } +} + +joinable!(archives -> users (user_uuid)); +joinable!(archives -> ciphers (cipher_uuid)); joinable!(attachments -> ciphers (cipher_uuid)); joinable!(ciphers -> organizations (organization_uuid)); joinable!(ciphers -> users (user_uuid)); @@ -372,6 +383,7 @@ joinable!(auth_requests -> users (user_uuid)); joinable!(sso_users -> users (user_uuid)); allow_tables_to_appear_in_same_query!( + archives, attachments, ciphers, ciphers_collections, diff --git a/src/http_client.rs b/src/http_client.rs index df52e2bc..d39b884d 100644 --- a/src/http_client.rs +++ b/src/http_client.rs @@ -1,7 +1,6 @@ use std::{ fmt, net::{IpAddr, SocketAddr}, - str::FromStr, sync::{Arc, LazyLock, Mutex}, time::Duration, }; @@ -59,16 +58,6 @@ pub fn get_reqwest_client_builder() -> ClientBuilder { .timeout(Duration::from_secs(10)) } -pub fn should_block_address(domain_or_ip: &str) -> bool { - if let Ok(ip) = IpAddr::from_str(domain_or_ip) { - if should_block_ip(ip) { - return true; - } - } - - should_block_address_regex(domain_or_ip) -} - fn should_block_ip(ip: IpAddr) -> bool { if !CONFIG.http_request_block_non_global_ips() { return false; @@ -100,11 +89,54 @@ fn should_block_address_regex(domain_or_ip: &str) -> bool { is_match } -fn should_block_host(host: &Host<&str>) -> Result<(), CustomHttpClientError> { +pub fn get_valid_host(host: &str) -> Result { + let Ok(host) = Host::parse(host) else { + return Err(CustomHttpClientError::Invalid { + domain: host.to_string(), + }); + }; + + // Some extra checks to validate hosts + match host { + Host::Domain(ref domain) => { + // Host::parse() does not verify length or all possible invalid characters + // We do some extra checks here to prevent issues + if domain.len() > 253 { + debug!("Domain validation error: '{domain}' exceeds 253 characters"); + return Err(CustomHttpClientError::Invalid { + domain: host.to_string(), + }); + } + if 
!domain.split('.').all(|label| { + !label.is_empty() + // Labels can't be longer than 63 chars + && label.len() <= 63 + // Labels are not allowed to start or end with a hyphen `-` + && !label.starts_with('-') + && !label.ends_with('-') + // Only ASCII Alphanumeric characters are allowed + // We already received a punycoded domain back, so no unicode should exist here + && label.chars().all(|c| c.is_ascii_alphanumeric() || c == '-') + }) { + debug!( + "Domain validation error: '{domain}' labels contain invalid characters or exceed the maximum length" + ); + return Err(CustomHttpClientError::Invalid { + domain: host.to_string(), + }); + } + } + Host::Ipv4(_) | Host::Ipv6(_) => {} + } + + Ok(host) +} + +pub fn should_block_host>(host: &Host) -> Result<(), CustomHttpClientError> { let (ip, host_str): (Option, String) = match host { Host::Ipv4(ip) => (Some(IpAddr::V4(*ip)), ip.to_string()), Host::Ipv6(ip) => (Some(IpAddr::V6(*ip)), ip.to_string()), - Host::Domain(d) => (None, (*d).to_string()), + Host::Domain(d) => (None, d.as_ref().to_string()), }; if let Some(ip) = ip { @@ -134,6 +166,9 @@ pub enum CustomHttpClientError { domain: Option, ip: IpAddr, }, + Invalid { + domain: String, + }, } impl CustomHttpClientError { @@ -155,7 +190,7 @@ impl fmt::Display for CustomHttpClientError { match self { Self::Blocked { domain, - } => write!(f, "Blocked domain: {domain} matched HTTP_REQUEST_BLOCK_REGEX"), + } => write!(f, "Blocked domain: '{domain}' matched HTTP_REQUEST_BLOCK_REGEX"), Self::NonGlobalIp { domain: Some(domain), ip, @@ -163,7 +198,10 @@ impl fmt::Display for CustomHttpClientError { Self::NonGlobalIp { domain: None, ip, - } => write!(f, "IP {ip} is not a global IP!"), + } => write!(f, "IP '{ip}' is not a global IP!"), + Self::Invalid { + domain, + } => write!(f, "Invalid host: '{domain}' contains invalid characters or exceeds the maximum length"), } } } @@ -217,7 +255,13 @@ impl CustomDnsResolver { } fn pre_resolve(name: &str) -> Result<(), CustomHttpClientError>
{ - if should_block_address(name) { + let Ok(host) = get_valid_host(name) else { + return Err(CustomHttpClientError::Invalid { + domain: name.to_string(), + }); + }; + + if should_block_host(&host).is_err() { return Err(CustomHttpClientError::Blocked { domain: name.to_string(), }); @@ -308,3 +352,209 @@ pub(crate) mod aws { } } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::util::is_global_hardcoded; + use std::net::Ipv4Addr; + use url::Host; + + // === + // IPv4 numeric-format normalization + fn parse_to_ip(s: &str) -> Option { + match Host::parse(s).ok()? { + Host::Ipv4(v4) => Some(IpAddr::V4(v4)), + Host::Ipv6(v6) => Some(IpAddr::V6(v6)), + Host::Domain(_) => None, + } + } + + #[test] + fn dotted_decimal_loopback_normalizes() { + let ip = parse_to_ip("127.0.0.1").unwrap(); + assert_eq!(ip, IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1))); + assert!(!is_global_hardcoded(ip)); + } + + #[test] + fn single_decimal_loopback_normalizes() { + // 127.0.0.1 == 2130706433 + let ip = parse_to_ip("2130706433").unwrap(); + assert_eq!(ip, IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1))); + assert!(!is_global_hardcoded(ip)); + } + + #[test] + fn hex_loopback_normalizes() { + let ip = parse_to_ip("0x7f000001").unwrap(); + assert_eq!(ip, IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1))); + assert!(!is_global_hardcoded(ip)); + } + + #[test] + fn dotted_hex_loopback_normalizes() { + let ip = parse_to_ip("0x7f.0.0.1").unwrap(); + assert_eq!(ip, IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1))); + assert!(!is_global_hardcoded(ip)); + } + + #[test] + fn octal_loopback_normalizes() { + // 017700000001 == 127.0.0.1 + let ip = parse_to_ip("017700000001").unwrap(); + assert_eq!(ip, IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1))); + assert!(!is_global_hardcoded(ip)); + } + + #[test] + fn dotted_octal_loopback_normalizes() { + let ip = parse_to_ip("0177.0.0.01").unwrap(); + assert_eq!(ip, IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1))); + assert!(!is_global_hardcoded(ip)); + } + + #[test] + fn aws_metadata_decimal_blocked() 
{ + // 169.254.169.254 == 2852039166 (link-local, AWS IMDS) + let ip = parse_to_ip("2852039166").unwrap(); + assert_eq!(ip, IpAddr::V4(Ipv4Addr::new(169, 254, 169, 254))); + assert!(!is_global_hardcoded(ip)); + } + + #[test] + fn rfc1918_hex_blocked() { + // 10.0.0.1 + let ip = parse_to_ip("0x0a000001").unwrap(); + assert!(!is_global_hardcoded(ip)); + } + + #[test] + fn public_ip_decimal_allowed() { + // 8.8.8.8 == 134744072 + let ip = parse_to_ip("134744072").unwrap(); + assert_eq!(ip, IpAddr::V4(Ipv4Addr::new(8, 8, 8, 8))); + assert!(is_global_hardcoded(ip)); + } + + // === + // get_valid_host integration: numeric forms become Host::Ipv4 + #[test] + fn get_valid_host_normalizes_decimal_int() { + let h = get_valid_host("2130706433").expect("valid"); + assert!(matches!(h, Host::Ipv4(ip) if ip == Ipv4Addr::new(127, 0, 0, 1))); + } + + #[test] + fn get_valid_host_normalizes_hex() { + let h = get_valid_host("0x7f000001").expect("valid"); + assert!(matches!(h, Host::Ipv4(ip) if ip == Ipv4Addr::new(127, 0, 0, 1))); + } + + #[test] + fn get_valid_host_normalizes_octal() { + let h = get_valid_host("017700000001").expect("valid"); + assert!(matches!(h, Host::Ipv4(ip) if ip == Ipv4Addr::new(127, 0, 0, 1))); + } + + // === + // IPv6 formats + #[test] + fn ipv6_loopback_blocked() { + let h = get_valid_host("[::1]").expect("valid"); + let Host::Ipv6(ip) = h else { + panic!("expected v6") + }; + assert!(!is_global_hardcoded(IpAddr::V6(ip))); + } + + #[test] + fn ipv4_mapped_in_ipv6_loopback_blocked() { + // ::ffff:127.0.0.1 — v4-mapped form; is_global_hardcoded blocks via ::ffff:0:0/96 + let h = get_valid_host("[::ffff:127.0.0.1]").expect("valid"); + let Host::Ipv6(ip) = h else { + panic!("expected v6") + }; + assert!(!is_global_hardcoded(IpAddr::V6(ip))); + } + + #[test] + fn ipv6_unique_local_blocked() { + let h = get_valid_host("[fc00::1]").expect("valid"); + let Host::Ipv6(ip) = h else { + panic!("expected v6") + }; + assert!(!is_global_hardcoded(IpAddr::V6(ip))); + } + + 
// === + // Punycode / IDN + #[test] + fn punycode_passthrough() { + let h = get_valid_host("xn--deadbeafcaf-lbb.test").expect("valid"); + match h { + Host::Domain(d) => assert_eq!(d, "xn--deadbeafcaf-lbb.test"), + _ => panic!("expected domain"), + } + } + + #[test] + fn idn_unicode_gets_punycoded() { + let h = get_valid_host("deadbeafcafé.test").expect("valid"); + match h { + Host::Domain(d) => assert_eq!(d, "xn--deadbeafcaf-lbb.test"), + _ => panic!("expected domain"), + } + } + + #[test] + fn idn_unicode_gets_punycoded_tld() { + let h = get_valid_host("deadbeaf.café").expect("valid"); + match h { + Host::Domain(d) => assert_eq!(d, "deadbeaf.xn--caf-dma"), + _ => panic!("expected domain"), + } + } + + #[test] + fn idn_emoji_gets_punycoded() { + let h = get_valid_host("xn--t88h.test").expect("valid"); // 🛡️.test + match h { + Host::Domain(d) => assert_eq!(d, "xn--t88h.test"), + _ => panic!("expected domain"), + } + } + + #[test] + fn idn_unicode_to_punycode_roundtrip() { + let from_unicode = get_valid_host("🛡️.test").expect("valid"); + let from_puny = get_valid_host("xn--t88h.test").expect("valid"); + match (from_unicode, from_puny) { + (Host::Domain(a), Host::Domain(b)) => assert_eq!(a, b), + _ => panic!("expected domains"), + } + } + + #[test] + fn invalid_punycode_rejected() { + // bare invalid punycode + assert!(get_valid_host("xn--").is_err()); + } + + #[test] + fn underscore_in_label_rejected() { + assert!(get_valid_host("dead_beaf.cafe").is_err()); + } + + #[test] + fn label_too_long_rejected() { + let label = "a".repeat(64); + assert!(get_valid_host(&format!("{label}.test")).is_err()); + } + + #[test] + fn domain_too_long_rejected() { + let big = "a.".repeat(130) + "test"; // > 253 + assert!(get_valid_host(&big).is_err()); + } +} diff --git a/src/main.rs b/src/main.rs index 60c5a593..4ffeacc1 100644 --- a/src/main.rs +++ b/src/main.rs @@ -57,6 +57,7 @@ mod mail; mod ratelimit; mod sso; mod sso_client; +mod storage; mod util; use 
crate::api::core::two_factor::duo_oidc::purge_duo_contexts; @@ -70,6 +71,7 @@ pub use util::is_running_in_container; #[rocket::main] async fn main() -> Result<(), Error> { + install_rustls_crypto_provider(); parse_args(); launch_info(); @@ -202,6 +204,14 @@ fn parse_args() { } } +fn install_rustls_crypto_provider() { + if rustls::crypto::CryptoProvider::get_default().is_none() { + rustls::crypto::ring::default_provider() + .install_default() + .expect("failed to install rustls ring crypto provider"); + } +} + fn launch_info() { println!( "\ diff --git a/src/sso.rs b/src/sso.rs index 2f56f3a6..7505f84f 100644 --- a/src/sso.rs +++ b/src/sso.rs @@ -17,7 +17,7 @@ use crate::{ CONFIG, }; -pub static FAKE_SSO_IDENTIFIER: &str = "vaultwarden-dummy-oidc-identifier"; +pub static FAKE_SSO_IDENTIFIER: &str = "00000000-01DC-01DC-01DC-000000000000"; static SSO_JWT_ISSUER: LazyLock = LazyLock::new(|| format!("{}|sso", CONFIG.domain_origin())); @@ -188,6 +188,7 @@ pub async fn authorize_url( client_challenge: OIDCCodeChallenge, client_id: &str, raw_redirect_uri: &str, + binding_hash: Option, conn: DbConn, ) -> ApiResult { let redirect_uri = match client_id { @@ -203,7 +204,7 @@ pub async fn authorize_url( _ => err!(format!("Unsupported client {client_id}")), }; - let (auth_url, sso_auth) = Client::authorize_url(state, client_challenge, redirect_uri).await?; + let (auth_url, sso_auth) = Client::authorize_url(state, client_challenge, redirect_uri, binding_hash).await?; sso_auth.save(&conn).await?; Ok(auth_url) } @@ -283,7 +284,7 @@ pub async fn exchange_code( let email_verified = id_claims.email_verified().or(user_info.email_verified()); - let user_name = id_claims.preferred_username().map(|un| un.to_string()); + let user_name = id_claims.preferred_username().or(user_info.preferred_username()).map(|un| un.to_string()); let refresh_token = token_response.refresh_token().map(|t| t.secret()); if refresh_token.is_none() && CONFIG.sso_scopes_vec().contains(&"offline_access".to_string()) 
{ diff --git a/src/sso_client.rs b/src/sso_client.rs index 6204ab48..68e171c6 100644 --- a/src/sso_client.rs +++ b/src/sso_client.rs @@ -1,12 +1,13 @@ -use std::{borrow::Cow, sync::LazyLock, time::Duration}; +use std::{borrow::Cow, future::Future, pin::Pin, sync::LazyLock, time::Duration}; -use openidconnect::{core::*, reqwest, *}; +use openidconnect::{core::*, *}; use regex::Regex; use url::Url; use crate::{ api::{ApiResult, EmptyResult}, db::models::SsoAuth, + http_client::get_reqwest_client_builder, sso::{OIDCCode, OIDCCodeChallenge, OIDCCodeVerifier, OIDCState}, CONFIG, }; @@ -46,10 +47,42 @@ pub type RefreshTokenResponse = (Option, String, Option); #[derive(Clone)] pub struct Client { - pub http_client: reqwest::Client, + pub http_client: OidcHttpClient, pub core_client: CustomClient, } +#[derive(Clone)] +pub struct OidcHttpClient { + client: reqwest::Client, +} + +impl OidcHttpClient { + fn new() -> Result { + get_reqwest_client_builder().redirect(reqwest::redirect::Policy::none()).build().map(|client| Self { + client, + }) + } +} + +impl<'c> AsyncHttpClient<'c> for OidcHttpClient { + type Error = HttpClientError; + type Future = Pin> + Send + Sync + 'c>>; + + fn call(&'c self, request: HttpRequest) -> Self::Future { + Box::pin(async move { + let response = self.client.execute(request.try_into().map_err(Box::new)?).await.map_err(Box::new)?; + + let mut builder = http::Response::builder().status(response.status()).version(response.version()); + + for (name, value) in response.headers() { + builder = builder.header(name, value); + } + + builder.body(response.bytes().await.map_err(Box::new)?.to_vec()).map_err(HttpClientError::Http) + }) + } +} + impl Client { // Call the OpenId discovery endpoint to retrieve configuration async fn _get_client() -> ApiResult { @@ -58,7 +91,7 @@ impl Client { let issuer_url = CONFIG.sso_issuer_url()?; - let http_client = match reqwest::ClientBuilder::new().redirect(reqwest::redirect::Policy::none()).build() { + let http_client = 
match OidcHttpClient::new() { Err(err) => err!(format!("Failed to build http client: {err}")), Ok(client) => client, }; @@ -117,6 +150,7 @@ impl Client { state: OIDCState, client_challenge: OIDCCodeChallenge, redirect_uri: String, + binding_hash: Option, ) -> ApiResult<(Url, SsoAuth)> { let scopes = CONFIG.sso_scopes_vec().into_iter().map(Scope::new); let base64_state = data_encoding::BASE64.encode(state.to_string().as_bytes()); @@ -139,7 +173,7 @@ impl Client { } let (auth_url, _, nonce) = auth_req.url(); - Ok((auth_url, SsoAuth::new(state, client_challenge, nonce.secret().clone(), redirect_uri))) + Ok((auth_url, SsoAuth::new(state, client_challenge, nonce.secret().clone(), redirect_uri, binding_hash))) } pub async fn exchange_code( diff --git a/src/static/scripts/admin.css b/src/static/scripts/admin.css index 0df56771..c7c6f443 100644 --- a/src/static/scripts/admin.css +++ b/src/static/scripts/admin.css @@ -1,6 +1,17 @@ body { padding-top: 75px; } +/* Some extra widths for the main layout */ +@media (min-width: 1600px) { + .container-xxl { + max-width: 1520px; + } +} +@media (min-width: 1800px) { + .container-xxl { + max-width: 1720px; + } +} img { width: 48px; height: 48px; @@ -38,8 +49,8 @@ img { max-width: 130px; } #users-table .vw-actions, #orgs-table .vw-actions { - min-width: 155px; - max-width: 160px; + min-width: 170px; + max-width: 180px; } #users-table .vw-org-cell { max-height: 120px; diff --git a/src/static/templates/admin/base.hbs b/src/static/templates/admin/base.hbs index f56d8262..e1dcacb5 100644 --- a/src/static/templates/admin/base.hbs +++ b/src/static/templates/admin/base.hbs @@ -27,7 +27,7 @@