diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 215ee2015c..2bd1c235eb 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -261,6 +261,7 @@ jobs: run: | npm ci npm install -D @playwright/test + sudo npx playwright install-deps npx playwright install npx playwright test - uses: actions/upload-artifact@v4 diff --git a/Cargo.lock b/Cargo.lock index 1d2b8a57f3..594d28618a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1584,7 +1584,7 @@ dependencies = [ name = "barebones-template-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] @@ -1670,7 +1670,7 @@ version = "0.0.0" dependencies = [ "bevy", "color", - "dioxus", + "dioxus 0.7.1", "dioxus-native", "tracing-subscriber", "wgpu 26.0.1", @@ -3209,7 +3209,7 @@ name = "bluetooth-scanner" version = "0.1.1" dependencies = [ "btleplug", - "dioxus", + "dioxus 0.7.1", "futures", "futures-channel", "tokio", @@ -4258,9 +4258,19 @@ dependencies = [ [[package]] name = "const-serialize" version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd339aa356cc6452308fad2ee56623f900a8e68bc0ab9360a0ddb8270e5640c8" +dependencies = [ + "const-serialize-macro 0.7.1", + "serde", +] + +[[package]] +name = "const-serialize" +version = "0.8.0" dependencies = [ - "const-serialize", - "const-serialize-macro", + "const-serialize 0.8.0", + "const-serialize-macro 0.8.0", "rand 0.9.2", "serde", ] @@ -4268,6 +4278,17 @@ dependencies = [ [[package]] name = "const-serialize-macro" version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "797d158acb331e2a89d696343a27cd39bf7e36aaef33ba4799a5ef1526e24861" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.108", +] + +[[package]] +name = "const-serialize-macro" +version = "0.8.0" dependencies = [ "proc-macro2", "quote", @@ -5346,36 +5367,36 @@ name = "dioxus" version = "0.7.1" dependencies = [ "criterion", - "dioxus", - "dioxus-asset-resolver", - "dioxus-cli-config", - "dioxus-config-macro", - "dioxus-config-macros", - "dioxus-core", - "dioxus-core-macro", + "dioxus 0.7.1", + "dioxus-asset-resolver 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-config-macro 0.7.1", + "dioxus-config-macros 0.7.1", + "dioxus-core 0.7.1", + "dioxus-core-macro 0.7.1", "dioxus-desktop", - "dioxus-devtools", - "dioxus-document", - "dioxus-fullstack", - "dioxus-fullstack-macro", - "dioxus-history", - "dioxus-hooks", - "dioxus-html", + "dioxus-devtools 0.7.1", + "dioxus-document 0.7.1", + "dioxus-fullstack 0.7.1", + "dioxus-fullstack-macro 0.7.1", + "dioxus-history 0.7.1", + "dioxus-hooks 0.7.1", + "dioxus-html 0.7.1", "dioxus-liveview", - "dioxus-logger", + "dioxus-logger 0.7.1", "dioxus-native", "dioxus-router", "dioxus-server", - "dioxus-signals", + "dioxus-signals 0.7.1", "dioxus-ssr", - "dioxus-stores", - "dioxus-web", + "dioxus-stores 0.7.1", + "dioxus-web 0.7.1", "env_logger 0.11.8", "futures-util", - "manganis", + "manganis 0.7.1", "rand 0.9.2", "serde", - "subsecond", + "subsecond 0.7.1", "thiserror 2.0.17", "tokio", "tracing", @@ -5383,12 +5404,39 @@ dependencies = [ "wasm-splitter", ] +[[package]] +name = "dioxus" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f76e820919058a685a1fdbb2ef4888c73ac77d623c39a7dfde2aa812947246be" +dependencies = [ + "dioxus-asset-resolver 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-cli-config 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + 
"dioxus-config-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-config-macros 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-devtools 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-document 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-fullstack 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-history 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-hooks 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-html 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-signals 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-stores 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-web 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "manganis 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "subsecond 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "warnings", +] + [[package]] name = "dioxus-asset-resolver" version = "0.7.1" dependencies = [ - "dioxus", - "dioxus-cli-config", + "dioxus 0.7.1", + "dioxus-cli-config 0.7.1", "http 1.3.1", "infer", "jni 0.21.1", @@ -5404,11 +5452,32 @@ dependencies = [ "web-sys", ] +[[package]] +name = "dioxus-asset-resolver" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f6a124667ce5565c39fe2f33af45c21fe459c5bfcf7a8074ad12c9e9da5817c" +dependencies = [ + "dioxus-cli-config 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "http 1.3.1", + "infer", + "jni 0.21.1", + "js-sys", + "ndk 0.9.0", + "ndk-context", + "ndk-sys 0.6.0+11769913", + "percent-encoding", + "thiserror 2.0.17", + "tokio", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "dioxus-autofmt" version = "0.7.1" dependencies = [ - "dioxus-rsx", + "dioxus-rsx 0.7.1", "pretty_assertions", "prettyplease", "proc-macro2", @@ -5452,24 +5521,25 @@ dependencies = [ "clap", "console 0.16.1", "console-subscriber", - "const-serialize", + "const-serialize 0.7.1", + "const-serialize 0.8.0", "convert_case 0.8.0", "crossterm 0.29.0", "ctrlc", "depinfo", "dioxus-autofmt", "dioxus-check", - "dioxus-cli-config", + "dioxus-cli-config 0.7.1", "dioxus-cli-opt", "dioxus-cli-telemetry", "dioxus-component-manifest", - "dioxus-core", - "dioxus-core-types", - "dioxus-devtools-types", + "dioxus-core 0.7.1", + "dioxus-core-types 0.7.1", + "dioxus-devtools-types 0.7.1", "dioxus-dx-wire-format", - "dioxus-fullstack", - "dioxus-html", - "dioxus-rsx", + "dioxus-fullstack 0.7.1", + "dioxus-html 0.7.1", + "dioxus-rsx 0.7.1", "dioxus-rsx-hotreload", "dioxus-rsx-rosetta", "dircpy", @@ -5495,8 +5565,9 @@ dependencies = [ "krates", "local-ip-address", "log", - "manganis", - "manganis-core", + "manganis 0.7.1", + "manganis-core 0.7.1", + "manganis-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "memmap", "memoize", "notify", @@ -5522,7 +5593,7 @@ dependencies = [ "serde_json5", "shell-words", "strum 0.27.2", - "subsecond-types", + "subsecond-types 0.7.1", "syn 2.0.108", "tar", "target-lexicon 0.13.3", @@ -5560,6 +5631,15 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "dioxus-cli-config" 
+version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "babc8eaf90379352bc4820830749fd231feb9312433d4094b4e7b79d912b3d96" +dependencies = [ + "wasm-bindgen", +] + [[package]] name = "dioxus-cli-opt" version = "0.7.1" @@ -5568,13 +5648,13 @@ dependencies = [ "browserslist-rs 0.19.0", "built 0.8.0", "codemap", - "const-serialize", + "const-serialize 0.8.0", "grass", "image", "imagequant", "lightningcss", - "manganis", - "manganis-core", + "manganis 0.7.1", + "manganis-core 0.7.1", "mozjpeg", "object 0.37.3", "png", @@ -5612,7 +5692,16 @@ dependencies = [ name = "dioxus-cli-optimization-test" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", + "serde", + "serde_json", +] + +[[package]] +name = "dioxus-cli-optimization-test-07" +version = "0.0.1" +dependencies = [ + "dioxus 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde", "serde_json", ] @@ -5646,23 +5735,39 @@ dependencies = [ "quote", ] +[[package]] +name = "dioxus-config-macro" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30018b5b95567cee42febbb444d5e5e47dbe3e91fa6e44b9e571edad0184cd36" +dependencies = [ + "proc-macro2", + "quote", +] + [[package]] name = "dioxus-config-macros" version = "0.7.1" +[[package]] +name = "dioxus-config-macros" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a16b25f8761253ed5ffa4d0789376310fbbc1bbaa8190fc2f374db82c6285a1" + [[package]] name = "dioxus-core" version = "0.7.1" dependencies = [ "anyhow", "const_format", - "dioxus", - "dioxus-core-types", - "dioxus-html", + "dioxus 0.7.1", + "dioxus-core-types 0.7.1", + "dioxus-html 0.7.1", "dioxus-ssr", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1", "longest-increasing-subsequence", "pretty_assertions", "rand 0.9.2", @@ -5672,7 +5777,7 @@ dependencies = [ "serde", "slab", "slotmap", - "subsecond", + "subsecond 0.7.1", "sysinfo 0.35.2", "tokio", "tracing", @@ -5682,14 +5787,37 @@ dependencies = [ "web-sys", ] +[[package]] +name = "dioxus-core" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75468d08468919f783b0f7ee826802f4e8e66c5b5a0451245d861c211ca18216" +dependencies = [ + "anyhow", + "const_format", + "dioxus-core-types 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel", + "futures-util", + "generational-box 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "longest-increasing-subsequence", + "rustc-hash 2.1.1", + "rustversion", + "serde", + "slab", + "slotmap", + "subsecond 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "tracing", + "warnings", +] + [[package]] name = "dioxus-core-macro" version = "0.7.1" dependencies = [ "convert_case 0.8.0", - "dioxus", - "dioxus-html", - "dioxus-rsx", + "dioxus 0.7.1", + "dioxus-html 0.7.1", + "dioxus-rsx 0.7.1", "proc-macro2", "quote", "rustversion", @@ -5698,9 +5826,28 @@ dependencies = [ "trybuild", ] +[[package]] +name = "dioxus-core-macro" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f145abdb2a3f858456cb4382390863cf0398c228ad0733618f48891da7687be3" +dependencies = [ + "convert_case 0.8.0", + "dioxus-rsx 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2", + "quote", + "syn 2.0.108", +] + +[[package]] +name = "dioxus-core-types" +version = "0.7.1" + [[package]] name = "dioxus-core-types" version = "0.7.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f5ecf5a51de06d78aded3b5f7516a258f53117cba718bc5706317a3c04c844" [[package]] name = "dioxus-desktop" @@ -5711,28 +5858,28 @@ dependencies = [ "bytes", "cocoa", "core-foundation 0.10.1", - "dioxus", - "dioxus-asset-resolver", - "dioxus-cli-config", - "dioxus-core", - "dioxus-devtools", - "dioxus-document", - "dioxus-history", - "dioxus-hooks", - "dioxus-html", - "dioxus-interpreter-js", - "dioxus-signals", + "dioxus 0.7.1", + "dioxus-asset-resolver 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-devtools 0.7.1", + "dioxus-document 0.7.1", + "dioxus-history 0.7.1", + "dioxus-hooks 0.7.1", + "dioxus-html 0.7.1", + "dioxus-interpreter-js 0.7.1", + "dioxus-signals 0.7.1", "dioxus-ssr", "dunce", "exitcode", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1", "global-hotkey", "http-range", "infer", "jni 0.21.1", - "lazy-js-bundle", + "lazy-js-bundle 0.7.1", "libc", "muda", "ndk 0.9.0", @@ -5765,15 +5912,15 @@ dependencies = [ name = "dioxus-devtools" version = "0.7.1" dependencies = [ - "dioxus-cli-config", - "dioxus-core", - "dioxus-devtools-types", - "dioxus-signals", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-devtools-types 0.7.1", + "dioxus-signals 0.7.1", "futures-channel", "futures-util", "serde", "serde_json", - "subsecond", + "subsecond 0.7.1", "thiserror 2.0.17", "tokio", "tracing", @@ -5781,28 +5928,77 @@ dependencies = [ "warnings", ] +[[package]] +name = "dioxus-devtools" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4eb2c5019b7fa72e8e6b21ba99e9263bd390c9a30bbf09793b72f4b57ed7c3d7" +dependencies = [ + "dioxus-cli-config 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-devtools-types 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-signals 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "serde", + "serde_json", + "subsecond 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "thiserror 2.0.17", + "tracing", + "tungstenite 0.27.0", + "warnings", +] + [[package]] name = "dioxus-devtools-types" version = "0.7.1" dependencies = [ - "dioxus-core", + "dioxus-core 0.7.1", "serde", - "subsecond-types", + "subsecond-types 0.7.1", +] + +[[package]] +name = "dioxus-devtools-types" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b007cec5b8548281921c4e4678926a3936e9d6757e951380685cc6121a6f974" +dependencies = [ + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "serde", + "subsecond-types 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "dioxus-document" version = "0.7.1" dependencies = [ - "dioxus", - "dioxus-core", - "dioxus-core-macro", - "dioxus-core-types", - "dioxus-html", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "dioxus-core-macro 0.7.1", + "dioxus-core-types 0.7.1", + "dioxus-html 0.7.1", "futures-channel", "futures-util", - "generational-box", - "lazy-js-bundle", + "generational-box 0.7.1", + "lazy-js-bundle 0.7.1", + "serde", + "serde_json", + "tracing", +] + +[[package]] +name = "dioxus-document" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c55bcae9aaf150d4a141c61b3826da5a7ac23dfff09726568525cd46336e9a2" +dependencies = [ + "dioxus-core 0.7.1 
(registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core-types 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-html 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel", + "futures-util", + "generational-box 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy-js-bundle 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde", "serde_json", "tracing", @@ -5813,10 +6009,10 @@ name = "dioxus-dx-wire-format" version = "0.7.1" dependencies = [ "cargo_metadata", - "manganis-core", + "manganis-core 0.7.1", "serde", "serde_json", - "subsecond-types", + "subsecond-types 0.7.1", ] [[package]] @@ -5829,10 +6025,10 @@ dependencies = [ "base64 0.22.1", "bytes", "ciborium", - "dioxus", - "dioxus-html", + "dioxus 0.7.1", + "dioxus-html 0.7.1", "dioxus-ssr", - "dioxus-stores", + "dioxus-stores 0.7.1", "form_urlencoded", "futures", "futures-util", @@ -5884,16 +6080,16 @@ dependencies = [ "const_format", "content_disposition", "derive_more 2.0.1", - "dioxus", - "dioxus-asset-resolver", - "dioxus-cli-config", - "dioxus-core", - "dioxus-fullstack-core", - "dioxus-fullstack-macro", - "dioxus-hooks", - "dioxus-html", + "dioxus 0.7.1", + "dioxus-asset-resolver 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-fullstack-core 0.7.1", + "dioxus-fullstack-macro 0.7.1", + "dioxus-hooks 0.7.1", + "dioxus-html 0.7.1", "dioxus-server", - "dioxus-signals", + "dioxus-signals 0.7.1", "form_urlencoded", "futures", "futures-channel", @@ -5934,24 +6130,109 @@ dependencies = [ "xxhash-rust", ] +[[package]] +name = "dioxus-fullstack" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff04cef82d6639eb15186f626298645dbd92978bf66dc3efd2e5984a2ff4a1ff" +dependencies = [ + "anyhow", + "async-stream", + "async-tungstenite", + "axum 0.8.6", + "axum-core 0.5.5", + "base64 0.22.1", + "bytes", + "ciborium", + "const-str 0.7.0", + "const_format", + "content_disposition", + "derive_more 2.0.1", + "dioxus-asset-resolver 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-cli-config 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-fullstack-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-fullstack-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-hooks 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-html 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-signals 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "form_urlencoded", + "futures", + "futures-channel", + "futures-util", + "gloo-net", + "headers", + "http 1.3.1", + "http-body 1.0.1", + "http-body-util", + "js-sys", + "mime", + "pin-project", + "reqwest 0.12.24", + "rustversion", + "send_wrapper", + "serde", + "serde_json", + "serde_qs", + "serde_urlencoded", + "thiserror 2.0.17", + "tokio-util", + "tracing", + "tungstenite 0.27.0", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "xxhash-rust", +] + +[[package]] +name = "dioxus-fullstack-core" +version = "0.7.1" +dependencies = [ + "anyhow", + "axum-core 0.5.5", + "base64 0.22.1", + "ciborium", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "dioxus-document 0.7.1", + "dioxus-fullstack 0.7.1", + 
"dioxus-history 0.7.1", + "dioxus-hooks 0.7.1", + "dioxus-signals 0.7.1", + "futures-channel", + "futures-util", + "generational-box 0.7.1", + "http 1.3.1", + "inventory", + "parking_lot", + "serde", + "serde_json", + "thiserror 2.0.17", + "tokio", + "tracing", +] + [[package]] name = "dioxus-fullstack-core" version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41281c7cd4d311a50933256e19a5d91d0d950ad350dd3232bd4321fdd3a59fb0" dependencies = [ "anyhow", "axum-core 0.5.5", "base64 0.22.1", "ciborium", - "dioxus", - "dioxus-core", - "dioxus-document", - "dioxus-fullstack", - "dioxus-history", - "dioxus-hooks", - "dioxus-signals", + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-document 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-history 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-hooks 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-signals 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "http 1.3.1", "inventory", "parking_lot", @@ -5969,7 +6250,7 @@ dependencies = [ "axum 0.8.6", "const_format", "convert_case 0.8.0", - "dioxus", + "dioxus 0.7.1", "proc-macro2", "quote", "serde", @@ -5978,12 +6259,36 @@ dependencies = [ "xxhash-rust", ] +[[package]] +name = "dioxus-fullstack-macro" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae73023c8b8fee2692fc50a28063336f0b6930e86727e30c1047c92d30805b49" +dependencies = [ + "const_format", + "convert_case 0.8.0", + "proc-macro2", + "quote", + "syn 2.0.108", + "xxhash-rust", +] + [[package]] name = "dioxus-history" version = "0.7.1" dependencies = [ - "dioxus", - "dioxus-core", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "tracing", +] + +[[package]] +name = "dioxus-history" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dac73657da5c7a20629482d774b52f4a4f7cb57a520649f1d855d4073e809c98" +dependencies = [ + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "tracing", ] @@ -5991,12 +6296,12 @@ dependencies = [ name = "dioxus-hooks" version = "0.7.1" dependencies = [ - "dioxus", - "dioxus-core", - "dioxus-signals", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "dioxus-signals 0.7.1", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1", "reqwest 0.12.24", "rustversion", "slab", @@ -6006,29 +6311,46 @@ dependencies = [ "web-sys", ] +[[package]] +name = "dioxus-hooks" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffd445f16d64939e06cd71a1c63a665f383fda6b7882f4c6f8f1bd6efca2046" +dependencies = [ + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-signals 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel", + "futures-util", + "generational-box 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rustversion", + "slab", + "tracing", + "warnings", +] + [[package]] name = "dioxus-html" version = "0.7.1" dependencies = [ "async-trait", "bytes", - "dioxus", - "dioxus-core", - "dioxus-core-macro", - "dioxus-core-types", - "dioxus-hooks", - "dioxus-html-internal-macro", - "dioxus-rsx", - "dioxus-web", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "dioxus-core-macro 
0.7.1", + "dioxus-core-types 0.7.1", + "dioxus-hooks 0.7.1", + "dioxus-html-internal-macro 0.7.1", + "dioxus-rsx 0.7.1", + "dioxus-web 0.7.1", "enumset", "euclid", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1", "js-sys", "keyboard-types", - "lazy-js-bundle", - "manganis", + "lazy-js-bundle 0.7.1", + "manganis 0.7.1", "rustversion", "serde", "serde_json", @@ -6037,6 +6359,30 @@ dependencies = [ "tracing", ] +[[package]] +name = "dioxus-html" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f407fc73a9554a644872fcccc9faf762acad8f45158e3d67e42ab8dd42f4586" +dependencies = [ + "async-trait", + "bytes", + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core-types 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-hooks 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-html-internal-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "enumset", + "euclid", + "futures-channel", + "futures-util", + "generational-box 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "keyboard-types", + "lazy-js-bundle 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rustversion", + "tracing", +] + [[package]] name = "dioxus-html-internal-macro" version = "0.7.1" @@ -6048,15 +6394,27 @@ dependencies = [ "trybuild", ] +[[package]] +name = "dioxus-html-internal-macro" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a968aae4bc92de87cbac3d0d043803b25a7c62c187841e61adcc9b49917c2b2a" +dependencies = [ + "convert_case 0.8.0", + "proc-macro2", + "quote", + "syn 2.0.108", +] + [[package]] name = "dioxus-interpreter-js" version = "0.7.1" dependencies = [ - "dioxus-core", - "dioxus-core-types", - "dioxus-html", + "dioxus-core 0.7.1", + "dioxus-core-types 0.7.1", + "dioxus-html 0.7.1", "js-sys", - "lazy-js-bundle", + "lazy-js-bundle 0.7.1", "rustc-hash 2.1.1", "serde", "sledgehammer_bindgen", @@ -6066,22 +6424,38 @@ dependencies = [ "web-sys", ] +[[package]] +name = "dioxus-interpreter-js" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83ab170d89308399205f8ad3d43d8d419affe317016b41ca0695186f7593cba2" +dependencies = [ + "js-sys", + "lazy-js-bundle 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-hash 2.1.1", + "sledgehammer_bindgen", + "sledgehammer_utils", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "dioxus-liveview" version = "0.7.1" dependencies = [ "axum 0.8.6", - "dioxus", - "dioxus-cli-config", - "dioxus-core", - "dioxus-devtools", - "dioxus-document", - "dioxus-history", - "dioxus-html", - "dioxus-interpreter-js", + "dioxus 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-devtools 0.7.1", + "dioxus-document 0.7.1", + "dioxus-history 0.7.1", + "dioxus-html 0.7.1", + "dioxus-interpreter-js 0.7.1", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1", "rustc-hash 2.1.1", "serde", "serde_json", @@ -6098,8 +6472,20 @@ dependencies = [ name = "dioxus-logger" version = "0.7.1" dependencies = [ - "dioxus", - "dioxus-cli-config", + "dioxus 0.7.1", + "dioxus-cli-config 0.7.1", + "tracing", + "tracing-subscriber", + "tracing-wasm", +] + +[[package]] +name = "dioxus-logger" +version = "0.7.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "42237934c6a67f5ed9a8c37e47ca980ee7cfec9e783a9a1f8c2e36c8b96ae74b" +dependencies = [ + "dioxus-cli-config 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "tracing", "tracing-subscriber", "tracing-wasm", @@ -6118,13 +6504,13 @@ dependencies = [ "blitz-paint", "blitz-shell", "blitz-traits", - "dioxus-asset-resolver", - "dioxus-cli-config", - "dioxus-core", - "dioxus-devtools", - "dioxus-document", - "dioxus-history", - "dioxus-html", + "dioxus-asset-resolver 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-devtools 0.7.1", + "dioxus-document 0.7.1", + "dioxus-history 0.7.1", + "dioxus-html 0.7.1", "dioxus-native-dom", "futures-util", "keyboard-types", @@ -6141,9 +6527,9 @@ version = "0.7.1" dependencies = [ "blitz-dom", "blitz-traits", - "dioxus", - "dioxus-core", - "dioxus-html", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "dioxus-html 0.7.1", "futures-util", "keyboard-types", "rustc-hash 2.1.1", @@ -6154,21 +6540,21 @@ dependencies = [ name = "dioxus-playwright-default-features-disabled-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "dioxus-playwright-fullstack-error-codes-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "dioxus-playwright-fullstack-errors-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde", "tokio", ] @@ -6177,7 +6563,7 @@ dependencies = [ name = "dioxus-playwright-fullstack-hydration-order-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde", "tokio", ] @@ -6186,7 +6572,7 @@ dependencies = [ name = "dioxus-playwright-fullstack-mounted-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde", "tokio", ] @@ -6195,7 +6581,7 @@ dependencies = [ name = "dioxus-playwright-fullstack-routing-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde", "tokio", ] @@ -6204,14 +6590,14 @@ dependencies = [ name = "dioxus-playwright-fullstack-spread-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "dioxus-playwright-fullstack-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "futures", "serde", "tokio", @@ -6222,7 +6608,7 @@ name = "dioxus-playwright-liveview-test" version = "0.0.1" dependencies = [ "axum 0.8.6", - "dioxus", + "dioxus 0.7.1", "dioxus-liveview", "tokio", ] @@ -6231,21 +6617,21 @@ dependencies = [ name = "dioxus-playwright-web-hash-routing-test" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "dioxus-playwright-web-routing-test" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "dioxus-playwright-web-test" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde_json", "tracing", "tracing-wasm", @@ -6257,7 +6643,7 @@ dependencies = [ name = "dioxus-pwa-example" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] @@ -6268,17 +6654,17 @@ dependencies = [ "base64 0.22.1", "ciborium", "criterion", - "dioxus", - "dioxus-cli-config", - "dioxus-core", - "dioxus-core-macro", - "dioxus-fullstack-core", - "dioxus-history", - "dioxus-hooks", - "dioxus-html", + "dioxus 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-core-macro 0.7.1", + "dioxus-fullstack-core 0.7.1", + "dioxus-history 0.7.1", + "dioxus-hooks 0.7.1", + "dioxus-html 0.7.1", "dioxus-router", "dioxus-router-macro", - "dioxus-signals", + 
"dioxus-signals 0.7.1", "dioxus-ssr", "percent-encoding", "rustversion", @@ -6294,7 +6680,7 @@ version = "0.7.1" dependencies = [ "base16", "digest", - "dioxus", + "dioxus 0.7.1", "proc-macro2", "quote", "sha2", @@ -6314,13 +6700,25 @@ dependencies = [ "syn 2.0.108", ] +[[package]] +name = "dioxus-rsx" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f026380dfda8b93ad995c0a90a62a17b8afeb246baff1b781a52c7b1b3ebd791" +dependencies = [ + "proc-macro2", + "proc-macro2-diagnostics", + "quote", + "syn 2.0.108", +] + [[package]] name = "dioxus-rsx-hotreload" version = "0.7.1" dependencies = [ - "dioxus-core", - "dioxus-core-types", - "dioxus-rsx", + "dioxus-core 0.7.1", + "dioxus-core-types 0.7.1", + "dioxus-rsx 0.7.1", "internment", "proc-macro2", "proc-macro2-diagnostics", @@ -6335,8 +6733,8 @@ version = "0.7.1" dependencies = [ "convert_case 0.8.0", "dioxus-autofmt", - "dioxus-html", - "dioxus-rsx", + "dioxus-html 0.7.1", + "dioxus-rsx 0.7.1", "html_parser", "htmlentity", "pretty_assertions", @@ -6357,26 +6755,26 @@ dependencies = [ "chrono", "ciborium", "dashmap 6.1.0", - "dioxus", - "dioxus-cli-config", - "dioxus-core", - "dioxus-core-macro", - "dioxus-devtools", - "dioxus-document", - "dioxus-fullstack-core", - "dioxus-history", - "dioxus-hooks", - "dioxus-html", - "dioxus-interpreter-js", - "dioxus-logger", + "dioxus 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-core-macro 0.7.1", + "dioxus-devtools 0.7.1", + "dioxus-document 0.7.1", + "dioxus-fullstack-core 0.7.1", + "dioxus-history 0.7.1", + "dioxus-hooks 0.7.1", + "dioxus-html 0.7.1", + "dioxus-interpreter-js 0.7.1", + "dioxus-logger 0.7.1", "dioxus-router", - "dioxus-signals", + "dioxus-signals 0.7.1", "dioxus-ssr", "enumset", "futures", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1", "http 1.3.1", "http-body-util", "hyper 1.7.0", @@ -6393,7 +6791,7 @@ dependencies = [ "serde", "serde_json", "serde_qs", - "subsecond", + "subsecond 0.7.1", "thiserror 2.0.17", "tokio", "tokio-tungstenite 0.27.0", @@ -6411,11 +6809,11 @@ dependencies = [ name = "dioxus-signals" version = "0.7.1" dependencies = [ - "dioxus", - "dioxus-core", + "dioxus 0.7.1", + "dioxus-core 0.7.1", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1", "parking_lot", "rand 0.9.2", "reqwest 0.12.24", @@ -6427,14 +6825,30 @@ dependencies = [ "warnings", ] +[[package]] +name = "dioxus-signals" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3895cc17ff5b43ada07743111be586e7a927ed7ec511457020e4235e13e63fe6" +dependencies = [ + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel", + "futures-util", + "generational-box 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "parking_lot", + "rustc-hash 2.1.1", + "tracing", + "warnings", +] + [[package]] name = "dioxus-ssr" version = "0.7.1" dependencies = [ "askama_escape", - "dioxus", - "dioxus-core", - "dioxus-core-types", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "dioxus-core-types 0.7.1", "rustc-hash 2.1.1", ] @@ -6442,10 +6856,21 @@ dependencies = [ name = "dioxus-stores" version = "0.7.1" dependencies = [ - "dioxus", - "dioxus-core", - "dioxus-signals", - "dioxus-stores-macro", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "dioxus-signals 0.7.1", + "dioxus-stores-macro 0.7.1", +] + +[[package]] +name = "dioxus-stores" +version = "0.7.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8521729ac35f362476ac4eb7d1c4ab79e7e92a0facfdea3ee978c0ddf7108d37" +dependencies = [ + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-signals 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-stores-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -6453,8 +6878,20 @@ name = "dioxus-stores-macro" version = "0.7.1" dependencies = [ "convert_case 0.8.0", - "dioxus", - "dioxus-stores", + "dioxus 0.7.1", + "dioxus-stores 0.7.1", + "proc-macro2", + "quote", + "syn 2.0.108", +] + +[[package]] +name = "dioxus-stores-macro" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23a733d2684dc843e81954f6176b3353e4cfc71b6978a8e464591bb5536f610b" +dependencies = [ + "convert_case 0.8.0", "proc-macro2", "quote", "syn 2.0.108", @@ -6464,8 +6901,8 @@ dependencies = [ name = "dioxus-tailwind" version = "0.0.0" dependencies = [ - "dioxus", - "manganis", + "dioxus 0.7.1", + "manganis 0.7.1", ] [[package]] @@ -6473,26 +6910,26 @@ name = "dioxus-web" version = "0.7.1" dependencies = [ "ciborium", - "dioxus", - "dioxus-cli-config", - "dioxus-core", - "dioxus-core-types", - "dioxus-devtools", - "dioxus-document", - "dioxus-fullstack-core", - "dioxus-history", - "dioxus-html", - "dioxus-interpreter-js", - "dioxus-signals", + "dioxus 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-core-types 0.7.1", + "dioxus-devtools 0.7.1", + "dioxus-document 0.7.1", + "dioxus-fullstack-core 0.7.1", + "dioxus-history 0.7.1", + "dioxus-html 0.7.1", + "dioxus-interpreter-js 0.7.1", + "dioxus-signals 0.7.1", "dioxus-ssr", - "dioxus-web", + "dioxus-web 0.7.1", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1", "gloo-dialogs", "gloo-timers", "js-sys", - "lazy-js-bundle", + "lazy-js-bundle 0.7.1", "rustc-hash 2.1.1", "send_wrapper", "serde", @@ -6507,6 +6944,39 @@ dependencies = [ "web-sys", ] +[[package]] +name = "dioxus-web" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76155ecd44535e7c096ec8c5aac4a945899e47567ead4869babdaa74f3f9bca0" +dependencies = [ + "dioxus-cli-config 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core-types 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-devtools 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-document 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-history 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-html 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-interpreter-js 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-signals 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel", + "futures-util", + "generational-box 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "gloo-timers", + "js-sys", + "lazy-js-bundle 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-hash 2.1.1", + "send_wrapper", + "serde", + "serde-wasm-bindgen", + "serde_json", + "tracing", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", +] + [[package]] name = "dircpy" version = "0.3.19" @@ -6747,7 +7217,7 @@ name = "ecommerce-site" version = "0.1.1" dependencies = 
[ "chrono", - "dioxus", + "dioxus 0.7.1", "reqwest 0.12.24", "serde", ] @@ -7261,7 +7731,7 @@ dependencies = [ name = "file-explorer" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "open", ] @@ -7546,8 +8016,8 @@ dependencies = [ "axum_session", "axum_session_auth", "axum_session_sqlx", - "dioxus", - "dioxus-web", + "dioxus 0.7.1", + "dioxus-web 0.7.1", "execute", "http 1.3.1", "serde", @@ -7561,7 +8031,7 @@ dependencies = [ name = "fullstack-desktop-example" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde", ] @@ -7570,7 +8040,7 @@ name = "fullstack-hackernews-example" version = "0.1.0" dependencies = [ "chrono", - "dioxus", + "dioxus 0.7.1", "reqwest 0.12.24", "serde", ] @@ -7580,7 +8050,7 @@ name = "fullstack-hello-world-example" version = "0.1.0" dependencies = [ "anyhow", - "dioxus", + "dioxus 0.7.1", "reqwest 0.12.24", "serde", "serde_json", @@ -7592,7 +8062,7 @@ name = "fullstack-router-example" version = "0.1.0" dependencies = [ "axum 0.8.6", - "dioxus", + "dioxus 0.7.1", "serde", "tokio", ] @@ -7830,6 +8300,16 @@ dependencies = [ "tracing", ] +[[package]] +name = "generational-box" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3c1ae09dfd2d455484a54b56129b9821241c4b0e412227806b6c3730cd18a29" +dependencies = [ + "parking_lot", + "tracing", +] + [[package]] name = "generic-array" version = "0.14.9" @@ -8797,14 +9277,14 @@ dependencies = [ name = "harness-default-to-non-default" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-fullstack-desktop" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] @@ -8812,7 +9292,7 @@ name = "harness-fullstack-desktop-with-default" version = "0.0.1" dependencies = [ "anyhow", - "dioxus", + "dioxus 0.7.1", ] [[package]] @@ -8820,28 +9300,28 @@ name = "harness-fullstack-desktop-with-features" version = "0.0.1" dependencies = [ "anyhow", - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-fullstack-multi-target" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-fullstack-multi-target-no-default" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-fullstack-with-optional-tokio" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde", "tokio", ] @@ -8857,14 +9337,14 @@ dependencies = [ name = "harness-renderer-swap" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-simple-dedicated-client" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] @@ -8875,42 +9355,42 @@ version = "0.0.1" name = "harness-simple-desktop" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-simple-fullstack" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-simple-fullstack-native-with-default" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-simple-fullstack-with-default" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-simple-mobile" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-simple-web" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] @@ -9152,7 +9632,7 @@ name = "hotdog" version = "0.1.0" dependencies = [ "anyhow", - "dioxus", + "dioxus 0.7.1", "reqwest 0.12.24", "rusqlite", 
"serde", @@ -10425,6 +10905,12 @@ dependencies = [ name = "lazy-js-bundle" version = "0.7.1" +[[package]] +name = "lazy-js-bundle" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "409273b42d0e3ae7c8ce6b8cfbc6a27b7c7d83bbb94fc7f93f22cc9b90eea078" + [[package]] name = "lazy_static" version = "1.5.0" @@ -10921,20 +11407,43 @@ dependencies = [ name = "manganis" version = "0.7.1" dependencies = [ - "const-serialize", - "manganis-core", - "manganis-macro", + "const-serialize 0.8.0", + "manganis-core 0.7.1", + "manganis-macro 0.7.1", +] + +[[package]] +name = "manganis" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "124f8f094eb75783b38209ce4d534b9617da4efac652802d9bafe05043a3ec95" +dependencies = [ + "const-serialize 0.7.1", + "manganis-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "manganis-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "manganis-core" +version = "0.7.1" +dependencies = [ + "const-serialize 0.8.0", + "dioxus 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core-types 0.7.1", + "manganis 0.7.1", + "serde", ] [[package]] name = "manganis-core" version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fbd1fb8c5aabcc54c6b02dbc968e1c89c28f3e543f2789ef9e3ce45dbdf5df" dependencies = [ - "const-serialize", - "dioxus", - "dioxus-cli-config", - "dioxus-core-types", - "manganis", + "const-serialize 0.7.1", + "dioxus-cli-config 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core-types 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde", ] @@ -10944,8 +11453,22 @@ version = "0.7.1" dependencies = [ "dunce", "macro-string", - "manganis", - "manganis-core", + "manganis 0.7.1", + "manganis-core 0.7.1", + "proc-macro2", + "quote", + "syn 2.0.108", +] + +[[package]] +name = "manganis-macro" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45d6fec2a8249739bb30b53a08ecbb217f76096c08f1053f38ec3981ba424c11" +dependencies = [ + "dunce", + "macro-string", + "manganis-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2", "quote", "syn 2.0.108", @@ -11396,7 +11919,7 @@ dependencies = [ "blitz-paint", "blitz-traits", "bytemuck", - "dioxus", + "dioxus 0.7.1", "dioxus-native-dom", "futures-util", "pollster 0.4.0", @@ -11420,9 +11943,9 @@ dependencies = [ "bytes", "crossbeam-channel", "data-url 0.3.2", - "dioxus", - "dioxus-asset-resolver", - "dioxus-devtools", + "dioxus 0.7.1", + "dioxus-asset-resolver 0.7.1", + "dioxus-devtools 0.7.1", "dioxus-native-dom", "paste", "rustc-hash 1.1.0", @@ -11530,7 +12053,7 @@ dependencies = [ name = "nested-suspense" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde", "tokio", ] @@ -16079,7 +16602,7 @@ dependencies = [ name = "ssr-only" version = "0.7.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] @@ -16404,7 +16927,26 @@ dependencies = [ "memfd", "memmap2", "serde", - "subsecond-types", + "subsecond-types 0.7.1", + "thiserror 2.0.17", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "subsecond" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "834e8caec50249083ee6972a2f7645c4baadcb39d49ea801da1dc1d5e1c2ccb9" +dependencies = [ + "js-sys", + "libc", + "libloading 0.8.9", + "memfd", + "memmap2", + "serde", + "subsecond-types 
0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "thiserror 2.0.17", "wasm-bindgen", "wasm-bindgen-futures", @@ -16417,7 +16959,7 @@ version = "0.1.0" dependencies = [ "cross-tls-crate", "cross-tls-crate-dylib", - "dioxus-devtools", + "dioxus-devtools 0.7.1", ] [[package]] @@ -16427,6 +16969,15 @@ dependencies = [ "serde", ] +[[package]] +name = "subsecond-types" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6beffea67e72a7a530990b270fd0277971eae564fdc10c1e0080e928b477fab" +dependencies = [ + "serde", +] + [[package]] name = "subtle" version = "2.6.1" @@ -16457,7 +17008,7 @@ name = "suspense-carousel" version = "0.7.1" dependencies = [ "async-std", - "dioxus", + "dioxus 0.7.1", "serde", ] @@ -18962,7 +19513,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-compression", - "dioxus", + "dioxus 0.7.1", "dioxus-router", "futures", "getrandom 0.3.4", @@ -19565,7 +20116,7 @@ version = "0.0.0" dependencies = [ "bytemuck", "color", - "dioxus", + "dioxus 0.7.1", "dioxus-native", "tracing-subscriber", "wgpu 26.0.1", diff --git a/Cargo.toml b/Cargo.toml index f5f8d42586..0651d1ce3c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -136,6 +136,7 @@ members = [ "packages/playwright-tests/suspense-carousel", "packages/playwright-tests/nested-suspense", "packages/playwright-tests/cli-optimization", + "packages/playwright-tests/cli-optimization-07", "packages/playwright-tests/wasm-split-harness", "packages/playwright-tests/default-features-disabled", "packages/playwright-tests/fullstack-error-codes", @@ -193,9 +194,12 @@ dioxus-cli-opt = { path = "packages/cli-opt", version = "0.7.1" } dioxus-cli-telemetry = { path = "packages/cli-telemetry", version = "0.7.1" } dioxus-cli-config = { path = "packages/cli-config", version = "0.7.1" } -# const-serializea -const-serialize = { path = "packages/const-serialize", version = "0.7.1" } -const-serialize-macro = { path = "packages/const-serialize-macro", version = "0.7.1" } +# const-serialize +const-serialize = { path = "packages/const-serialize", version = "0.8.0" } +const-serialize-macro = { path = "packages/const-serialize-macro", version = "0.8.0" } + +# The version of const-serialize published with 0.7.0 and 0.7.1 that the CLI should still support +const-serialize-07 = { package = "const-serialize", version = "0.7.1" } # subsecond subsecond-types = { path = "packages/subsecond/subsecond-types", version = "0.7.1" } @@ -206,6 +210,9 @@ manganis = { path = "packages/manganis/manganis", version = "0.7.1" } manganis-core = { path = "packages/manganis/manganis-core", version = "0.7.1" } manganis-macro = { path = "packages/manganis/manganis-macro", version = "0.7.1" } +# The version of assets published with 0.7.0 and 0.7.1 that the CLI should still support +manganis-core-07 = { package = "manganis-core", version = "0.7.1" } + # wasm-split wasm-splitter = { path = "packages/wasm-split/wasm-split", version = "0.7.1" } wasm-split-macro = { path = "packages/wasm-split/wasm-split-macro", version = "0.7.1" } diff --git a/packages/cli/Cargo.toml b/packages/cli/Cargo.toml index ddbe943d19..cc4b1ff10c 100644 --- a/packages/cli/Cargo.toml +++ b/packages/cli/Cargo.toml @@ -98,6 +98,7 @@ brotli = "8.0.1" ignore = "0.4.23" env_logger = { workspace = true } const-serialize = { workspace = true, features = ["serde"] } +const-serialize-07 = { workspace = true, features = ["serde"] } tracing-subscriber = { version = "0.3.19", features = [ "std", @@ -122,6 +123,7 @@ log = { version = "0.4", features = ["max_level_off", 
"release_max_level_off"] } tempfile = "3.19.1" manganis = { workspace = true } manganis-core = { workspace = true } +manganis-core-07 = { workspace = true } target-lexicon = { version = "0.13.2", features = ["serde", "serde_support"] } wasm-encoder = "0.235.0" diff --git a/packages/cli/src/build/assets.rs b/packages/cli/src/build/assets.rs index 4ca40f3a53..2b39a4da50 100644 --- a/packages/cli/src/build/assets.rs +++ b/packages/cli/src/build/assets.rs @@ -11,10 +11,10 @@ //! process in the build system. //! //! We use the same lessons learned from the hot-patching engine which parses the binary file and its -//! symbol table to find symbols that match the `__MANGANIS__` prefix. These symbols are ideally data +//! symbol table to find symbols that match the `__ASSETS__` prefix. These symbols are ideally data //! symbols and contain the BundledAsset data type which implements ConstSerialize and ConstDeserialize. //! -//! When the binary is built, the `dioxus asset!()` macro will emit its metadata into the __MANGANIS__ +//! When the binary is built, the `dioxus asset!()` macro will emit its metadata into the __ASSETS__ //! symbols, which we process here. After reading the metadata directly from the executable, we then //! hash it and write the hash directly into the binary file. //! @@ -23,7 +23,7 @@ //! can be found relative to the current exe. Unfortunately, on android, the `current_exe` path is wrong, //! so the assets are resolved against the "asset root" - which is covered by the asset loader crate. //! -//! Finding the __MANGANIS__ symbols is not quite straightforward when hotpatching, especially on WASM +//! Finding the __ASSETS__ symbols is not quite straightforward when hotpatching, especially on WASM //! since we build and link the module as relocatable, which is not a stable WASM proposal. In this //! implementation, we handle both the non-PIE *and* PIC cases which are rather bespoke to our whole //! build system. @@ -35,9 +35,9 @@ use std::{ use crate::Result; use anyhow::{bail, Context}; -use const_serialize::{ConstVec, SerializeConst}; +use const_serialize::{serialize_const, ConstVec, SerializeConst}; use dioxus_cli_opt::AssetManifest; -use manganis::BundledAsset; +use manganis::{AssetOptions, AssetVariant, BundledAsset, ImageFormat, ImageSize}; use object::{File, Object, ObjectSection, ObjectSymbol, ReadCache, ReadRef, Section, Symbol}; use pdb::FallibleIterator; use rayon::iter::{IntoParallelRefMutIterator, ParallelIterator}; @@ -45,24 +45,204 @@ use rayon::iter::{IntoParallelRefMutIterator, ParallelIterator}; /// Extract all manganis symbols and their sections from the given object file. 
fn manganis_symbols<'a, 'b, R: ReadRef<'a>>( file: &'b File<'a, R>, -) -> impl Iterator<Item = (Symbol<'a, 'b>, Section<'a, 'b, R>)> + 'b { - file.symbols() - .filter(|symbol| { - if let Ok(name) = symbol.name() { - looks_like_manganis_symbol(name) - } else { - false +) -> impl Iterator<Item = (ManganisVersion, Symbol<'a, 'b>, Section<'a, 'b, R>)> + 'b { + file.symbols().filter_map(move |symbol| { + let name = symbol.name().ok()?; + let version = looks_like_manganis_symbol(name)?; + let section_index = symbol.section_index()?; + let section = file.section_by_index(section_index).ok()?; + Some((version, symbol, section)) + }) +} + +#[derive(Copy, Clone)] +enum ManganisVersion { + /// The legacy version of the manganis format published with 0.7.0 and 0.7.1 + Legacy, + /// The new version of the manganis format 0.7.2 onward + New, +} + +impl ManganisVersion { + fn size(&self) -> usize { + match self { + ManganisVersion::Legacy => { + <manganis_core_07::BundledAsset as const_serialize_07::SerializeConst>::MEMORY_LAYOUT.size() + } + ManganisVersion::New => BundledAsset::MEMORY_LAYOUT.size(), + } + } + + fn deserialize(&self, data: &[u8]) -> Option<BundledAsset> { + match self { + ManganisVersion::Legacy => { + let buffer = const_serialize_07::ConstReadBuffer::new(data); + + let (_, legacy_asset) = + const_serialize_07::deserialize_const!(manganis_core_07::BundledAsset, buffer)?; + + Some(legacy_asset_to_modern_asset(&legacy_asset)) - } - }) - .filter_map(move |symbol| { - let section_index = symbol.section_index()?; - let section = file.section_by_index(section_index).ok()?; - Some((symbol, section)) - }) + ManganisVersion::New => { + let (_, asset) = + const_serialize::deserialize_const!(manganis_core::BundledAsset, data)?; + + Some(asset) + } + } + } + + fn serialize(&self, asset: &BundledAsset) -> Vec<u8> { + match self { + ManganisVersion::Legacy => { + let legacy_asset = modern_asset_to_legacy_asset(asset); + let buffer = const_serialize_07::serialize_const( + &legacy_asset, + const_serialize_07::ConstVec::new(), + ); + buffer.as_ref().to_vec() + } + ManganisVersion::New => { + let buffer = serialize_const(asset, ConstVec::new()); + buffer.as_ref().to_vec() + } + } + } +} + +fn legacy_asset_to_modern_asset( + legacy_asset: &manganis_core_07::BundledAsset, +) -> manganis_core::BundledAsset { + let bundled_path = legacy_asset.bundled_path(); + let absolute_path = legacy_asset.absolute_source_path(); + let legacy_options = legacy_asset.options(); + let add_hash = legacy_options.hash_suffix(); + let options = match legacy_options.variant() { + manganis_core_07::AssetVariant::Image(image) => { + let format = match image.format() { + manganis_core_07::ImageFormat::Png => ImageFormat::Png, + manganis_core_07::ImageFormat::Jpg => ImageFormat::Jpg, + manganis_core_07::ImageFormat::Webp => ImageFormat::Webp, + manganis_core_07::ImageFormat::Avif => ImageFormat::Avif, + manganis_core_07::ImageFormat::Unknown => ImageFormat::Unknown, + }; + let size = match image.size() { + manganis_core_07::ImageSize::Automatic => ImageSize::Automatic, + manganis_core_07::ImageSize::Manual { width, height } => { + ImageSize::Manual { width, height } + } + }; + let preload = image.preloaded(); + + AssetOptions::image() + .with_format(format) + .with_size(size) + .with_preload(preload) + .with_hash_suffix(add_hash) + .into_asset_options() + } + manganis_core_07::AssetVariant::Folder(_) => AssetOptions::folder() + .with_hash_suffix(add_hash) + .into_asset_options(), + manganis_core_07::AssetVariant::Css(css) => AssetOptions::css() + .with_hash_suffix(add_hash) + .with_minify(css.minified()) + .with_preload(css.preloaded()) + .with_static_head(css.static_head()) + 
.into_asset_options(), + manganis_core_07::AssetVariant::CssModule(css_module) => AssetOptions::css_module() + .with_hash_suffix(add_hash) + .with_minify(css_module.minified()) + .with_preload(css_module.preloaded()) + .into_asset_options(), + manganis_core_07::AssetVariant::Js(js) => AssetOptions::js() + .with_hash_suffix(add_hash) + .with_minify(js.minified()) + .with_preload(js.preloaded()) + .with_static_head(js.static_head()) + .into_asset_options(), + _ => AssetOptions::builder().into_asset_options(), + }; + + BundledAsset::new(absolute_path, bundled_path, options) +} + +fn modern_asset_to_legacy_asset(modern_asset: &BundledAsset) -> manganis_core_07::BundledAsset { + let bundled_path = modern_asset.bundled_path(); + let absolute_path = modern_asset.absolute_source_path(); + let legacy_options = modern_asset.options(); + let add_hash = legacy_options.hash_suffix(); + let options = match legacy_options.variant() { + AssetVariant::Image(image) => { + let format = match image.format() { + ImageFormat::Png => manganis_core_07::ImageFormat::Png, + ImageFormat::Jpg => manganis_core_07::ImageFormat::Jpg, + ImageFormat::Webp => manganis_core_07::ImageFormat::Webp, + ImageFormat::Avif => manganis_core_07::ImageFormat::Avif, + ImageFormat::Unknown => manganis_core_07::ImageFormat::Unknown, + }; + let size = match image.size() { + ImageSize::Automatic => manganis_core_07::ImageSize::Automatic, + ImageSize::Manual { width, height } => { + manganis_core_07::ImageSize::Manual { width, height } + } + }; + let preload = image.preloaded(); + + manganis_core_07::AssetOptions::image() + .with_format(format) + .with_size(size) + .with_preload(preload) + .with_hash_suffix(add_hash) + .into_asset_options() + } + AssetVariant::Folder(_) => manganis_core_07::AssetOptions::folder() + .with_hash_suffix(add_hash) + .into_asset_options(), + AssetVariant::Css(css) => manganis_core_07::AssetOptions::css() + .with_hash_suffix(add_hash) + .with_minify(css.minified()) + .with_preload(css.preloaded()) + .with_static_head(css.static_head()) + .into_asset_options(), + AssetVariant::CssModule(css_module) => manganis_core_07::AssetOptions::css_module() + .with_hash_suffix(add_hash) + .with_minify(css_module.minified()) + .with_preload(css_module.preloaded()) + .into_asset_options(), + AssetVariant::Js(js) => manganis_core_07::AssetOptions::js() + .with_hash_suffix(add_hash) + .with_minify(js.minified()) + .with_preload(js.preloaded()) + .with_static_head(js.static_head()) + .into_asset_options(), + _ => manganis_core_07::AssetOptions::builder().into_asset_options(), + }; + + manganis_core_07::BundledAsset::new(absolute_path, bundled_path, options) +} + +fn looks_like_manganis_symbol(name: &str) -> Option<ManganisVersion> { + if name.contains("__MANGANIS__") { + Some(ManganisVersion::Legacy) + } else if name.contains("__ASSETS__") { + Some(ManganisVersion::New) + } else { + None + } } -fn looks_like_manganis_symbol(name: &str) -> bool { - name.contains("__MANGANIS__") +/// An asset offset in the binary +#[derive(Clone, Copy)] +struct ManganisSymbolOffset { + version: ManganisVersion, + offset: u64, +} + +impl ManganisSymbolOffset { + fn new(version: ManganisVersion, offset: u64) -> Self { + Self { version, offset } + } } /// Find the offsets of any manganis symbols in the given file. 
@@ -70,7 +250,7 @@ fn find_symbol_offsets<'a, R: ReadRef<'a>>( path: &Path, file_contents: &[u8], file: &File<'a, R>, -) -> Result<Vec<u64>> { +) -> Result<Vec<ManganisSymbolOffset>> { let pdb_file = find_pdb_file(path); match file.format() { @@ -118,7 +298,7 @@ fn find_pdb_file(path: &Path) -> Option<PathBuf> { } /// Find the offsets of any manganis symbols in a pdb file. -fn find_pdb_symbol_offsets(pdb_file: &Path) -> Result<Vec<u64>> { +fn find_pdb_symbol_offsets(pdb_file: &Path) -> Result<Vec<ManganisSymbolOffset>> { let pdb_file_handle = std::fs::File::open(pdb_file)?; let mut pdb_file = pdb::PDB::open(pdb_file_handle).context("Failed to open PDB file")?; let Ok(Some(sections)) = pdb_file.sections() else { @@ -142,26 +322,31 @@ fn find_pdb_symbol_offsets(pdb_file: &Path) -> Result<Vec<u64>> { }; let name = data.name.to_string(); - if name.contains("__MANGANIS__") { + if let Some(version) = looks_like_manganis_symbol(&name) { let section = sections .get(rva.section as usize - 1) .expect("Section index out of bounds"); - addresses.push((section.pointer_to_raw_data + rva.offset) as u64); + addresses.push(ManganisSymbolOffset::new( + version, + (section.pointer_to_raw_data + rva.offset) as u64, + )); } } Ok(addresses) } /// Find the offsets of any manganis symbols in a native object file. -fn find_native_symbol_offsets<'a, R: ReadRef<'a>>(file: &File<'a, R>) -> Result<Vec<u64>> { +fn find_native_symbol_offsets<'a, R: ReadRef<'a>>( + file: &File<'a, R>, +) -> Result<Vec<ManganisSymbolOffset>> { let mut offsets = Vec::new(); - for (symbol, section) in manganis_symbols(file) { + for (version, symbol, section) in manganis_symbols(file) { let virtual_address = symbol.address(); let Some((section_range_start, _)) = section.file_range() else { tracing::error!( - "Found __MANGANIS__ symbol {:?} in section {}, but the section has no file range", + "Found __ASSETS__ symbol {:?} in section {}, but the section has no file range", symbol.name(), section.index() ); @@ -172,7 +357,7 @@ fn find_native_symbol_offsets<'a, R: ReadRef<'a>>(file: &File<'a, R>) -> Result< .try_into() .expect("Virtual address should be greater than or equal to section address"); let file_offset = section_range_start + section_relative_address; - offsets.push(file_offset); + offsets.push(ManganisSymbolOffset::new(version, file_offset)); } Ok(offsets) @@ -198,7 +383,7 @@ fn eval_walrus_global_expr(module: &walrus::Module, expr: &walrus::ConstExpr) -> fn find_wasm_symbol_offsets<'a, R: ReadRef<'a>>( file_contents: &[u8], file: &File<'a, R>, -) -> Result<Vec<u64>> { +) -> Result<Vec<ManganisSymbolOffset>> { let Some(section) = file .sections() .find(|section| section.name() == Ok("<data>")) @@ -259,9 +444,9 @@ fn find_wasm_symbol_offsets<'a, R: ReadRef<'a>>( eval_walrus_global_expr(&module, &main_memory_offset).unwrap_or_default(); for export in module.exports.iter() { - if !looks_like_manganis_symbol(&export.name) { + let Some(version) = looks_like_manganis_symbol(&export.name) else { continue; - } + }; let walrus::ExportItem::Global(global) = export.item else { continue; }; @@ -273,7 +458,7 @@ fn find_wasm_symbol_offsets<'a, R: ReadRef<'a>>( let Some(virtual_address) = eval_walrus_global_expr(&module, &pointer) else { tracing::error!( - "Found __MANGANIS__ symbol {:?} in WASM file, but the global expression could not be evaluated", + "Found __ASSETS__ symbol {:?} in WASM file, but the global expression could not be evaluated", export.name ); continue; @@ -285,7 +470,7 @@ fn find_wasm_symbol_offsets<'a, R: ReadRef<'a>>( .expect("Virtual address should be greater than or equal to section address"); let file_offset = data_start_offset + section_relative_address; - offsets.push(file_offset); + 
offsets.push(ManganisSymbolOffset::new(version, file_offset)); } Ok(offsets) @@ -311,15 +496,14 @@ pub(crate) async fn extract_assets_from_file(path: impl AsRef) -> Result) -> Result, generics: &Generics) { +fn add_bounds(where_clause: &mut Option, generics: &Generics, krate: &Path) { let bounds = generics.params.iter().filter_map(|param| match param { syn::GenericParam::Type(ty) => { - Some::(parse_quote! { #ty: const_serialize::SerializeConst, }) + Some::(parse_quote! { #ty: #krate::SerializeConst, }) } syn::GenericParam::Lifetime(_) => None, syn::GenericParam::Const(_) => None, @@ -19,10 +19,33 @@ fn add_bounds(where_clause: &mut Option, generics: &Generics) { } /// Derive the const serialize trait for a struct -#[proc_macro_derive(SerializeConst)] -pub fn derive_parse(input: TokenStream) -> TokenStream { +#[proc_macro_derive(SerializeConst, attributes(const_serialize))] +pub fn derive_parse(raw_input: TokenStream) -> TokenStream { // Parse the input tokens into a syntax tree - let input = parse_macro_input!(input as DeriveInput); + let input = parse_macro_input!(raw_input as DeriveInput); + let krate = input.attrs.iter().find_map(|attr| { + attr.path() + .is_ident("const_serialize") + .then(|| { + let mut path = None; + if let Err(err) = attr.parse_nested_meta(|meta| { + if meta.path.is_ident("crate") { + let ident: Path = meta.value()?.parse()?; + path = Some(ident); + } + Ok(()) + }) { + return Some(Err(err)); + } + path.map(Ok) + }) + .flatten() + }); + let krate = match krate { + Some(Ok(path)) => path, + Some(Err(err)) => return err.into_compile_error().into(), + None => parse_quote! { const_serialize }, + }; match input.data { syn::Data::Struct(data) => match data.fields { @@ -30,7 +53,7 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { let ty = &input.ident; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let mut where_clause = where_clause.cloned(); - add_bounds(&mut where_clause, &input.generics); + add_bounds(&mut where_clause, &input.generics, &krate); let field_names = data.fields.iter().enumerate().map(|(i, field)| { field .ident @@ -43,13 +66,14 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { }); let field_types = data.fields.iter().map(|field| &field.ty); quote! { - unsafe impl #impl_generics const_serialize::SerializeConst for #ty #ty_generics #where_clause { - const MEMORY_LAYOUT: const_serialize::Layout = const_serialize::Layout::Struct(const_serialize::StructLayout::new( + unsafe impl #impl_generics #krate::SerializeConst for #ty #ty_generics #where_clause { + const MEMORY_LAYOUT: #krate::Layout = #krate::Layout::Struct(#krate::StructLayout::new( std::mem::size_of::(), &[#( - const_serialize::StructFieldLayout::new( + #krate::StructFieldLayout::new( + stringify!(#field_names), std::mem::offset_of!(#ty, #field_names), - <#field_types as const_serialize::SerializeConst>::MEMORY_LAYOUT, + <#field_types as #krate::SerializeConst>::MEMORY_LAYOUT, ), )*], )); @@ -60,10 +84,10 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { let ty = &input.ident; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let mut where_clause = where_clause.cloned(); - add_bounds(&mut where_clause, &input.generics); + add_bounds(&mut where_clause, &input.generics, &krate); quote! 
{ - unsafe impl #impl_generics const_serialize::SerializeConst for #ty #ty_generics #where_clause { - const MEMORY_LAYOUT: const_serialize::Layout = const_serialize::Layout::Struct(const_serialize::StructLayout::new( + unsafe impl #impl_generics #krate::SerializeConst for #ty #ty_generics #where_clause { + const MEMORY_LAYOUT: #krate::Layout = #krate::Layout::Struct(#krate::StructLayout::new( std::mem::size_of::(), &[], )); @@ -137,7 +161,7 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { let ty = &input.ident; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let mut where_clause = where_clause.cloned(); - add_bounds(&mut where_clause, &input.generics); + add_bounds(&mut where_clause, &input.generics, &krate); let mut last_discriminant = None; let variants = data.variants.iter().map(|variant| { let discriminant = variant @@ -151,6 +175,7 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { } }); last_discriminant = Some(discriminant.clone()); + let variant_name = &variant.ident; let field_names = variant.fields.iter().enumerate().map(|(i, field)| { field .ident @@ -162,17 +187,19 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { quote! { { #[allow(unused)] - #[derive(const_serialize::SerializeConst)] + #[derive(#krate::SerializeConst)] + #[const_serialize(crate = #krate)] #[repr(C)] struct VariantStruct #generics { #( #field_names: #field_types, )* } - const_serialize::EnumVariant::new( + #krate::EnumVariant::new( + stringify!(#variant_name), #discriminant as u32, - match VariantStruct::MEMORY_LAYOUT { - const_serialize::Layout::Struct(layout) => layout, + match ::MEMORY_LAYOUT { + #krate::Layout::Struct(layout) => layout, _ => panic!("VariantStruct::MEMORY_LAYOUT must be a struct"), }, ::std::mem::align_of::(), @@ -181,14 +208,14 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { } }); quote! 
{ - unsafe impl #impl_generics const_serialize::SerializeConst for #ty #ty_generics #where_clause { - const MEMORY_LAYOUT: const_serialize::Layout = const_serialize::Layout::Enum(const_serialize::EnumLayout::new( + unsafe impl #impl_generics #krate::SerializeConst for #ty #ty_generics #where_clause { + const MEMORY_LAYOUT: #krate::Layout = #krate::Layout::Enum(#krate::EnumLayout::new( ::std::mem::size_of::(), - const_serialize::PrimitiveLayout::new( + #krate::PrimitiveLayout::new( #discriminant_size as usize, ), { - const DATA: &'static [const_serialize::EnumVariant] = &[ + const DATA: &'static [#krate::EnumVariant] = &[ #( #variants, )* diff --git a/packages/const-serialize/Cargo.toml b/packages/const-serialize/Cargo.toml index 9d4b4e2647..567f7d1604 100644 --- a/packages/const-serialize/Cargo.toml +++ b/packages/const-serialize/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "const-serialize" -version = { workspace = true } +version = "0.8.0" authors = ["Evan Almloff"] edition = "2021" description = "A serialization framework that works in const contexts" @@ -8,7 +8,7 @@ license = "MIT OR Apache-2.0" repository = "https://github.com/dioxuslabs/dioxus" homepage = "https://dioxuslabs.com/learn/0.5/getting_started" keywords = ["const", "serialize"] -rust-version = "1.80.0" +rust-version = "1.83.0" [dependencies] const-serialize-macro = { workspace = true } diff --git a/packages/const-serialize/README.md b/packages/const-serialize/README.md index dfa66de631..25c3a36c63 100644 --- a/packages/const-serialize/README.md +++ b/packages/const-serialize/README.md @@ -29,7 +29,7 @@ const { }; 3]; let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); - let buf = buf.read(); + let buf = buf.as_ref(); let (buf, deserialized) = match deserialize_const!([Struct; 3], buf) { Some(data) => data, None => panic!("data mismatch"), @@ -54,4 +54,4 @@ The rust [nomicon](https://doc.rust-lang.org/nomicon/data.html) defines the memo - Only constant sized types are supported. This means that you can't serialize a type like `Vec`. These types are difficult to create in const contexts in general - Only types with a well defined memory layout are supported (see and ). `repr(Rust)` enums don't have a well defined layout, so they are not supported. `repr(C, u8)` enums can be used instead -- Const rust does not support mutable references or points, so this crate leans heavily on function data structures for data processing. +- Const rust does not support mutable references or points, so this crate leans heavily on functional data structures for data processing. diff --git a/packages/const-serialize/src/array.rs b/packages/const-serialize/src/array.rs new file mode 100644 index 0000000000..c38b9356df --- /dev/null +++ b/packages/const-serialize/src/array.rs @@ -0,0 +1,64 @@ +use crate::*; + +/// The layout for a constant sized array. The array layout is just a length and an item layout. 
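The derive now accepts a `#[const_serialize(crate = ...)]` attribute, so crates that re-export `const-serialize` under another path can still use it. A hedged sketch of the intended usage; the `my_framework::vendored` path is hypothetical:

```rust
// Point the generated impl at the re-exported crate path instead of `const_serialize`.
use my_framework::vendored::const_serialize as cs;

#[derive(cs::SerializeConst)]
#[const_serialize(crate = cs)]
#[repr(C)]
struct Point {
    x: u32,
    y: u32,
}
```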
+#[derive(Debug, Copy, Clone)] +pub struct ArrayLayout { + pub(crate) len: usize, + pub(crate) item_layout: &'static Layout, +} + +impl ArrayLayout { + /// Create a new array layout + pub const fn new(len: usize, item_layout: &'static Layout) -> Self { + Self { len, item_layout } + } +} + +unsafe impl SerializeConst for [T; N] { + const MEMORY_LAYOUT: Layout = Layout::Array(ArrayLayout { + len: N, + item_layout: &T::MEMORY_LAYOUT, + }); +} + +/// Serialize a constant sized array that is stored at the pointer passed in +pub(crate) const unsafe fn serialize_const_array( + ptr: *const (), + mut to: ConstVec, + layout: &ArrayLayout, +) -> ConstVec { + let len = layout.len; + let mut i = 0; + to = write_array(to, len); + while i < len { + let field = ptr.wrapping_byte_offset((i * layout.item_layout.size()) as _); + to = serialize_const_ptr(field, to, layout.item_layout); + i += 1; + } + to +} + +/// Deserialize an array type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. +pub(crate) const fn deserialize_const_array<'a>( + from: &'a [u8], + layout: &ArrayLayout, + mut out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { + let item_layout = layout.item_layout; + let Ok((_, mut from)) = take_array(from) else { + return None; + }; + let mut i = 0; + while i < layout.len { + let Some(new_from) = deserialize_const_ptr(from, item_layout, out) else { + return None; + }; + let Some((_, item_out)) = out.split_at_mut_checked(item_layout.size()) else { + return None; + }; + out = item_out; + from = new_from; + i += 1; + } + Some(from) +} diff --git a/packages/const-serialize/src/cbor.rs b/packages/const-serialize/src/cbor.rs new file mode 100644 index 0000000000..bc37cc1759 --- /dev/null +++ b/packages/const-serialize/src/cbor.rs @@ -0,0 +1,597 @@ +//! Const serialization utilities for the CBOR data format. +//! +//! ## Overview of the format +//! +//! Const serialize only supports a subset of the CBOR format, specifically the major types: +//! - UnsignedInteger +//! - NegativeInteger +//! - Bytes +//! - String +//! - Array +//! +//! Each item in CBOR starts with a leading byte, which determines the type of the item and additional information. +//! The additional information is encoded in the lower 5 bits of the leading byte and generally indicates either a +//! small number or how many of the next bytes are part of the first number. +//! +//! Resources: +//! The spec: +//! A playground to check examples against: + +use crate::ConstVec; + +/// Each item in CBOR starts with a leading byte, which determines the type of the item and additional information. +/// +/// The first 3 bits of the leading byte are the major type, which indicates the type of the item. +#[repr(u8)] +#[derive(PartialEq)] +enum MajorType { + /// An unsigned integer in the range 0..2^64. The value of the number is encoded in the remaining bits of the leading byte and any additional bytes. + UnsignedInteger = 0, + /// An unsigned integer in the range -2^64..-1. The value of the number is encoded in the remaining bits of the leading byte and any additional bytes + NegativeInteger = 1, + /// A byte sequence. The number of bytes in the sequence is encoded in the remaining bits of the leading byte and any additional bytes. + Bytes = 2, + /// A text sequence. The number of bytes in the sequence is encoded in the remaining bits of the leading byte and any additional bytes. + Text = 3, + /// A dynamically sized array of non-uniform data items. 
The number of items in the array is encoded in the remaining bits of the leading byte and any additional bytes. + Array = 4, + /// A map of pairs of data items. The first item in each pair is the key and the second item is the value. The number of items in the array is encoded in the remaining bits of the leading byte and any additional bytes. + Map = 5, + /// Tagged values - not supported + Tagged = 6, + /// Floating point values - not supported + Float = 7, +} + +impl MajorType { + /// The bitmask for the major type in the leading byte + const MASK: u8 = 0b0001_1111; + + const fn from_byte(byte: u8) -> Self { + match byte >> 5 { + 0 => MajorType::UnsignedInteger, + 1 => MajorType::NegativeInteger, + 2 => MajorType::Bytes, + 3 => MajorType::Text, + 4 => MajorType::Array, + 5 => MajorType::Map, + 6 => MajorType::Tagged, + 7 => MajorType::Float, + _ => panic!("Invalid major type"), + } + } +} + +/// Get the length of the item in bytes without deserialization. +const fn item_length(bytes: &[u8]) -> Result { + let [head, rest @ ..] = bytes else { + return Err(()); + }; + let major = MajorType::from_byte(*head); + let additional_information = *head & MajorType::MASK; + let length_of_item = match major { + // The length of the number is the total of: + // - The length of the number (which may be 0 if the number is encoded in additional information) + MajorType::UnsignedInteger | MajorType::NegativeInteger => { + get_length_of_number(additional_information) as usize + } + // The length of the text or bytes is the total of: + // - The length of the number that denotes the length of the text or bytes + // - The length of the text or bytes themselves + MajorType::Text | MajorType::Bytes => { + let length_of_number = get_length_of_number(additional_information); + let Ok((length_of_bytes, _)) = + grab_u64_with_byte_length(rest, length_of_number, additional_information) + else { + return Err(()); + }; + length_of_number as usize + length_of_bytes as usize + } + // The length of the map is the total of: + // - The length of the number that denotes the number of items + // - The length of the pairs of items themselves + MajorType::Array | MajorType::Map => { + let length_of_number = get_length_of_number(additional_information); + let Ok((length_of_items, _)) = + grab_u64_with_byte_length(rest, length_of_number, additional_information) + else { + return Err(()); + }; + let mut total_length = length_of_number as usize; + let mut items_left = length_of_items * if let MajorType::Map = major { 2 } else { 1 }; + while items_left > 0 { + let Some((_, after)) = rest.split_at_checked(total_length) else { + return Err(()); + }; + let Ok(item_length) = item_length(after) else { + return Err(()); + }; + total_length += item_length; + items_left -= 1; + } + total_length + } + _ => return Err(()), + }; + let length_of_head = 1; + Ok(length_of_head + length_of_item) +} + +/// Read a number from the buffer, returning the number and the remaining bytes. +pub(crate) const fn take_number(bytes: &[u8]) -> Result<(i64, &[u8]), ()> { + let [head, rest @ ..] 
= bytes else { + return Err(()); + }; + let major = MajorType::from_byte(*head); + let additional_information = *head & MajorType::MASK; + match major { + MajorType::UnsignedInteger => { + let Ok((number, rest)) = grab_u64(rest, additional_information) else { + return Err(()); + }; + Ok((number as i64, rest)) + } + MajorType::NegativeInteger => { + let Ok((number, rest)) = grab_u64(rest, additional_information) else { + return Err(()); + }; + Ok((-(1 + number as i64), rest)) + } + _ => Err(()), + } +} + +/// Write a number to the buffer +pub(crate) const fn write_number( + vec: ConstVec, + number: i64, +) -> ConstVec { + match number { + 0.. => write_major_type_and_u64(vec, MajorType::UnsignedInteger, number as u64), + ..0 => write_major_type_and_u64(vec, MajorType::NegativeInteger, (-(number + 1)) as u64), + } +} + +/// Write the major type along with a number to the buffer. The first byte +/// contains both the major type and the additional information which contains +/// either the number itself or the number of extra bytes the number occupies. +const fn write_major_type_and_u64( + vec: ConstVec, + major: MajorType, + number: u64, +) -> ConstVec { + let major = (major as u8) << 5; + match number { + // For numbers less than 24, store the number in the lower bits + // of the first byte + 0..24 => { + let additional_information = number as u8; + let byte = major | additional_information; + vec.push(byte) + } + // For larger numbers, store the number of extra bytes the number occupies + 24.. => { + let log2_additional_bytes = log2_bytes_for_number(number); + let additional_bytes = 1 << log2_additional_bytes; + let additional_information = log2_additional_bytes + 24; + let byte = major | additional_information; + let mut vec = vec.push(byte); + let mut byte = 0; + while byte < additional_bytes { + vec = vec.push((number >> ((additional_bytes - byte - 1) * 8)) as u8); + byte += 1; + } + vec + } + } +} + +/// Find the number of bytes required to store a number and return the log2 of the number of bytes. +/// This is the number stored in the additional information field if the number is more than 24. +const fn log2_bytes_for_number(number: u64) -> u8 { + let required_bytes = ((64 - number.leading_zeros()).div_ceil(8)) as u8; + #[allow(clippy::match_overlapping_arm)] + match required_bytes { + ..=1 => 0, + ..=2 => 1, + ..=4 => 2, + _ => 3, + } +} + +/// Take bytes from a slice and return the bytes and the remaining slice. +pub(crate) const fn take_bytes(bytes: &[u8]) -> Result<(&[u8], &[u8]), ()> { + let [head, rest @ ..] = bytes else { + return Err(()); + }; + let major = MajorType::from_byte(*head); + let additional_information = *head & MajorType::MASK; + if let MajorType::Bytes = major { + take_bytes_from(rest, additional_information) + } else { + Err(()) + } +} + +/// Write bytes to a buffer and return the new buffer. +pub(crate) const fn write_bytes( + vec: ConstVec, + bytes: &[u8], +) -> ConstVec { + let vec = write_major_type_and_u64(vec, MajorType::Bytes, bytes.len() as u64); + vec.extend(bytes) +} + +/// Take a string from a buffer and return the string and the remaining buffer. +pub(crate) const fn take_str(bytes: &[u8]) -> Result<(&str, &[u8]), ()> { + let [head, rest @ ..] 
= bytes else { + return Err(()); + }; + let major = MajorType::from_byte(*head); + let additional_information = *head & MajorType::MASK; + if let MajorType::Text = major { + let Ok((bytes, rest)) = take_bytes_from(rest, additional_information) else { + return Err(()); + }; + let Ok(string) = std::str::from_utf8(bytes) else { + return Err(()); + }; + Ok((string, rest)) + } else { + Err(()) + } +} + +/// Write a string to a buffer and return the new buffer. +pub(crate) const fn write_str( + vec: ConstVec, + string: &str, +) -> ConstVec { + let vec = write_major_type_and_u64(vec, MajorType::Text, string.len() as u64); + vec.extend(string.as_bytes()) +} + +/// Take the length and header of an array from a buffer and return the length and the remaining buffer. +/// You must loop over the elements of the array and parse them outside of this method. +pub(crate) const fn take_array(bytes: &[u8]) -> Result<(usize, &[u8]), ()> { + let [head, rest @ ..] = bytes else { + return Err(()); + }; + let major = MajorType::from_byte(*head); + let additional_information = *head & MajorType::MASK; + if let MajorType::Array = major { + let Ok((length, rest)) = take_len_from(rest, additional_information) else { + return Err(()); + }; + Ok((length as usize, rest)) + } else { + Err(()) + } +} + +/// Write the header and length of an array. +pub(crate) const fn write_array( + vec: ConstVec, + len: usize, +) -> ConstVec { + write_major_type_and_u64(vec, MajorType::Array, len as u64) +} + +/// Write the header and length of a map. +pub(crate) const fn write_map( + vec: ConstVec, + len: usize, +) -> ConstVec { + // We write 2 * len as the length of the map because each key-value pair is a separate entry. + write_major_type_and_u64(vec, MajorType::Map, len as u64) +} + +/// Write the key of a map entry. +pub(crate) const fn write_map_key( + value: ConstVec, + key: &str, +) -> ConstVec { + write_str(value, key) +} + +/// Take a map from the byte slice and return the map reference and the remaining bytes. +pub(crate) const fn take_map<'a>(bytes: &'a [u8]) -> Result<(MapRef<'a>, &'a [u8]), ()> { + let [head, rest @ ..] = bytes else { + return Err(()); + }; + let major = MajorType::from_byte(*head); + let additional_information = *head & MajorType::MASK; + if let MajorType::Map = major { + let Ok((length, rest)) = take_len_from(rest, additional_information) else { + return Err(()); + }; + let mut after_map = rest; + let mut items_left = length * 2; + while items_left > 0 { + // Skip the value + let Ok(len) = item_length(after_map) else { + return Err(()); + }; + let Some((_, rest)) = after_map.split_at_checked(len) else { + return Err(()); + }; + after_map = rest; + items_left -= 1; + } + Ok((MapRef::new(rest, length as usize), after_map)) + } else { + Err(()) + } +} + +/// A reference to a CBOR map. +pub(crate) struct MapRef<'a> { + /// The bytes of the map. + pub(crate) bytes: &'a [u8], + /// The length of the map. + pub(crate) len: usize, +} + +impl<'a> MapRef<'a> { + /// Create a new map reference. + const fn new(bytes: &'a [u8], len: usize) -> Self { + Self { bytes, len } + } + + /// Find a key in the map and return the buffer associated with it. 
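A worked example of the leading-byte scheme used by these helpers, written in the style of this module's own tests (the functions are `pub(crate)`, so this only makes sense inside the module). 1000 does not fit in the 5 additional-information bits, so its head byte is major type 0 with additional information 25 ("value in the next two bytes"), followed by 0x03E8 big-endian:

```rust
#[test]
fn unsigned_integer_header_example() {
    // Values below 24 fit entirely in the leading byte: 10 encodes as 0x0A.
    let vec = write_number(ConstVec::new(), 10);
    assert_eq!(vec.as_ref(), &[0x0A]);

    // 1000 needs two extra bytes: 0x19 0x03 0xE8, matching the RFC 8949 examples.
    let vec = write_number(ConstVec::new(), 1000);
    assert_eq!(vec.as_ref(), &[0x19, 0x03, 0xE8]);
}
```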
+ pub(crate) const fn find(&self, key: &str) -> Result, ()> { + let mut bytes = self.bytes; + let mut items_left = self.len; + while items_left > 0 { + let Ok((str, rest)) = take_str(bytes) else { + return Err(()); + }; + if str_eq(key, str) { + return Ok(Some(rest)); + } + // Skip the value associated with the key we don't care about + let Ok(len) = item_length(rest) else { + return Err(()); + }; + let Some((_, rest)) = rest.split_at_checked(len) else { + return Err(()); + }; + bytes = rest; + items_left -= 1; + } + Ok(None) + } +} + +/// Compare two strings for equality at compile time. +pub(crate) const fn str_eq(a: &str, b: &str) -> bool { + let a_bytes = a.as_bytes(); + let b_bytes = b.as_bytes(); + let a_len = a_bytes.len(); + let b_len = b_bytes.len(); + if a_len != b_len { + return false; + } + let mut index = 0; + while index < a_len { + if a_bytes[index] != b_bytes[index] { + return false; + } + index += 1; + } + true +} + +/// Take the length from the additional information byte and return it along with the remaining bytes. +const fn take_len_from(rest: &[u8], additional_information: u8) -> Result<(u64, &[u8]), ()> { + match additional_information { + // If additional_information < 24, the argument's value is the value of the additional information. + 0..24 => Ok((additional_information as u64, rest)), + // If additional_information is between 24 and 28, the argument's value is held in the n following bytes. + 24..28 => { + let Ok((number, rest)) = grab_u64(rest, additional_information) else { + return Err(()); + }; + Ok((number, rest)) + } + _ => Err(()), + } +} + +/// Take a list of bytes from the byte slice and the additional information byte +/// and return the bytes and the remaining bytes. +pub(crate) const fn take_bytes_from( + rest: &[u8], + additional_information: u8, +) -> Result<(&[u8], &[u8]), ()> { + let Ok((number, rest)) = grab_u64(rest, additional_information) else { + return Err(()); + }; + let Some((bytes, rest)) = rest.split_at_checked(number as usize) else { + return Err(()); + }; + Ok((bytes, rest)) +} + +/// Find the length of the number based on the additional information byte. +const fn get_length_of_number(additional_information: u8) -> u8 { + match additional_information { + 0..24 => 0, + 24..28 => 1 << (additional_information - 24), + _ => 0, + } +} + +/// Read a u64 from the byte slice and the additional information byte. +const fn grab_u64(rest: &[u8], additional_information: u8) -> Result<(u64, &[u8]), ()> { + grab_u64_with_byte_length( + rest, + get_length_of_number(additional_information), + additional_information, + ) +} + +/// Read a u64 from the byte slice and the additional information byte along with the byte length. +const fn grab_u64_with_byte_length( + mut rest: &[u8], + byte_length: u8, + additional_information: u8, +) -> Result<(u64, &[u8]), ()> { + match byte_length { + 0 => Ok((additional_information as u64, rest)), + n => { + let mut value = 0; + let mut count = 0; + while count < n { + let [next, remaining @ ..] 
= rest else { + return Err(()); + }; + value = (value << 8) | *next as u64; + rest = remaining; + count += 1; + } + Ok((value, rest)) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_byte() { + for byte in 0..=255 { + let bytes = if byte < 24 { [byte, 0] } else { [24, byte] }; + let (item, _) = take_number(&bytes).unwrap(); + assert_eq!(item, byte as _); + } + for byte in 1..=255 { + let bytes = if byte < 24 { + [(byte - 1) | 0b0010_0000, 0] + } else { + [0b0010_0000 | 24, byte - 1] + }; + let (item, _) = take_number(&bytes).unwrap(); + assert_eq!(item, -(byte as i64)); + } + } + + #[test] + fn test_byte_roundtrip() { + for byte in 0..=255 { + let vec = write_number(ConstVec::new(), byte as _); + println!("{vec:?}"); + let (item, _) = take_number(vec.as_ref()).unwrap(); + assert_eq!(item, byte as _); + } + for byte in 0..=255 { + let vec = write_number(ConstVec::new(), -(byte as i64)); + let (item, _) = take_number(vec.as_ref()).unwrap(); + assert_eq!(item, -(byte as i64)); + } + } + + #[test] + fn test_number_roundtrip() { + for _ in 0..100 { + let value = rand::random::(); + let vec = write_number(ConstVec::new(), value); + let (item, _) = take_number(vec.as_ref()).unwrap(); + assert_eq!(item, value); + } + } + + #[test] + fn test_bytes_roundtrip() { + for _ in 0..100 { + let len = (rand::random::() % 100) as usize; + let bytes = rand::random::<[u8; 100]>(); + let vec = write_bytes(ConstVec::new(), &bytes[..len]); + let (item, _) = take_bytes(vec.as_ref()).unwrap(); + assert_eq!(item, &bytes[..len]); + } + } + + #[test] + fn test_array_roundtrip() { + for _ in 0..100 { + let len = (rand::random::() % 100) as usize; + let mut vec = write_array(ConstVec::new(), len); + for i in 0..len { + vec = write_number(vec, i as _); + } + let (len, mut remaining) = take_array(vec.as_ref()).unwrap(); + for i in 0..len { + let (item, rest) = take_number(remaining).unwrap(); + remaining = rest; + assert_eq!(item, i as i64); + } + } + } + + #[test] + fn test_map_roundtrip() { + use rand::prelude::SliceRandom; + for _ in 0..100 { + let len = (rand::random::() % 10) as usize; + let mut vec = write_map(ConstVec::new(), len); + let mut random_order_indexes = (0..len).collect::>(); + random_order_indexes.shuffle(&mut rand::rng()); + for &i in &random_order_indexes { + vec = write_map_key(vec, &i.to_string()); + vec = write_number(vec, i as _); + } + println!("len: {}", len); + println!("Map: {:?}", vec); + let (map, remaining) = take_map(vec.as_ref()).unwrap(); + println!("remaining: {:?}", remaining); + assert!(remaining.is_empty()); + for i in 0..len { + let key = i.to_string(); + let key_location = map + .find(&key) + .expect("encoding is valid") + .expect("key exists"); + let (value, _) = take_number(key_location).unwrap(); + assert_eq!(value, i as i64); + } + } + } + + #[test] + fn test_item_length_str() { + #[rustfmt::skip] + let input = [ + /* text(1) */ 0x61, + /* "1" */ 0x31, + /* text(1) */ 0x61, + /* "1" */ 0x31, + ]; + let Ok(length) = item_length(&input) else { + panic!("Failed to calculate length"); + }; + assert_eq!(length, 2); + } + + #[test] + fn test_item_length_map() { + #[rustfmt::skip] + let input = [ + /* map(1) */ 0xA1, + /* text(1) */ 0x61, + /* "A" */ 0x41, + /* map(2) */ 0xA2, + /* text(3) */ 0x63, + /* "one" */ 0x6F, 0x6E, 0x65, + /* unsigned(286331153) */ 0x1A, 0x11, 0x11, 0x11, 0x11, + /* text(3) */ 0x63, + /* "two" */ 0x74, 0x77, 0x6F, + /* unsigned(34) */ 0x18, 0x22, + ]; + let Ok(length) = item_length(&input) else { + panic!("Failed to 
calculate length"); + }; + assert_eq!(length, input.len()); + } +} diff --git a/packages/const-serialize/src/const_buffers.rs b/packages/const-serialize/src/const_buffers.rs deleted file mode 100644 index 4e93ddbdbc..0000000000 --- a/packages/const-serialize/src/const_buffers.rs +++ /dev/null @@ -1,38 +0,0 @@ -/// A buffer that can be read from at compile time. This is very similar to [Cursor](std::io::Cursor) but is -/// designed to be used in const contexts. -#[derive(Debug, Clone, Copy, PartialEq)] -pub struct ConstReadBuffer<'a> { - location: usize, - memory: &'a [u8], -} - -impl<'a> ConstReadBuffer<'a> { - /// Create a new buffer from a byte slice - pub const fn new(memory: &'a [u8]) -> Self { - Self { - location: 0, - memory, - } - } - - /// Get the next byte from the buffer. Returns `None` if the buffer is empty. - /// This will return the new version of the buffer with the first byte removed. - pub const fn get(mut self) -> Option<(Self, u8)> { - if self.location >= self.memory.len() { - return None; - } - let value = self.memory[self.location]; - self.location += 1; - Some((self, value)) - } - - /// Get a reference to the underlying byte slice - pub const fn as_ref(&self) -> &[u8] { - self.memory - } - - /// Get a slice of the buffer from the current location to the end of the buffer - pub const fn remaining(&self) -> &[u8] { - self.memory.split_at(self.location).1 - } -} diff --git a/packages/const-serialize/src/const_vec.rs b/packages/const-serialize/src/const_vec.rs index 4c3c9a4a2a..5b618bd80c 100644 --- a/packages/const-serialize/src/const_vec.rs +++ b/packages/const-serialize/src/const_vec.rs @@ -1,8 +1,6 @@ #![allow(dead_code)] use std::{fmt::Debug, hash::Hash, mem::MaybeUninit}; -use crate::ConstReadBuffer; - const DEFAULT_MAX_SIZE: usize = 2usize.pow(10); /// [`ConstVec`] is a version of [`Vec`] that is usable in const contexts. It has @@ -327,22 +325,6 @@ impl ConstVec { } } -impl ConstVec { - /// Convert the [`ConstVec`] into a [`ConstReadBuffer`] - /// - /// # Example - /// ```rust - /// # use const_serialize::{ConstVec, ConstReadBuffer}; - /// const EMPTY: ConstVec = ConstVec::new(); - /// const ONE: ConstVec = EMPTY.push(1); - /// const TWO: ConstVec = ONE.push(2); - /// const READ: ConstReadBuffer = TWO.read(); - /// ``` - pub const fn read(&self) -> ConstReadBuffer<'_> { - ConstReadBuffer::new(self.as_ref()) - } -} - #[test] fn test_const_vec() { const VEC: ConstVec = { diff --git a/packages/const-serialize/src/enum.rs b/packages/const-serialize/src/enum.rs new file mode 100644 index 0000000000..953af21474 --- /dev/null +++ b/packages/const-serialize/src/enum.rs @@ -0,0 +1,135 @@ +use crate::*; + +/// Serialize an enum that is stored at the pointer passed in +pub(crate) const unsafe fn serialize_const_enum( + ptr: *const (), + mut to: ConstVec, + layout: &EnumLayout, +) -> ConstVec { + let byte_ptr = ptr as *const u8; + let discriminant = layout.discriminant.read(byte_ptr); + + let mut i = 0; + while i < layout.variants.len() { + // If the variant is the discriminated one, serialize it + let EnumVariant { + tag, name, data, .. + } = &layout.variants[i]; + if discriminant == *tag { + to = write_map(to, 1); + to = write_map_key(to, name); + let data_ptr = ptr.wrapping_byte_offset(layout.variants_offset as _); + to = serialize_const_struct(data_ptr, to, data); + break; + } + i += 1; + } + to +} + +/// The layout for an enum. The enum layout is just a discriminate size and a tag layout. 
+#[derive(Debug, Copy, Clone)] +pub struct EnumLayout { + pub(crate) size: usize, + discriminant: PrimitiveLayout, + variants_offset: usize, + variants: &'static [EnumVariant], +} + +impl EnumLayout { + /// Create a new enum layout + pub const fn new( + size: usize, + discriminant: PrimitiveLayout, + variants: &'static [EnumVariant], + ) -> Self { + let mut max_align = 1; + let mut i = 0; + while i < variants.len() { + let EnumVariant { align, .. } = &variants[i]; + if *align > max_align { + max_align = *align; + } + i += 1; + } + + let variants_offset_raw = discriminant.size; + let padding = (max_align - (variants_offset_raw % max_align)) % max_align; + let variants_offset = variants_offset_raw + padding; + + assert!(variants_offset % max_align == 0); + + Self { + size, + discriminant, + variants_offset, + variants, + } + } +} + +/// The layout for an enum variant. The enum variant layout is just a struct layout with a tag and alignment. +#[derive(Debug, Copy, Clone)] +pub struct EnumVariant { + name: &'static str, + // Note: tags may not be sequential + tag: u32, + data: StructLayout, + align: usize, +} + +impl EnumVariant { + /// Create a new enum variant layout + pub const fn new(name: &'static str, tag: u32, data: StructLayout, align: usize) -> Self { + Self { + name, + tag, + data, + align, + } + } +} + +/// Deserialize an enum type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. +pub(crate) const fn deserialize_const_enum<'a>( + from: &'a [u8], + layout: &EnumLayout, + out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { + // First, deserialize the map + let Ok((map, remaining)) = take_map(from) else { + return None; + }; + + // Then get the only field which is the tag + let Ok((deserilized_name, from)) = take_str(map.bytes) else { + return None; + }; + + // Then, deserialize the variant + let mut i = 0; + let mut matched_variant = false; + while i < layout.variants.len() { + // If the variant is the discriminated one, deserialize it + let EnumVariant { + name, data, tag, .. + } = &layout.variants[i]; + if str_eq(deserilized_name, name) { + layout.discriminant.write(*tag, out); + let Some((_, out)) = out.split_at_mut_checked(layout.variants_offset) else { + return None; + }; + if deserialize_const_struct(from, data, out).is_none() { + return None; + } + matched_variant = true; + break; + } + i += 1; + } + if !matched_variant { + return None; + } + + Some(remaining) +} diff --git a/packages/const-serialize/src/lib.rs b/packages/const-serialize/src/lib.rs index 4cc5dcff1a..fa89945ea5 100644 --- a/packages/const-serialize/src/lib.rs +++ b/packages/const-serialize/src/lib.rs @@ -1,126 +1,30 @@ #![doc = include_str!("../README.md")] #![warn(missing_docs)] -use std::{char, mem::MaybeUninit}; +use std::mem::MaybeUninit; -mod const_buffers; +mod cbor; mod const_vec; +mod r#enum; +pub use r#enum::*; +mod r#struct; +pub use r#struct::*; +mod primitive; +pub use primitive::*; +mod list; +pub use list::*; +mod array; +pub use array::*; +mod str; +pub use str::*; -pub use const_buffers::ConstReadBuffer; pub use const_serialize_macro::SerializeConst; pub use const_vec::ConstVec; -/// Plain old data for a field. Stores the offset of the field in the struct and the layout of the field. 
-#[derive(Debug, Copy, Clone)] -pub struct StructFieldLayout { - offset: usize, - layout: Layout, -} - -impl StructFieldLayout { - /// Create a new struct field layout - pub const fn new(offset: usize, layout: Layout) -> Self { - Self { offset, layout } - } -} - -/// Layout for a struct. The struct layout is just a list of fields with offsets -#[derive(Debug, Copy, Clone)] -pub struct StructLayout { - size: usize, - data: &'static [StructFieldLayout], -} - -impl StructLayout { - /// Create a new struct layout - pub const fn new(size: usize, data: &'static [StructFieldLayout]) -> Self { - Self { size, data } - } -} - -/// The layout for an enum. The enum layout is just a discriminate size and a tag layout. -#[derive(Debug, Copy, Clone)] -pub struct EnumLayout { - size: usize, - discriminant: PrimitiveLayout, - variants_offset: usize, - variants: &'static [EnumVariant], -} - -impl EnumLayout { - /// Create a new enum layout - pub const fn new( - size: usize, - discriminant: PrimitiveLayout, - variants: &'static [EnumVariant], - ) -> Self { - let mut max_align = 1; - let mut i = 0; - while i < variants.len() { - let EnumVariant { align, .. } = &variants[i]; - if *align > max_align { - max_align = *align; - } - i += 1; - } - - let variants_offset_raw = discriminant.size; - let padding = (max_align - (variants_offset_raw % max_align)) % max_align; - let variants_offset = variants_offset_raw + padding; - - assert!(variants_offset % max_align == 0); - - Self { - size, - discriminant, - variants_offset, - variants, - } - } -} - -/// The layout for an enum variant. The enum variant layout is just a struct layout with a tag and alignment. -#[derive(Debug, Copy, Clone)] -pub struct EnumVariant { - // Note: tags may not be sequential - tag: u32, - data: StructLayout, - align: usize, -} - -impl EnumVariant { - /// Create a new enum variant layout - pub const fn new(tag: u32, data: StructLayout, align: usize) -> Self { - Self { tag, data, align } - } -} - -/// The layout for a constant sized array. The array layout is just a length and an item layout. -#[derive(Debug, Copy, Clone)] -pub struct ListLayout { - len: usize, - item_layout: &'static Layout, -} - -impl ListLayout { - /// Create a new list layout - pub const fn new(len: usize, item_layout: &'static Layout) -> Self { - Self { len, item_layout } - } -} - -/// The layout for a primitive type. The bytes will be reversed if the target is big endian. -#[derive(Debug, Copy, Clone)] -pub struct PrimitiveLayout { - size: usize, -} - -impl PrimitiveLayout { - /// Create a new primitive layout - pub const fn new(size: usize) -> Self { - Self { size } - } -} +use crate::cbor::{ + str_eq, take_array, take_bytes, take_map, take_number, take_str, write_array, write_bytes, + write_map, write_map_key, write_number, +}; /// The layout for a type. This layout defines a sequence of locations and reversed or not bytes. These bytes will be copied from during serialization and copied into during deserialization. 
#[derive(Debug, Copy, Clone)] @@ -129,10 +33,12 @@ pub enum Layout { Enum(EnumLayout), /// A struct layout Struct(StructLayout), - /// A list layout - List(ListLayout), + /// An array layout + Array(ArrayLayout), /// A primitive layout Primitive(PrimitiveLayout), + /// A dynamically sized list layout + List(ListLayout), } impl Layout { @@ -141,7 +47,8 @@ impl Layout { match self { Layout::Enum(layout) => layout.size, Layout::Struct(layout) => layout.size, - Layout::List(layout) => layout.len * layout.item_layout.size(), + Layout::Array(layout) => layout.len * layout.item_layout.size(), + Layout::List(layout) => layout.size, Layout::Primitive(layout) => layout.size, } } @@ -158,533 +65,16 @@ pub unsafe trait SerializeConst: Sized { const _ASSERT: () = assert!(Self::MEMORY_LAYOUT.size() == std::mem::size_of::()); } -macro_rules! impl_serialize_const { - ($type:ty) => { - unsafe impl SerializeConst for $type { - const MEMORY_LAYOUT: Layout = Layout::Primitive(PrimitiveLayout { - size: std::mem::size_of::<$type>(), - }); - } - }; -} - -impl_serialize_const!(u8); -impl_serialize_const!(u16); -impl_serialize_const!(u32); -impl_serialize_const!(u64); -impl_serialize_const!(i8); -impl_serialize_const!(i16); -impl_serialize_const!(i32); -impl_serialize_const!(i64); -impl_serialize_const!(bool); -impl_serialize_const!(f32); -impl_serialize_const!(f64); - -unsafe impl SerializeConst for [T; N] { - const MEMORY_LAYOUT: Layout = Layout::List(ListLayout { - len: N, - item_layout: &T::MEMORY_LAYOUT, - }); -} - -macro_rules! impl_serialize_const_tuple { - ($($generic:ident: $generic_number:expr),*) => { - impl_serialize_const_tuple!(@impl ($($generic,)*) = $($generic: $generic_number),*); - }; - (@impl $inner:ty = $($generic:ident: $generic_number:expr),*) => { - unsafe impl<$($generic: SerializeConst),*> SerializeConst for ($($generic,)*) { - const MEMORY_LAYOUT: Layout = { - Layout::Struct(StructLayout { - size: std::mem::size_of::<($($generic,)*)>(), - data: &[ - $( - StructFieldLayout::new(std::mem::offset_of!($inner, $generic_number), $generic::MEMORY_LAYOUT), - )* - ], - }) - }; - } - }; -} - -impl_serialize_const_tuple!(T1: 0); -impl_serialize_const_tuple!(T1: 0, T2: 1); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6, T8: 7); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6, T8: 7, T9: 8); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6, T8: 7, T9: 8, T10: 9); - -const MAX_STR_SIZE: usize = 256; - -/// A string that is stored in a constant sized buffer that can be serialized and deserialized at compile time -#[derive(Eq, PartialEq, PartialOrd, Clone, Copy, Hash)] -pub struct ConstStr { - bytes: [u8; MAX_STR_SIZE], - len: u32, -} - -#[cfg(feature = "serde")] -mod serde_bytes { - use serde::{Deserialize, Serialize, Serializer}; - - use crate::ConstStr; - - impl Serialize for ConstStr { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(self.as_str()) - } - } - - impl<'de> Deserialize<'de> for ConstStr { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - 
Ok(ConstStr::new(&s)) - } - } -} - -unsafe impl SerializeConst for ConstStr { - const MEMORY_LAYOUT: Layout = Layout::Struct(StructLayout { - size: std::mem::size_of::(), - data: &[ - StructFieldLayout::new( - std::mem::offset_of!(Self, bytes), - Layout::List(ListLayout { - len: MAX_STR_SIZE, - item_layout: &Layout::Primitive(PrimitiveLayout { - size: std::mem::size_of::(), - }), - }), - ), - StructFieldLayout::new( - std::mem::offset_of!(Self, len), - Layout::Primitive(PrimitiveLayout { - size: std::mem::size_of::(), - }), - ), - ], - }); -} - -impl ConstStr { - /// Create a new constant string - pub const fn new(s: &str) -> Self { - let str_bytes = s.as_bytes(); - let mut bytes = [0; MAX_STR_SIZE]; - let mut i = 0; - while i < str_bytes.len() { - bytes[i] = str_bytes[i]; - i += 1; - } - Self { - bytes, - len: str_bytes.len() as u32, - } - } - - /// Get a reference to the string - pub const fn as_str(&self) -> &str { - let str_bytes = self.bytes.split_at(self.len as usize).0; - match std::str::from_utf8(str_bytes) { - Ok(s) => s, - Err(_) => panic!( - "Invalid utf8; ConstStr should only ever be constructed from valid utf8 strings" - ), - } - } - - /// Get the length of the string - pub const fn len(&self) -> usize { - self.len as usize - } - - /// Check if the string is empty - pub const fn is_empty(&self) -> bool { - self.len == 0 - } - - /// Push a character onto the string - pub const fn push(self, byte: char) -> Self { - assert!(byte.is_ascii(), "Only ASCII bytes are supported"); - let (bytes, len) = char_to_bytes(byte); - let (str, _) = bytes.split_at(len); - let Ok(str) = std::str::from_utf8(str) else { - panic!("Invalid utf8; char_to_bytes should always return valid utf8 bytes") - }; - self.push_str(str) - } - - /// Push a str onto the string - pub const fn push_str(self, str: &str) -> Self { - let Self { mut bytes, len } = self; - assert!( - str.len() + len as usize <= MAX_STR_SIZE, - "String is too long" - ); - let str_bytes = str.as_bytes(); - let new_len = len as usize + str_bytes.len(); - let mut i = 0; - while i < str_bytes.len() { - bytes[len as usize + i] = str_bytes[i]; - i += 1; - } - Self { - bytes, - len: new_len as u32, - } - } - - /// Split the string at a byte index. 
The byte index must be a char boundary - pub const fn split_at(self, index: usize) -> (Self, Self) { - let (left, right) = self.bytes.split_at(index); - let left = match std::str::from_utf8(left) { - Ok(s) => s, - Err(_) => { - panic!("Invalid utf8; you cannot split at a byte that is not a char boundary") - } - }; - let right = match std::str::from_utf8(right) { - Ok(s) => s, - Err(_) => { - panic!("Invalid utf8; you cannot split at a byte that is not a char boundary") - } - }; - (Self::new(left), Self::new(right)) - } - - /// Split the string at the last occurrence of a character - pub const fn rsplit_once(&self, char: char) -> Option<(Self, Self)> { - let str = self.as_str(); - let mut index = str.len() - 1; - // First find the bytes we are searching for - let (char_bytes, len) = char_to_bytes(char); - let (char_bytes, _) = char_bytes.split_at(len); - let bytes = str.as_bytes(); - - // Then walk backwards from the end of the string - loop { - let byte = bytes[index]; - // Look for char boundaries in the string and check if the bytes match - if let Some(char_boundary_len) = utf8_char_boundary_to_char_len(byte) { - // Split up the string into three sections: [before_char, in_char, after_char] - let (before_char, after_index) = bytes.split_at(index); - let (in_char, after_char) = after_index.split_at(char_boundary_len as usize); - if in_char.len() != char_boundary_len as usize { - panic!("in_char.len() should always be equal to char_boundary_len as usize") - } - // Check if the bytes for the current char and the target char match - let mut in_char_eq = true; - let mut i = 0; - let min_len = if in_char.len() < char_bytes.len() { - in_char.len() - } else { - char_bytes.len() - }; - while i < min_len { - in_char_eq &= in_char[i] == char_bytes[i]; - i += 1; - } - // If they do, convert the bytes to strings and return the split strings - if in_char_eq { - let Ok(before_char_str) = std::str::from_utf8(before_char) else { - panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when the byte is a character boundary") - }; - let Ok(after_char_str) = std::str::from_utf8(after_char) else { - panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when the byte is a character boundary") - }; - return Some((Self::new(before_char_str), Self::new(after_char_str))); - } - } - match index.checked_sub(1) { - Some(new_index) => index = new_index, - None => return None, - } - } - } - - /// Split the string at the first occurrence of a character - pub const fn split_once(&self, char: char) -> Option<(Self, Self)> { - let str = self.as_str(); - let mut index = 0; - // First find the bytes we are searching for - let (char_bytes, len) = char_to_bytes(char); - let (char_bytes, _) = char_bytes.split_at(len); - let bytes = str.as_bytes(); - - // Then walk forwards from the start of the string - while index < bytes.len() { - let byte = bytes[index]; - // Look for char boundaries in the string and check if the bytes match - if let Some(char_boundary_len) = utf8_char_boundary_to_char_len(byte) { - // Split up the string into three sections: [before_char, in_char, after_char] - let (before_char, after_index) = bytes.split_at(index); - let (in_char, after_char) = after_index.split_at(char_boundary_len as usize); - if in_char.len() != char_boundary_len as usize { - panic!("in_char.len() should always be equal to char_boundary_len as usize") - } - // Check if the bytes for the current char and the target char match - let mut in_char_eq = true; - let mut i = 0; - let min_len = if 
in_char.len() < char_bytes.len() { - in_char.len() - } else { - char_bytes.len() - }; - while i < min_len { - in_char_eq &= in_char[i] == char_bytes[i]; - i += 1; - } - // If they do, convert the bytes to strings and return the split strings - if in_char_eq { - let Ok(before_char_str) = std::str::from_utf8(before_char) else { - panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when the byte is a character boundary") - }; - let Ok(after_char_str) = std::str::from_utf8(after_char) else { - panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when the byte is a character boundary") - }; - return Some((Self::new(before_char_str), Self::new(after_char_str))); - } - } - index += 1 - } - None - } -} - -impl std::fmt::Debug for ConstStr { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{:?}", self.as_str()) - } -} - -#[test] -fn test_rsplit_once() { - let str = ConstStr::new("hello world"); - assert_eq!( - str.rsplit_once(' '), - Some((ConstStr::new("hello"), ConstStr::new("world"))) - ); - - let unicode_str = ConstStr::new("hi😀hello😀world😀world"); - assert_eq!( - unicode_str.rsplit_once('😀'), - Some((ConstStr::new("hi😀hello😀world"), ConstStr::new("world"))) - ); - assert_eq!(unicode_str.rsplit_once('❌'), None); - - for _ in 0..100 { - let random_str: String = (0..rand::random::() % 50) - .map(|_| rand::random::()) - .collect(); - let konst = ConstStr::new(&random_str); - let mut seen_chars = std::collections::HashSet::new(); - for char in random_str.chars().rev() { - let (char_bytes, len) = char_to_bytes(char); - let char_bytes = &char_bytes[..len]; - assert_eq!(char_bytes, char.to_string().as_bytes()); - if seen_chars.contains(&char) { - continue; - } - seen_chars.insert(char); - let (correct_left, correct_right) = random_str.rsplit_once(char).unwrap(); - let (left, right) = konst.rsplit_once(char).unwrap(); - println!("splitting {random_str:?} at {char:?}"); - assert_eq!(left.as_str(), correct_left); - assert_eq!(right.as_str(), correct_right); - } - } -} - -const CONTINUED_CHAR_MASK: u8 = 0b10000000; -const BYTE_CHAR_BOUNDARIES: [u8; 4] = [0b00000000, 0b11000000, 0b11100000, 0b11110000]; - -// Const version of https://doc.rust-lang.org/src/core/char/methods.rs.html#1765-1797 -const fn char_to_bytes(char: char) -> ([u8; 4], usize) { - let code = char as u32; - let len = char.len_utf8(); - let mut bytes = [0; 4]; - match len { - 1 => { - bytes[0] = code as u8; - } - 2 => { - bytes[0] = ((code >> 6) & 0x1F) as u8 | BYTE_CHAR_BOUNDARIES[1]; - bytes[1] = (code & 0x3F) as u8 | CONTINUED_CHAR_MASK; - } - 3 => { - bytes[0] = ((code >> 12) & 0x0F) as u8 | BYTE_CHAR_BOUNDARIES[2]; - bytes[1] = ((code >> 6) & 0x3F) as u8 | CONTINUED_CHAR_MASK; - bytes[2] = (code & 0x3F) as u8 | CONTINUED_CHAR_MASK; - } - 4 => { - bytes[0] = ((code >> 18) & 0x07) as u8 | BYTE_CHAR_BOUNDARIES[3]; - bytes[1] = ((code >> 12) & 0x3F) as u8 | CONTINUED_CHAR_MASK; - bytes[2] = ((code >> 6) & 0x3F) as u8 | CONTINUED_CHAR_MASK; - bytes[3] = (code & 0x3F) as u8 | CONTINUED_CHAR_MASK; - } - _ => panic!( - "encode_utf8: need more than 4 bytes to encode the unicode character, but the buffer has 4 bytes" - ), - }; - (bytes, len) -} - -#[test] -fn fuzz_char_to_bytes() { - use std::char; - for _ in 0..100 { - let char = rand::random::(); - let (bytes, len) = char_to_bytes(char); - let str = std::str::from_utf8(&bytes[..len]).unwrap(); - assert_eq!(char.to_string(), str); - } -} - -const fn utf8_char_boundary_to_char_len(byte: u8) -> Option { - match 
byte { - 0b00000000..=0b01111111 => Some(1), - 0b11000000..=0b11011111 => Some(2), - 0b11100000..=0b11101111 => Some(3), - 0b11110000..=0b11111111 => Some(4), - _ => None, - } -} - -#[test] -fn fuzz_utf8_byte_to_char_len() { - for _ in 0..100 { - let random_string: String = (0..rand::random::()) - .map(|_| rand::random::()) - .collect(); - let bytes = random_string.as_bytes(); - let chars: std::collections::HashMap<_, _> = random_string.char_indices().collect(); - for (i, byte) in bytes.iter().enumerate() { - match utf8_char_boundary_to_char_len(*byte) { - Some(char_len) => { - let char = chars - .get(&i) - .unwrap_or_else(|| panic!("{byte:b} is not a character boundary")); - assert_eq!(char.len_utf8(), char_len as usize); - } - None => { - assert!(!chars.contains_key(&i), "{byte:b} is a character boundary"); - } - } - } - } -} - -/// Serialize a struct that is stored at the pointer passed in -const fn serialize_const_struct( - ptr: *const (), - mut to: ConstVec, - layout: &StructLayout, -) -> ConstVec { - let mut i = 0; - while i < layout.data.len() { - // Serialize the field at the offset pointer in the struct - let StructFieldLayout { offset, layout } = &layout.data[i]; - let field = ptr.wrapping_byte_add(*offset as _); - to = serialize_const_ptr(field, to, layout); - i += 1; - } - to -} - -/// Serialize an enum that is stored at the pointer passed in -const fn serialize_const_enum( - ptr: *const (), - mut to: ConstVec, - layout: &EnumLayout, -) -> ConstVec { - let mut discriminant = 0; - - let byte_ptr = ptr as *const u8; - let mut offset = 0; - while offset < layout.discriminant.size { - // If the bytes are reversed, walk backwards from the end of the number when pushing bytes - let byte = if cfg!(target_endian = "big") { - unsafe { - byte_ptr - .wrapping_byte_add((layout.discriminant.size - offset - 1) as _) - .read() - } - } else { - unsafe { byte_ptr.wrapping_byte_add(offset as _).read() } - }; - to = to.push(byte); - discriminant |= (byte as u32) << (offset * 8); - offset += 1; - } - - let mut i = 0; - while i < layout.variants.len() { - // If the variant is the discriminated one, serialize it - let EnumVariant { tag, data, .. 
} = &layout.variants[i]; - if discriminant == *tag { - let data_ptr = ptr.wrapping_byte_offset(layout.variants_offset as _); - to = serialize_const_struct(data_ptr, to, data); - break; - } - i += 1; - } - to -} - -/// Serialize a primitive type that is stored at the pointer passed in -const fn serialize_const_primitive( - ptr: *const (), - mut to: ConstVec, - layout: &PrimitiveLayout, -) -> ConstVec { - let ptr = ptr as *const u8; - let mut offset = 0; - while offset < layout.size { - // If the bytes are reversed, walk backwards from the end of the number when pushing bytes - if cfg!(any(target_endian = "big", feature = "test-big-endian")) { - to = to.push(unsafe { - ptr.wrapping_byte_offset((layout.size - offset - 1) as _) - .read() - }); - } else { - to = to.push(unsafe { ptr.wrapping_byte_offset(offset as _).read() }); - } - offset += 1; - } - to -} - -/// Serialize a constant sized array that is stored at the pointer passed in -const fn serialize_const_list( +/// Serialize a pointer to a type that is stored at the pointer passed in +const unsafe fn serialize_const_ptr( ptr: *const (), - mut to: ConstVec, - layout: &ListLayout, + to: ConstVec, + layout: &Layout, ) -> ConstVec { - let len = layout.len; - let mut i = 0; - while i < len { - let field = ptr.wrapping_byte_offset((i * layout.item_layout.size()) as _); - to = serialize_const_ptr(field, to, layout.item_layout); - i += 1; - } - to -} - -/// Serialize a pointer to a type that is stored at the pointer passed in -const fn serialize_const_ptr(ptr: *const (), to: ConstVec, layout: &Layout) -> ConstVec { match layout { Layout::Enum(layout) => serialize_const_enum(ptr, to, layout), Layout::Struct(layout) => serialize_const_struct(ptr, to, layout), + Layout::Array(layout) => serialize_const_array(ptr, to, layout), Layout::List(layout) => serialize_const_list(ptr, to, layout), Layout::Primitive(layout) => serialize_const_primitive(ptr, to, layout), } @@ -710,156 +100,31 @@ const fn serialize_const_ptr(ptr: *const (), to: ConstVec, layout: &Layout) /// b: 0x22, /// c: 0x33333333, /// }, buffer); -/// let buf = buffer.read(); -/// assert_eq!(buf.as_ref(), &[0x11, 0x11, 0x11, 0x11, 0x22, 0x33, 0x33, 0x33, 0x33]); +/// assert_eq!(buffer.as_ref(), &[0xa3, 0x61, 0x61, 0x1a, 0x11, 0x11, 0x11, 0x11, 0x61, 0x62, 0x18, 0x22, 0x61, 0x63, 0x1a, 0x33, 0x33, 0x33, 0x33]); /// ``` #[must_use = "The data is serialized into the returned buffer"] pub const fn serialize_const(data: &T, to: ConstVec) -> ConstVec { let ptr = data as *const T as *const (); - serialize_const_ptr(ptr, to, &T::MEMORY_LAYOUT) -} - -/// Deserialize a primitive type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. -const fn deserialize_const_primitive<'a, const N: usize>( - mut from: ConstReadBuffer<'a>, - layout: &PrimitiveLayout, - out: (usize, [MaybeUninit; N]), -) -> Option<(ConstReadBuffer<'a>, [MaybeUninit; N])> { - let (start, mut out) = out; - let mut offset = 0; - while offset < layout.size { - // If the bytes are reversed, walk backwards from the end of the number when filling in bytes - let (from_new, value) = match from.get() { - Some(data) => data, - None => return None, - }; - from = from_new; - if cfg!(any(target_endian = "big", feature = "test-big-endian")) { - out[start + layout.size - offset - 1] = MaybeUninit::new(value); - } else { - out[start + offset] = MaybeUninit::new(value); - } - offset += 1; - } - Some((from, out)) -} - -/// Deserialize a struct type into the out buffer at the offset passed in. 
Returns a new version of the buffer with the data added. -const fn deserialize_const_struct<'a, const N: usize>( - mut from: ConstReadBuffer<'a>, - layout: &StructLayout, - out: (usize, [MaybeUninit; N]), -) -> Option<(ConstReadBuffer<'a>, [MaybeUninit; N])> { - let (start, mut out) = out; - let mut i = 0; - while i < layout.data.len() { - // Deserialize the field at the offset pointer in the struct - let StructFieldLayout { offset, layout } = &layout.data[i]; - let (new_from, new_out) = match deserialize_const_ptr(from, layout, (start + *offset, out)) - { - Some(data) => data, - None => return None, - }; - from = new_from; - out = new_out; - i += 1; - } - Some((from, out)) -} - -/// Deserialize an enum type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. -const fn deserialize_const_enum<'a, const N: usize>( - mut from: ConstReadBuffer<'a>, - layout: &EnumLayout, - out: (usize, [MaybeUninit; N]), -) -> Option<(ConstReadBuffer<'a>, [MaybeUninit; N])> { - let (start, mut out) = out; - let mut discriminant = 0; - - // First, deserialize the discriminant - let mut offset = 0; - while offset < layout.discriminant.size { - // If the bytes are reversed, walk backwards from the end of the number when filling in bytes - let (from_new, value) = match from.get() { - Some(data) => data, - None => return None, - }; - from = from_new; - if cfg!(target_endian = "big") { - out[start + layout.size - offset - 1] = MaybeUninit::new(value); - discriminant |= (value as u32) << ((layout.discriminant.size - offset - 1) * 8); - } else { - out[start + offset] = MaybeUninit::new(value); - discriminant |= (value as u32) << (offset * 8); - } - offset += 1; - } - - // Then, deserialize the variant - let mut i = 0; - let mut matched_variant = false; - while i < layout.variants.len() { - // If the variant is the discriminated one, deserialize it - let EnumVariant { tag, data, .. } = &layout.variants[i]; - if discriminant == *tag { - let offset = layout.variants_offset; - let (new_from, new_out) = - match deserialize_const_struct(from, data, (start + offset, out)) { - Some(data) => data, - None => return None, - }; - from = new_from; - out = new_out; - matched_variant = true; - break; - } - i += 1; - } - if !matched_variant { - return None; - } - - Some((from, out)) -} - -/// Deserialize a list type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. -const fn deserialize_const_list<'a, const N: usize>( - mut from: ConstReadBuffer<'a>, - layout: &ListLayout, - out: (usize, [MaybeUninit; N]), -) -> Option<(ConstReadBuffer<'a>, [MaybeUninit; N])> { - let (start, mut out) = out; - let len = layout.len; - let item_layout = layout.item_layout; - let mut i = 0; - while i < len { - let (new_from, new_out) = - match deserialize_const_ptr(from, item_layout, (start + i * item_layout.size(), out)) { - Some(data) => data, - None => return None, - }; - from = new_from; - out = new_out; - i += 1; - } - Some((from, out)) + // SAFETY: The pointer is valid and the layout is correct + unsafe { serialize_const_ptr(ptr, to, &T::MEMORY_LAYOUT) } } /// Deserialize a type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. 
-const fn deserialize_const_ptr<'a, const N: usize>( - from: ConstReadBuffer<'a>, +const fn deserialize_const_ptr<'a>( + from: &'a [u8], layout: &Layout, - out: (usize, [MaybeUninit; N]), -) -> Option<(ConstReadBuffer<'a>, [MaybeUninit; N])> { + out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { match layout { Layout::Enum(layout) => deserialize_const_enum(from, layout, out), Layout::Struct(layout) => deserialize_const_struct(from, layout, out), + Layout::Array(layout) => deserialize_const_array(from, layout, out), Layout::List(layout) => deserialize_const_list(from, layout, out), Layout::Primitive(layout) => deserialize_const_primitive(from, layout, out), } } -/// Deserialize a type into the output buffer. Accepts `(type, ConstVec)` as input and returns `Option<(ConstReadBuffer, Instance of type)>` +/// Deserialize a type into the output buffer. Accepts `(type, ConstVec)` as input and returns `Option<(&'a [u8], Instance of type)>` /// /// # Example /// ```rust @@ -879,7 +144,7 @@ const fn deserialize_const_ptr<'a, const N: usize>( /// c: 0x33333333, /// d: 0x44444444, /// }, buffer); -/// let buf = buffer.read(); +/// let buf = buffer.as_ref(); /// assert_eq!(deserialize_const!(Struct, buf).unwrap().1, Struct { /// a: 0x11111111, /// b: 0x22, @@ -902,14 +167,13 @@ macro_rules! deserialize_const { /// N must be `std::mem::size_of::()` #[must_use = "The data is deserialized from the input buffer"] pub const unsafe fn deserialize_const_raw( - from: ConstReadBuffer, -) -> Option<(ConstReadBuffer, T)> { + from: &[u8], +) -> Option<(&[u8], T)> { // Create uninitized memory with the size of the type - let out = [MaybeUninit::uninit(); N]; + let mut out = [MaybeUninit::uninit(); N]; // Fill in the bytes into the buffer for the type - let (from, out) = match deserialize_const_ptr(from, &T::MEMORY_LAYOUT, (0, out)) { - Some(data) => data, - None => return None, + let Some(from) = deserialize_const_ptr(from, &T::MEMORY_LAYOUT, &mut out) else { + return None; }; // Now that the memory is filled in, transmute it into the type Some((from, unsafe { diff --git a/packages/const-serialize/src/list.rs b/packages/const-serialize/src/list.rs new file mode 100644 index 0000000000..1b94a2100b --- /dev/null +++ b/packages/const-serialize/src/list.rs @@ -0,0 +1,119 @@ +use crate::*; + +/// The layout for a dynamically sized list. The list layout is just a length and an item layout. 
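These signatures drop `ConstReadBuffer` in favor of plain byte slices, so callers now pass the output of `ConstVec::as_ref()` straight into `deserialize_const!` and get back the unread remainder of that slice alongside the value. A short round-trip sketch in the style of the updated tests (the `Point` type and the imports are illustrative, not part of the crate):

```rust
use const_serialize::{deserialize_const, serialize_const, ConstVec, SerializeConst};

#[derive(Debug, PartialEq, SerializeConst)]
struct Point {
    x: u32,
    y: u32,
}

#[test]
fn round_trip_over_a_byte_slice() {
    let mut buf = ConstVec::new();
    buf = serialize_const(&Point { x: 1, y: 2 }, buf);
    // `as_ref` replaces the old `read()` call; the macro returns
    // `Option<(&[u8], Point)>`, where the slice is whatever input was not
    // consumed while deserializing the value.
    let bytes = buf.as_ref();
    let (_rest, value) = deserialize_const!(Point, bytes).unwrap();
    assert_eq!(value, Point { x: 1, y: 2 });
}
```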
+#[derive(Debug, Copy, Clone)] +pub struct ListLayout { + /// The size of the struct backing the list + pub(crate) size: usize, + /// The byte offset of the length field + len_offset: usize, + /// The layout of the length field + len_layout: PrimitiveLayout, + /// The byte offset of the data field + data_offset: usize, + /// The layout of the data field + data_layout: ArrayLayout, +} + +impl ListLayout { + /// Create a new list layout + pub const fn new( + size: usize, + len_offset: usize, + len_layout: PrimitiveLayout, + data_offset: usize, + data_layout: ArrayLayout, + ) -> Self { + Self { + size, + len_offset, + len_layout, + data_offset, + data_layout, + } + } +} + +/// Serialize a dynamically sized list that is stored at the pointer passed in +pub(crate) const unsafe fn serialize_const_list( + ptr: *const (), + mut to: ConstVec, + layout: &ListLayout, +) -> ConstVec { + // Read the length of the list + let len_ptr = ptr.wrapping_byte_offset(layout.len_offset as _); + let len = layout.len_layout.read(len_ptr as *const u8) as usize; + + let data_ptr = ptr.wrapping_byte_offset(layout.data_offset as _); + let item_layout = layout.data_layout.item_layout; + // If the item size is 1, deserialize as bytes directly + if item_layout.size() == 1 { + let slice = std::slice::from_raw_parts(data_ptr as *const u8, len); + to = write_bytes(to, slice); + } + // Otherwise, deserialize as a list of items + else { + let mut i = 0; + to = write_array(to, len); + while i < len { + let item = data_ptr.wrapping_byte_offset((i * item_layout.size()) as _); + to = serialize_const_ptr(item, to, item_layout); + i += 1; + } + } + to +} + +/// Deserialize a list type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. +pub(crate) const fn deserialize_const_list<'a>( + from: &'a [u8], + layout: &ListLayout, + out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { + let Some((_, len_out)) = out.split_at_mut_checked(layout.len_offset) else { + return None; + }; + + // If the list items are only one byte, serialize as bytes directly + let item_layout = layout.data_layout.item_layout; + if item_layout.size() == 1 { + let Ok((bytes, new_from)) = take_bytes(from) else { + return None; + }; + // Write out the length of the list + layout.len_layout.write(bytes.len() as u32, len_out); + let Some((_, data_out)) = out.split_at_mut_checked(layout.data_offset) else { + return None; + }; + let mut offset = 0; + while offset < bytes.len() { + data_out[offset] = MaybeUninit::new(bytes[offset]); + offset += 1; + } + Some(new_from) + } + // Otherwise, serialize as an list of objects + else { + let Ok((len, mut from)) = take_array(from) else { + return None; + }; + // Write out the length of the list + layout.len_layout.write(len as u32, len_out); + let Some((_, mut data_out)) = out.split_at_mut_checked(layout.data_offset) else { + return None; + }; + let mut i = 0; + while i < len { + let Some(new_from) = deserialize_const_ptr(from, item_layout, data_out) else { + return None; + }; + let Some((_, item_out)) = data_out.split_at_mut_checked(item_layout.size()) else { + return None; + }; + data_out = item_out; + from = new_from; + i += 1; + } + Some(from) + } +} diff --git a/packages/const-serialize/src/primitive.rs b/packages/const-serialize/src/primitive.rs new file mode 100644 index 0000000000..0c511c3887 --- /dev/null +++ b/packages/const-serialize/src/primitive.rs @@ -0,0 +1,121 @@ +use crate::*; +use std::mem::MaybeUninit; + +/// The layout for a primitive type. 
The bytes will be reversed if the target is big endian. +#[derive(Debug, Copy, Clone)] +pub struct PrimitiveLayout { + pub(crate) size: usize, +} + +impl PrimitiveLayout { + /// Create a new primitive layout + pub const fn new(size: usize) -> Self { + Self { size } + } + + /// Read the value from the given pointer + /// + /// # Safety + /// The pointer must be valid for reads of `self.size` bytes. + pub const unsafe fn read(self, byte_ptr: *const u8) -> u32 { + let mut value = 0; + let mut offset = 0; + while offset < self.size { + // If the bytes are reversed, walk backwards from the end of the number when pushing bytes + let byte = if cfg!(target_endian = "big") { + unsafe { + byte_ptr + .wrapping_byte_add((self.size - offset - 1) as _) + .read() + } + } else { + unsafe { byte_ptr.wrapping_byte_add(offset as _).read() } + }; + value |= (byte as u32) << (offset * 8); + offset += 1; + } + value + } + + /// Write the value to the given buffer + pub const fn write(self, value: u32, out: &mut [MaybeUninit]) { + let bytes = value.to_ne_bytes(); + let mut offset = 0; + while offset < self.size { + out[offset] = MaybeUninit::new(bytes[offset]); + offset += 1; + } + } +} + +macro_rules! impl_serialize_const { + ($type:ty) => { + unsafe impl SerializeConst for $type { + const MEMORY_LAYOUT: Layout = Layout::Primitive(PrimitiveLayout { + size: std::mem::size_of::<$type>(), + }); + } + }; +} + +impl_serialize_const!(u8); +impl_serialize_const!(u16); +impl_serialize_const!(u32); +impl_serialize_const!(u64); +impl_serialize_const!(i8); +impl_serialize_const!(i16); +impl_serialize_const!(i32); +impl_serialize_const!(i64); +impl_serialize_const!(bool); +impl_serialize_const!(f32); +impl_serialize_const!(f64); + +/// Serialize a primitive type that is stored at the pointer passed in +pub(crate) const unsafe fn serialize_const_primitive( + ptr: *const (), + to: ConstVec, + layout: &PrimitiveLayout, +) -> ConstVec { + let ptr = ptr as *const u8; + let mut offset = 0; + let mut i64_bytes = [0u8; 8]; + while offset < layout.size { + // If the bytes are reversed, walk backwards from the end of the number when pushing bytes + let byte = unsafe { + if cfg!(any(target_endian = "big", feature = "test-big-endian")) { + ptr.wrapping_byte_offset((layout.size - offset - 1) as _) + .read() + } else { + ptr.wrapping_byte_offset(offset as _).read() + } + }; + i64_bytes[offset] = byte; + offset += 1; + } + let number = i64::from_ne_bytes(i64_bytes); + write_number(to, number) +} + +/// Deserialize a primitive type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. 
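Primitives are now routed through `write_number` instead of being copied byte-for-byte, so an integer's serialized form is generally no longer just its little-endian bytes; that is presumably why the `to_le_bytes`/`to_be_bytes` assertions disappear from the primitive tests later in this diff. Only the round trip is stable, as in this sketch (the test name is an assumption; the imports match the existing test files):

```rust
use const_serialize::{deserialize_const, serialize_const, ConstVec};

#[test]
fn primitive_round_trip() {
    let mut buf = ConstVec::new();
    buf = serialize_const(&1234u32, buf);
    // The exact bytes are an implementation detail of the number encoding,
    // so only the value surviving a round trip is checked here.
    let bytes = buf.as_ref();
    let (_, value) = deserialize_const!(u32, bytes).unwrap();
    assert_eq!(value, 1234u32);
}
```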
+pub(crate) const fn deserialize_const_primitive<'a>( + from: &'a [u8], + layout: &PrimitiveLayout, + out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { + let mut offset = 0; + let Ok((number, from)) = take_number(from) else { + return None; + }; + let bytes = number.to_le_bytes(); + while offset < layout.size { + // If the bytes are reversed, walk backwards from the end of the number when filling in bytes + let byte = bytes[offset]; + if cfg!(any(target_endian = "big", feature = "test-big-endian")) { + out[layout.size - offset - 1] = MaybeUninit::new(byte); + } else { + out[offset] = MaybeUninit::new(byte); + } + offset += 1; + } + Some(from) +} diff --git a/packages/const-serialize/src/str.rs b/packages/const-serialize/src/str.rs new file mode 100644 index 0000000000..f838e23505 --- /dev/null +++ b/packages/const-serialize/src/str.rs @@ -0,0 +1,391 @@ +use crate::*; +use std::{char, hash::Hash, mem::MaybeUninit}; + +const MAX_STR_SIZE: usize = 256; + +/// A string that is stored in a constant sized buffer that can be serialized and deserialized at compile time +#[derive(Clone, Copy, Debug)] +pub struct ConstStr { + pub(crate) bytes: [MaybeUninit; MAX_STR_SIZE], + pub(crate) len: u32, +} + +#[cfg(feature = "serde")] +mod serde_bytes { + use serde::{Deserialize, Serialize, Serializer}; + + use crate::ConstStr; + + impl Serialize for ConstStr { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(self.as_str()) + } + } + + impl<'de> Deserialize<'de> for ConstStr { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + Ok(ConstStr::new(&s)) + } + } +} + +unsafe impl SerializeConst for ConstStr { + const MEMORY_LAYOUT: Layout = Layout::List(ListLayout::new( + std::mem::size_of::(), + std::mem::offset_of!(Self, len), + PrimitiveLayout { + size: std::mem::size_of::(), + }, + std::mem::offset_of!(Self, bytes), + ArrayLayout { + len: MAX_STR_SIZE, + item_layout: &Layout::Primitive(PrimitiveLayout { + size: std::mem::size_of::(), + }), + }, + )); +} + +impl ConstStr { + /// Create a new constant string + pub const fn new(s: &str) -> Self { + let str_bytes = s.as_bytes(); + let mut bytes = [MaybeUninit::uninit(); MAX_STR_SIZE]; + let mut i = 0; + while i < str_bytes.len() { + bytes[i] = MaybeUninit::new(str_bytes[i]); + i += 1; + } + Self { + bytes, + len: str_bytes.len() as u32, + } + } + + /// Get the bytes of the initialized portion of the string + const fn bytes(&self) -> &[u8] { + // Safety: All bytes up to the pointer are initialized + unsafe { + &*(self.bytes.split_at(self.len as usize).0 as *const [MaybeUninit] + as *const [u8]) + } + } + + /// Get a reference to the string + pub const fn as_str(&self) -> &str { + let str_bytes = self.bytes(); + match std::str::from_utf8(str_bytes) { + Ok(s) => s, + Err(_) => panic!( + "Invalid utf8; ConstStr should only ever be constructed from valid utf8 strings" + ), + } + } + + /// Get the length of the string + pub const fn len(&self) -> usize { + self.len as usize + } + + /// Check if the string is empty + pub const fn is_empty(&self) -> bool { + self.len == 0 + } + + /// Push a character onto the string + pub const fn push(self, byte: char) -> Self { + assert!(byte.is_ascii(), "Only ASCII bytes are supported"); + let (bytes, len) = char_to_bytes(byte); + let (str, _) = bytes.split_at(len); + let Ok(str) = std::str::from_utf8(str) else { + panic!("Invalid utf8; char_to_bytes should always return valid utf8 bytes") + }; + 
self.push_str(str) + } + + /// Push a str onto the string + pub const fn push_str(self, str: &str) -> Self { + let Self { mut bytes, len } = self; + assert!( + str.len() + len as usize <= MAX_STR_SIZE, + "String is too long" + ); + let str_bytes = str.as_bytes(); + let new_len = len as usize + str_bytes.len(); + let mut i = 0; + while i < str_bytes.len() { + bytes[len as usize + i] = MaybeUninit::new(str_bytes[i]); + i += 1; + } + Self { + bytes, + len: new_len as u32, + } + } + + /// Split the string at a byte index. The byte index must be a char boundary + pub const fn split_at(self, index: usize) -> (Self, Self) { + let (left, right) = self.bytes().split_at(index); + let left = match std::str::from_utf8(left) { + Ok(s) => s, + Err(_) => { + panic!("Invalid utf8; you cannot split at a byte that is not a char boundary") + } + }; + let right = match std::str::from_utf8(right) { + Ok(s) => s, + Err(_) => { + panic!("Invalid utf8; you cannot split at a byte that is not a char boundary") + } + }; + (Self::new(left), Self::new(right)) + } + + /// Split the string at the last occurrence of a character + pub const fn rsplit_once(&self, char: char) -> Option<(Self, Self)> { + let str = self.as_str(); + let mut index = str.len() - 1; + // First find the bytes we are searching for + let (char_bytes, len) = char_to_bytes(char); + let (char_bytes, _) = char_bytes.split_at(len); + let bytes = str.as_bytes(); + + // Then walk backwards from the end of the string + loop { + let byte = bytes[index]; + // Look for char boundaries in the string and check if the bytes match + if let Some(char_boundary_len) = utf8_char_boundary_to_char_len(byte) { + // Split up the string into three sections: [before_char, in_char, after_char] + let (before_char, after_index) = bytes.split_at(index); + let (in_char, after_char) = after_index.split_at(char_boundary_len as usize); + if in_char.len() != char_boundary_len as usize { + panic!("in_char.len() should always be equal to char_boundary_len as usize") + } + // Check if the bytes for the current char and the target char match + let mut in_char_eq = true; + let mut i = 0; + let min_len = if in_char.len() < char_bytes.len() { + in_char.len() + } else { + char_bytes.len() + }; + while i < min_len { + in_char_eq &= in_char[i] == char_bytes[i]; + i += 1; + } + // If they do, convert the bytes to strings and return the split strings + if in_char_eq { + let Ok(before_char_str) = std::str::from_utf8(before_char) else { + panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when the byte is a character boundary") + }; + let Ok(after_char_str) = std::str::from_utf8(after_char) else { + panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when the byte is a character boundary") + }; + return Some((Self::new(before_char_str), Self::new(after_char_str))); + } + } + match index.checked_sub(1) { + Some(new_index) => index = new_index, + None => return None, + } + } + } + + /// Split the string at the first occurrence of a character + pub const fn split_once(&self, char: char) -> Option<(Self, Self)> { + let str = self.as_str(); + let mut index = 0; + // First find the bytes we are searching for + let (char_bytes, len) = char_to_bytes(char); + let (char_bytes, _) = char_bytes.split_at(len); + let bytes = str.as_bytes(); + + // Then walk forwards from the start of the string + while index < bytes.len() { + let byte = bytes[index]; + // Look for char boundaries in the string and check if the bytes match + if let Some(char_boundary_len) = 
utf8_char_boundary_to_char_len(byte) { + // Split up the string into three sections: [before_char, in_char, after_char] + let (before_char, after_index) = bytes.split_at(index); + let (in_char, after_char) = after_index.split_at(char_boundary_len as usize); + if in_char.len() != char_boundary_len as usize { + panic!("in_char.len() should always be equal to char_boundary_len as usize") + } + // Check if the bytes for the current char and the target char match + let mut in_char_eq = true; + let mut i = 0; + let min_len = if in_char.len() < char_bytes.len() { + in_char.len() + } else { + char_bytes.len() + }; + while i < min_len { + in_char_eq &= in_char[i] == char_bytes[i]; + i += 1; + } + // If they do, convert the bytes to strings and return the split strings + if in_char_eq { + let Ok(before_char_str) = std::str::from_utf8(before_char) else { + panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when the byte is a character boundary") + }; + let Ok(after_char_str) = std::str::from_utf8(after_char) else { + panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when the byte is a character boundary") + }; + return Some((Self::new(before_char_str), Self::new(after_char_str))); + } + } + index += 1 + } + None + } +} + +impl PartialEq for ConstStr { + fn eq(&self, other: &Self) -> bool { + self.as_str() == other.as_str() + } +} + +impl Eq for ConstStr {} + +impl PartialOrd for ConstStr { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for ConstStr { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.as_str().cmp(other.as_str()) + } +} + +impl Hash for ConstStr { + fn hash(&self, state: &mut H) { + self.as_str().hash(state); + } +} + +#[test] +fn test_rsplit_once() { + let str = ConstStr::new("hello world"); + assert_eq!( + str.rsplit_once(' '), + Some((ConstStr::new("hello"), ConstStr::new("world"))) + ); + + let unicode_str = ConstStr::new("hi😀hello😀world😀world"); + assert_eq!( + unicode_str.rsplit_once('😀'), + Some((ConstStr::new("hi😀hello😀world"), ConstStr::new("world"))) + ); + assert_eq!(unicode_str.rsplit_once('❌'), None); + + for _ in 0..100 { + let random_str: String = (0..rand::random::() % 50) + .map(|_| rand::random::()) + .collect(); + let konst = ConstStr::new(&random_str); + let mut seen_chars = std::collections::HashSet::new(); + for char in random_str.chars().rev() { + let (char_bytes, len) = char_to_bytes(char); + let char_bytes = &char_bytes[..len]; + assert_eq!(char_bytes, char.to_string().as_bytes()); + if seen_chars.contains(&char) { + continue; + } + seen_chars.insert(char); + let (correct_left, correct_right) = random_str.rsplit_once(char).unwrap(); + let (left, right) = konst.rsplit_once(char).unwrap(); + println!("splitting {random_str:?} at {char:?}"); + assert_eq!(left.as_str(), correct_left); + assert_eq!(right.as_str(), correct_right); + } + } +} + +const CONTINUED_CHAR_MASK: u8 = 0b10000000; +const BYTE_CHAR_BOUNDARIES: [u8; 4] = [0b00000000, 0b11000000, 0b11100000, 0b11110000]; + +// Const version of https://doc.rust-lang.org/src/core/char/methods.rs.html#1765-1797 +const fn char_to_bytes(char: char) -> ([u8; 4], usize) { + let code = char as u32; + let len = char.len_utf8(); + let mut bytes = [0; 4]; + match len { + 1 => { + bytes[0] = code as u8; + } + 2 => { + bytes[0] = ((code >> 6) & 0x1F) as u8 | BYTE_CHAR_BOUNDARIES[1]; + bytes[1] = (code & 0x3F) as u8 | CONTINUED_CHAR_MASK; + } + 3 => { + bytes[0] = ((code >> 12) & 0x0F) as u8 | BYTE_CHAR_BOUNDARIES[2]; 
+ bytes[1] = ((code >> 6) & 0x3F) as u8 | CONTINUED_CHAR_MASK; + bytes[2] = (code & 0x3F) as u8 | CONTINUED_CHAR_MASK; + } + 4 => { + bytes[0] = ((code >> 18) & 0x07) as u8 | BYTE_CHAR_BOUNDARIES[3]; + bytes[1] = ((code >> 12) & 0x3F) as u8 | CONTINUED_CHAR_MASK; + bytes[2] = ((code >> 6) & 0x3F) as u8 | CONTINUED_CHAR_MASK; + bytes[3] = (code & 0x3F) as u8 | CONTINUED_CHAR_MASK; + } + _ => panic!( + "encode_utf8: need more than 4 bytes to encode the unicode character, but the buffer has 4 bytes" + ), + }; + (bytes, len) +} + +#[test] +fn fuzz_char_to_bytes() { + use std::char; + for _ in 0..100 { + let char = rand::random::(); + let (bytes, len) = char_to_bytes(char); + let str = std::str::from_utf8(&bytes[..len]).unwrap(); + assert_eq!(char.to_string(), str); + } +} + +const fn utf8_char_boundary_to_char_len(byte: u8) -> Option { + match byte { + 0b00000000..=0b01111111 => Some(1), + 0b11000000..=0b11011111 => Some(2), + 0b11100000..=0b11101111 => Some(3), + 0b11110000..=0b11111111 => Some(4), + _ => None, + } +} + +#[test] +fn fuzz_utf8_byte_to_char_len() { + for _ in 0..100 { + let random_string: String = (0..rand::random::()) + .map(|_| rand::random::()) + .collect(); + let bytes = random_string.as_bytes(); + let chars: std::collections::HashMap<_, _> = random_string.char_indices().collect(); + for (i, byte) in bytes.iter().enumerate() { + match utf8_char_boundary_to_char_len(*byte) { + Some(char_len) => { + let char = chars + .get(&i) + .unwrap_or_else(|| panic!("{byte:b} is not a character boundary")); + assert_eq!(char.len_utf8(), char_len as usize); + } + None => { + assert!(!chars.contains_key(&i), "{byte:b} is a character boundary"); + } + } + } + } +} diff --git a/packages/const-serialize/src/struct.rs b/packages/const-serialize/src/struct.rs new file mode 100644 index 0000000000..a2db822b6a --- /dev/null +++ b/packages/const-serialize/src/struct.rs @@ -0,0 +1,120 @@ +use crate::*; + +/// Plain old data for a field. Stores the offset of the field in the struct and the layout of the field. +#[derive(Debug, Copy, Clone)] +pub struct StructFieldLayout { + name: &'static str, + offset: usize, + layout: Layout, +} + +impl StructFieldLayout { + /// Create a new struct field layout + pub const fn new(name: &'static str, offset: usize, layout: Layout) -> Self { + Self { + name, + offset, + layout, + } + } +} + +/// Layout for a struct. The struct layout is just a list of fields with offsets +#[derive(Debug, Copy, Clone)] +pub struct StructLayout { + pub(crate) size: usize, + pub(crate) data: &'static [StructFieldLayout], +} + +impl StructLayout { + /// Create a new struct layout + pub const fn new(size: usize, data: &'static [StructFieldLayout]) -> Self { + Self { size, data } + } +} + +/// Serialize a struct that is stored at the pointer passed in +pub(crate) const unsafe fn serialize_const_struct( + ptr: *const (), + to: ConstVec, + layout: &StructLayout, +) -> ConstVec { + let mut i = 0; + let field_count = layout.data.len(); + let mut to = write_map(to, field_count); + while i < field_count { + // Serialize the field at the offset pointer in the struct + let StructFieldLayout { + name, + offset, + layout, + } = &layout.data[i]; + to = write_map_key(to, name); + let field = ptr.wrapping_byte_add(*offset as _); + to = serialize_const_ptr(field, to, layout); + i += 1; + } + to +} + +/// Deserialize a struct type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. 
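Because `serialize_const_struct` writes every field under its name and the deserializer below looks fields up by name again, a reader compiled against an older struct definition can still extract its fields from a buffer produced by a newer definition with extra or reordered fields. That is exactly what `test_adding_struct_field_non_breaking` exercises further down; a condensed sketch of the same property (type names and imports are illustrative):

```rust
use const_serialize::{deserialize_const, serialize_const, ConstVec, SerializeConst};

#[derive(Debug, PartialEq, SerializeConst)]
struct V1 {
    a: u32,
    b: u8,
}

#[derive(Debug, PartialEq, SerializeConst)]
struct V2 {
    // A new field, and a different declaration order than V1.
    c: u32,
    b: u8,
    a: u32,
}

#[test]
fn newer_struct_still_reads_as_the_older_one() {
    let mut buf = ConstVec::new();
    buf = serialize_const(
        &V2 {
            a: 0x11111111,
            b: 0x22,
            c: 0x33333333,
        },
        buf,
    );
    let bytes = buf.as_ref();
    // V1 only asks for "a" and "b", so the extra "c" entry is simply ignored.
    let (_, old) = deserialize_const!(V1, bytes).unwrap();
    assert_eq!(
        old,
        V1 {
            a: 0x11111111,
            b: 0x22,
        }
    );
}
```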
+pub(crate) const fn deserialize_const_struct<'a>( + from: &'a [u8], + layout: &StructLayout, + out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { + let Ok((map, from)) = take_map(from) else { + return None; + }; + let mut i = 0; + while i < layout.data.len() { + // Deserialize the field at the offset pointer in the struct + let StructFieldLayout { + name, + offset, + layout, + } = &layout.data[i]; + let Ok(Some(from)) = map.find(name) else { + return None; + }; + let Some((_, field_bytes)) = out.split_at_mut_checked(*offset) else { + return None; + }; + if deserialize_const_ptr(from, layout, field_bytes).is_none() { + return None; + } + i += 1; + } + Some(from) +} + +macro_rules! impl_serialize_const_tuple { + ($($generic:ident: $generic_number:expr),*) => { + impl_serialize_const_tuple!(@impl ($($generic,)*) = $($generic: $generic_number),*); + }; + (@impl $inner:ty = $($generic:ident: $generic_number:expr),*) => { + unsafe impl<$($generic: SerializeConst),*> SerializeConst for ($($generic,)*) { + const MEMORY_LAYOUT: Layout = { + Layout::Struct(StructLayout { + size: std::mem::size_of::<($($generic,)*)>(), + data: &[ + $( + StructFieldLayout::new(stringify!($generic_number), std::mem::offset_of!($inner, $generic_number), $generic::MEMORY_LAYOUT), + )* + ], + }) + }; + } + }; +} + +impl_serialize_const_tuple!(T1: 0); +impl_serialize_const_tuple!(T1: 0, T2: 1); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6, T8: 7); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6, T8: 7, T9: 8); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6, T8: 7, T9: 8, T10: 9); diff --git a/packages/const-serialize/tests/enum.rs b/packages/const-serialize/tests/enum.rs index a0df9f160c..5b8e286ebd 100644 --- a/packages/const-serialize/tests/enum.rs +++ b/packages/const-serialize/tests/enum.rs @@ -81,7 +81,7 @@ fn test_serialize_enum() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); let data = Enum::B { @@ -91,7 +91,7 @@ fn test_serialize_enum() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); } @@ -110,7 +110,7 @@ fn test_serialize_list_of_lopsided_enums() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data); let data = [ @@ -126,7 +126,7 @@ fn test_serialize_list_of_lopsided_enums() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data); let data = [ @@ -139,7 +139,7 @@ fn test_serialize_list_of_lopsided_enums() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); 
assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data); let data = [ @@ -152,7 +152,7 @@ fn test_serialize_list_of_lopsided_enums() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data); } @@ -171,14 +171,14 @@ fn test_serialize_u8_enum() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); let data = Enum::B; let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); } @@ -198,7 +198,7 @@ fn test_serialize_corrupted_enum() { buf = serialize_const(&data, buf); buf = buf.set(0, 2); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf), None); } @@ -226,7 +226,7 @@ fn test_serialize_nested_enum() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); let data = Enum::B { @@ -236,7 +236,7 @@ fn test_serialize_nested_enum() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); let data = Enum::B { @@ -249,7 +249,7 @@ fn test_serialize_nested_enum() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); let data = Enum::B { @@ -262,6 +262,81 @@ fn test_serialize_nested_enum() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); } + +#[test] +fn test_adding_enum_field_non_breaking() { + #[derive(Debug, PartialEq, SerializeConst)] + #[repr(C, u8)] + enum Initial { + A { a: u32, b: u8 }, + } + + #[derive(Debug, PartialEq, SerializeConst)] + #[repr(C, u8)] + enum New { + A { b: u8, a: u32, c: u32 }, + } + + let data = New::A { + a: 0x11111111, + b: 0x22, + c: 0x33333333, + }; + let mut buf = ConstVec::new(); + buf = serialize_const(&data, buf); + let buf = buf.as_ref(); + // The new struct should be able to deserialize into the initial struct + let (_, data2) = deserialize_const!(Initial, buf).unwrap(); + assert_eq!( + Initial::A { + a: 0x11111111, + b: 0x22, + }, + data2 + ); +} + +#[test] +fn test_adding_enum_variant_non_breaking() { + #[derive(Debug, PartialEq, SerializeConst)] + #[repr(C, u8)] + enum Initial { + A { a: u32, b: u8 }, + } + + #[derive(Debug, PartialEq, SerializeConst)] + #[repr(C, u8)] + enum New { + #[allow(unused)] + B { + d: u32, + e: u8, + }, + A { + c: u32, + b: u8, + a: u32, + }, + } + + let data = New::A { + a: 0x11111111, + b: 0x22, + c: 0x33333333, + }; + let mut buf = ConstVec::new(); + buf = serialize_const(&data, buf); + let buf = buf.as_ref(); + // The new struct should be able to deserialize into the initial struct + let (_, data2) = deserialize_const!(Initial, buf).unwrap(); + assert_eq!( + Initial::A { + a: 0x11111111, 
+ b: 0x22, + }, + data2 + ); +} diff --git a/packages/const-serialize/tests/lists.rs b/packages/const-serialize/tests/lists.rs index 84f9fe11b2..4192499150 100644 --- a/packages/const-serialize/tests/lists.rs +++ b/packages/const-serialize/tests/lists.rs @@ -5,7 +5,7 @@ fn test_serialize_const_layout_list() { let mut buf = ConstVec::new(); buf = serialize_const(&[1u8, 2, 3] as &[u8; 3], buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!([u8; 3], buf).unwrap().1, [1, 2, 3]) } @@ -17,7 +17,7 @@ fn test_serialize_const_layout_nested_lists() { buf, ); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!( deserialize_const!([[u8; 3]; 3], buf).unwrap().1, @@ -29,6 +29,6 @@ fn test_serialize_const_layout_nested_lists() { fn test_serialize_list_too_little_data() { let mut buf = ConstVec::new(); buf = buf.push(1); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!([u64; 10], buf), None); } diff --git a/packages/const-serialize/tests/primitive.rs b/packages/const-serialize/tests/primitive.rs index a5e3e803ff..0423dcf219 100644 --- a/packages/const-serialize/tests/primitive.rs +++ b/packages/const-serialize/tests/primitive.rs @@ -4,58 +4,34 @@ use const_serialize::{deserialize_const, serialize_const, ConstVec}; fn test_serialize_const_layout_primitive() { let mut buf = ConstVec::new(); buf = serialize_const(&1234u32, buf); - if cfg!(feature = "test-big-endian") { - assert_eq!(buf.as_ref(), 1234u32.to_be_bytes()); - } else { - assert_eq!(buf.as_ref(), 1234u32.to_le_bytes()); - } - let buf = buf.read(); + let buf = buf.as_ref(); + println!("{:?}", buf); assert_eq!(deserialize_const!(u32, buf).unwrap().1, 1234u32); let mut buf = ConstVec::new(); buf = serialize_const(&1234u64, buf); - if cfg!(feature = "test-big-endian") { - assert_eq!(buf.as_ref(), 1234u64.to_be_bytes()); - } else { - assert_eq!(buf.as_ref(), 1234u64.to_le_bytes()); - } - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(u64, buf).unwrap().1, 1234u64); let mut buf = ConstVec::new(); buf = serialize_const(&1234i32, buf); - if cfg!(feature = "test-big-endian") { - assert_eq!(buf.as_ref(), 1234i32.to_be_bytes()); - } else { - assert_eq!(buf.as_ref(), 1234i32.to_le_bytes()); - } - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(i32, buf).unwrap().1, 1234i32); let mut buf = ConstVec::new(); buf = serialize_const(&1234i64, buf); - if cfg!(feature = "test-big-endian") { - assert_eq!(buf.as_ref(), 1234i64.to_be_bytes()); - } else { - assert_eq!(buf.as_ref(), 1234i64.to_le_bytes()); - } - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(i64, buf).unwrap().1, 1234i64); let mut buf = ConstVec::new(); buf = serialize_const(&true, buf); assert_eq!(buf.as_ref(), [1u8]); - let buf = buf.read(); + let buf = buf.as_ref(); assert!(deserialize_const!(bool, buf).unwrap().1); let mut buf = ConstVec::new(); buf = serialize_const(&0.631f32, buf); - if cfg!(feature = "test-big-endian") { - assert_eq!(buf.as_ref(), 0.631f32.to_be_bytes()); - } else { - assert_eq!(buf.as_ref(), 0.631f32.to_le_bytes()); - } - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(f32, buf).unwrap().1, 0.631); } @@ -66,6 +42,6 @@ fn test_serialize_primitive_too_little_data() { buf = buf.push(1); buf = buf.push(1); buf = buf.push(1); - let buf = buf.read(); - assert_eq!(deserialize_const!(u64, buf), None); + let buf = buf.as_ref(); + 
assert_eq!(deserialize_const!([u64; 10], buf), None); } diff --git a/packages/const-serialize/tests/str.rs b/packages/const-serialize/tests/str.rs index 45371741d5..4a11deeb41 100644 --- a/packages/const-serialize/tests/str.rs +++ b/packages/const-serialize/tests/str.rs @@ -6,11 +6,11 @@ fn test_serialize_const_layout_str() { let str = ConstStr::new("hello"); buf = serialize_const(&str, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); - assert_eq!( - deserialize_const!(ConstStr, buf).unwrap().1.as_str(), - "hello" - ); + let buf = buf.as_ref(); + assert!(buf.len() < 10); + let str = deserialize_const!(ConstStr, buf).unwrap().1; + eprintln!("{str:?}"); + assert_eq!(str.as_str(), "hello"); } #[test] @@ -19,7 +19,8 @@ fn test_serialize_const_layout_nested_str() { let str = ConstStr::new("hello"); buf = serialize_const(&[str, str, str] as &[ConstStr; 3], buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + assert!(buf.len() < 30); + let buf = buf.as_ref(); assert_eq!( deserialize_const!([ConstStr; 3], buf).unwrap().1, @@ -35,6 +36,6 @@ fn test_serialize_const_layout_nested_str() { fn test_serialize_str_too_little_data() { let mut buf = ConstVec::new(); buf = buf.push(1); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(ConstStr, buf), None); } diff --git a/packages/const-serialize/tests/structs.rs b/packages/const-serialize/tests/structs.rs index 68ce249381..cb1f9847d2 100644 --- a/packages/const-serialize/tests/structs.rs +++ b/packages/const-serialize/tests/structs.rs @@ -96,7 +96,7 @@ fn test_serialize_const_layout_struct_list() { const _ASSERT: () = { let mut buf = ConstVec::new(); buf = serialize_const(&DATA, buf); - let buf = buf.read(); + let buf = buf.as_ref(); let [first, second, third] = match deserialize_const!([OtherStruct; 3], buf) { Some((_, data)) => data, None => panic!("data mismatch"), @@ -109,7 +109,7 @@ fn test_serialize_const_layout_struct_list() { let mut buf = ConstVec::new(); const DATA_AGAIN: [[OtherStruct; 3]; 3] = [DATA, DATA, DATA]; buf = serialize_const(&DATA_AGAIN, buf); - let buf = buf.read(); + let buf = buf.as_ref(); let [first, second, third] = match deserialize_const!([[OtherStruct; 3]; 3], buf) { Some((_, data)) => data, None => panic!("data mismatch"), @@ -128,7 +128,7 @@ fn test_serialize_const_layout_struct_list() { let mut buf = ConstVec::new(); buf = serialize_const(&DATA, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); let (_, data2) = deserialize_const!([OtherStruct; 3], buf).unwrap(); assert_eq!(DATA, data2); } @@ -158,7 +158,41 @@ fn test_serialize_const_layout_struct() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); let (_, data2) = deserialize_const!(OtherStruct, buf).unwrap(); assert_eq!(data, data2); } + +#[test] +fn test_adding_struct_field_non_breaking() { + #[derive(Debug, PartialEq, SerializeConst)] + struct Initial { + a: u32, + b: u8, + } + + #[derive(Debug, PartialEq, SerializeConst)] + struct New { + c: u32, + b: u8, + a: u32, + } + + let data = New { + a: 0x11111111, + b: 0x22, + c: 0x33333333, + }; + let mut buf = ConstVec::new(); + buf = serialize_const(&data, buf); + let buf = buf.as_ref(); + // The new struct should be able to deserialize into the initial struct + let (_, data2) = deserialize_const!(Initial, buf).unwrap(); + assert_eq!( + Initial { + a: data.a, + b: data.b, + }, + data2 + ); +} diff --git 
a/packages/const-serialize/tests/tuples.rs b/packages/const-serialize/tests/tuples.rs index 43a036c413..d277d826bf 100644 --- a/packages/const-serialize/tests/tuples.rs +++ b/packages/const-serialize/tests/tuples.rs @@ -4,7 +4,7 @@ use const_serialize::{deserialize_const, serialize_const, ConstVec}; fn test_serialize_const_layout_tuple() { let mut buf = ConstVec::new(); buf = serialize_const(&(1234u32, 5678u16), buf); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!( deserialize_const!((u32, u16), buf).unwrap().1, (1234u32, 5678u16) @@ -12,7 +12,7 @@ fn test_serialize_const_layout_tuple() { let mut buf = ConstVec::new(); buf = serialize_const(&(1234f64, 5678u16, 90u8), buf); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!( deserialize_const!((f64, u16, u8), buf).unwrap().1, (1234f64, 5678u16, 90u8) @@ -20,7 +20,7 @@ fn test_serialize_const_layout_tuple() { let mut buf = ConstVec::new(); buf = serialize_const(&(1234u32, 5678u16, 90u8, 1000000f64), buf); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!( deserialize_const!((u32, u16, u8, f64), buf).unwrap().1, (1234u32, 5678u16, 90u8, 1000000f64) diff --git a/packages/dioxus/Cargo.toml b/packages/dioxus/Cargo.toml index 8644885374..94e605feec 100644 --- a/packages/dioxus/Cargo.toml +++ b/packages/dioxus/Cargo.toml @@ -8,7 +8,7 @@ license = "MIT OR Apache-2.0" repository = "https://github.com/DioxusLabs/dioxus/" homepage = "https://dioxuslabs.com" keywords = ["web", "desktop", "mobile", "gui", "wasm"] -rust-version = "1.80.0" +rust-version = "1.83.0" [dependencies] dioxus-core = { workspace = true } diff --git a/packages/generational-box/Cargo.toml b/packages/generational-box/Cargo.toml index 686c8c7c6c..3fb9180dbc 100644 --- a/packages/generational-box/Cargo.toml +++ b/packages/generational-box/Cargo.toml @@ -7,7 +7,7 @@ description = "A box backed by a generational runtime" license = "MIT OR Apache-2.0" repository = "https://github.com/DioxusLabs/dioxus/" keywords = ["generational", "box", "memory", "allocator"] -rust-version = "1.80.0" +rust-version = "1.83.0" [dependencies] parking_lot = { workspace = true } diff --git a/packages/manganis/manganis-core/src/asset.rs b/packages/manganis/manganis-core/src/asset.rs index 92c543599a..fabae8332e 100644 --- a/packages/manganis/manganis-core/src/asset.rs +++ b/packages/manganis/manganis-core/src/asset.rs @@ -140,7 +140,7 @@ impl Asset { let byte = unsafe { std::ptr::read_volatile(ptr.add(byte)) }; bytes = bytes.push(byte); } - let read = bytes.read(); + let read = bytes.as_ref(); deserialize_const!(BundledAsset, read).expect("Failed to deserialize asset. 
Make sure you built with the matching version of the Dioxus CLI").1 } diff --git a/packages/manganis/manganis-core/src/options.rs b/packages/manganis/manganis-core/src/options.rs index dd383ab4d8..bed2cf4651 100644 --- a/packages/manganis/manganis-core/src/options.rs +++ b/packages/manganis/manganis-core/src/options.rs @@ -107,7 +107,7 @@ impl AssetOptionsBuilder<()> { impl AssetOptionsBuilder { /// Create a new asset options builder with the given variant - pub(crate) const fn variant(variant: T) -> Self { + pub const fn variant(variant: T) -> Self { Self { add_hash: true, variant, diff --git a/packages/manganis/manganis-macro/src/linker.rs b/packages/manganis/manganis-macro/src/linker.rs index 116d0c63b2..f2f9a408bb 100644 --- a/packages/manganis/manganis-macro/src/linker.rs +++ b/packages/manganis/manganis-macro/src/linker.rs @@ -8,7 +8,7 @@ use quote::ToTokens; /// After linking, the "manganis" sections of the different object files will be merged. pub fn generate_link_section(asset: impl ToTokens, asset_hash: &str) -> TokenStream2 { let position = proc_macro2::Span::call_site(); - let export_name = syn::LitStr::new(&format!("__MANGANIS__{}", asset_hash), position); + let export_name = syn::LitStr::new(&format!("__ASSETS__{}", asset_hash), position); quote::quote! { // First serialize the asset into a constant sized buffer diff --git a/packages/manganis/manganis/src/macro_helpers.rs b/packages/manganis/manganis/src/macro_helpers.rs index 984461b031..8b7dba7981 100644 --- a/packages/manganis/manganis/src/macro_helpers.rs +++ b/packages/manganis/manganis/src/macro_helpers.rs @@ -36,7 +36,7 @@ pub const fn serialize_asset(asset: &BundledAsset) -> ConstVec { /// Deserialize a const buffer into a BundledAsset pub const fn deserialize_asset(bytes: &[u8]) -> BundledAsset { let bytes = ConstVec::new().extend(bytes); - match const_serialize::deserialize_const!(BundledAsset, bytes.read()) { + match const_serialize::deserialize_const!(BundledAsset, bytes.as_ref()) { Some((_, asset)) => asset, None => panic!("Failed to deserialize asset. 
This may be caused by a mismatch between your dioxus and dioxus-cli versions"), } diff --git a/packages/playwright-tests/cli-optimization-07/.gitignore b/packages/playwright-tests/cli-optimization-07/.gitignore new file mode 100644 index 0000000000..0fdbab7369 --- /dev/null +++ b/packages/playwright-tests/cli-optimization-07/.gitignore @@ -0,0 +1,4 @@ +dist +target +monaco-editor +partial-monaco-editor \ No newline at end of file diff --git a/packages/playwright-tests/cli-optimization-07/Cargo.toml b/packages/playwright-tests/cli-optimization-07/Cargo.toml new file mode 100644 index 0000000000..3e47929b99 --- /dev/null +++ b/packages/playwright-tests/cli-optimization-07/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "dioxus-cli-optimization-test-07" +version = "0.0.1" +edition = "2021" +description = "Playwright test for Dioxus CLI optimization" +license = "MIT OR Apache-2.0" +publish = false + +[dependencies] +# We test both if the current version of dioxus works and if the CLI can understand assets from the old asset format +dioxus = { version = "=0.7.1", features = ["web"] } +serde = { workspace = true, features = ["derive"] } +serde_json.workspace = true + +# [build-dependencies] +# reqwest = { workspace = true, features = ["blocking"] } +# flate2 = "1.1.2" +# tar = "0.4.44" diff --git a/packages/playwright-tests/cli-optimization-07/assets/data.json b/packages/playwright-tests/cli-optimization-07/assets/data.json new file mode 100644 index 0000000000..f28e5498c8 --- /dev/null +++ b/packages/playwright-tests/cli-optimization-07/assets/data.json @@ -0,0 +1,3 @@ +{ + "list": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] +} diff --git a/packages/playwright-tests/cli-optimization-07/build.rs b/packages/playwright-tests/cli-optimization-07/build.rs new file mode 100644 index 0000000000..bc050c1025 --- /dev/null +++ b/packages/playwright-tests/cli-optimization-07/build.rs @@ -0,0 +1,16 @@ +fn main() { + // use std::path::PathBuf; + + // // If the monaco editor folder doesn't exist, download it + // let monaco_path = PathBuf::from("monaco-editor"); + // if monaco_path.exists() { + // return; + // } + + // let url = "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.52.2.tgz"; + // let bytes = reqwest::blocking::get(url).unwrap().bytes().unwrap(); + // let mut archive = tar::Archive::new(flate2::read::GzDecoder::new(bytes.as_ref())); + // let monaco_path_partial = PathBuf::from("partial-monaco-editor"); + // archive.unpack(&monaco_path_partial).unwrap(); + // std::fs::rename(monaco_path_partial, monaco_path).unwrap(); +} diff --git a/packages/playwright-tests/cli-optimization-07/images/toasts.png b/packages/playwright-tests/cli-optimization-07/images/toasts.png new file mode 100644 index 0000000000..382298f9c7 Binary files /dev/null and b/packages/playwright-tests/cli-optimization-07/images/toasts.png differ diff --git a/packages/playwright-tests/cli-optimization-07/src/main.rs b/packages/playwright-tests/cli-optimization-07/src/main.rs new file mode 100644 index 0000000000..252c3946cd --- /dev/null +++ b/packages/playwright-tests/cli-optimization-07/src/main.rs @@ -0,0 +1,96 @@ +// This test checks the CLI optimizes assets correctly without breaking them + +#[cfg(feature = "dioxus_07")] +use dioxus_07 as dioxus; + +use dioxus::prelude::*; + +const SOME_IMAGE: Asset = asset!("/images/toasts.png", AssetOptions::image().with_avif()); +const SOME_IMAGE_WITH_THE_SAME_URL: Asset = + asset!("/images/toasts.png", AssetOptions::image().with_jpg()); +#[used] +static SOME_IMAGE_WITHOUT_HASH: Asset = asset!( + 
"/images/toasts.png", + AssetOptions::image().with_avif().with_hash_suffix(false) +); +// This asset is unused, but it should still be bundled because it is an external asset +#[used] +static _ASSET: Asset = asset!( + "/images/toasts.png", + AssetOptions::builder().with_hash_suffix(false) +); + +fn main() { + dioxus::launch(App); +} + +#[component] +fn App() -> Element { + // todo: test monaco more.... + // const MONACO_FOLDER: Asset = asset!("/monaco-editor/package/min/vs"); + // let script = format!("(() => {{ + // require.config({{ paths: {{ vs: '{MONACO_FOLDER}' }} }}); + + // require(['vs/editor/editor.main'], () => {{ + // var model = monaco.editor.createModel('fn main() {{\\n\\tprintln!(\\\"hi\\\")\\n}}', 'rust'); + // var editor = monaco.editor.create(document.getElementById('editor')); + // editor.setModel(model); + // }}) + // }})()"); + + rsx! { + div { + id: "editor", + width: "100vw", + height: "100vw", + } + // // Monaco script + // script { + // src: "{MONACO_FOLDER}/loader.js", + // "onload": script + // } + img { + id: "some_image", + src: "{SOME_IMAGE}" + } + img { + id: "some_image_with_the_same_url", + src: "{SOME_IMAGE_WITH_THE_SAME_URL}" + } + img { + id: "some_image_without_hash", + src: "{SOME_IMAGE_WITHOUT_HASH}" + } + LoadsAsset {} + } +} + +const JSON: Asset = asset!("/assets/data.json"); + +#[derive(Debug, Clone, serde::Deserialize)] +struct Data { + list: Vec, +} + +#[component] +fn LoadsAsset() -> Element { + let data = use_resource(|| async { + let bytes = dioxus::asset_resolver::read_asset_bytes(&JSON) + .await + .unwrap(); + serde_json::from_slice::(&bytes).unwrap() + }); + match data() { + Some(data) => rsx! { + div { + id: "resolved-data", + "List: {data.list:?}" + } + }, + None => rsx! { + div { + "Loading..." + } + }, + } +} diff --git a/packages/playwright-tests/cli-optimization.spec.js b/packages/playwright-tests/cli-optimization.spec.js index 28b83d84c7..48e6a06f3a 100644 --- a/packages/playwright-tests/cli-optimization.spec.js +++ b/packages/playwright-tests/cli-optimization.spec.js @@ -1,59 +1,67 @@ // @ts-check const { test, expect } = require("@playwright/test"); -test("optimized scripts run", async ({ page }) => { - await page.goto("http://localhost:8989"); - - // // Expect the page to load the script after optimizations have been applied. 
The script - // // should add an editor to the page that shows a main function - // const main = page.locator("#main"); - // await expect(main).toContainText("hi"); - - // Expect the page to contain an image with the id "some_image" - const image = page.locator("#some_image"); - await expect(image).toBeVisible(); - // Get the image src - const src = await image.getAttribute("src"); - - // Expect the page to contain an image with the id "some_image_with_the_same_url" - const image2 = page.locator("#some_image_with_the_same_url"); - await expect(image2).toBeVisible(); - // Get the image src - const src2 = await image2.getAttribute("src"); - - // Expect the urls to be different - expect(src).not.toEqual(src2); - - // Expect the page to contain an image with the id "some_image_without_hash" - const image3 = page.locator("#some_image_without_hash"); - await expect(image3).toBeVisible(); - // Get the image src - const src3 = await image3.getAttribute("src"); - // Expect the src to be without a hash - expect(src3).toEqual("/assets/toasts.avif"); -}); - -test("unused external assets are bundled", async ({ page }) => { - await page.goto("http://localhost:8989"); - - // Assert http://localhost:8989/assets/toasts.png is found even though it is not used in the page - const response = await page.request.get( - "http://localhost:8989/assets/toasts.png" - ); - // Expect the response to be ok - expect(response.status()).toBe(200); - // make sure the response is an image - expect(response.headers()["content-type"]).toBe("image/png"); -}); - -test("assets are resolved", async ({ page }) => { - await page.goto("http://localhost:8989"); - - // Expect the page to contain an element with the id "resolved-data" - const resolvedData = page.locator("#resolved-data"); - await expect(resolvedData).toBeVisible(); - // Expect the element to contain the text "List: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]" - await expect(resolvedData).toContainText( - "List: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]" - ); -}); +const test_variants = [ + { port: 9191, name: "0.7.1" }, + { port: 8989, name: "current version" }, +]; + +for (let { port, name } of test_variants) { + test(`optimized scripts run in ${name}`, async ({ page }) => { + await page.goto(`http://localhost:${port}`); + + // // Expect the page to load the script after optimizations have been applied. 
The script + // // should add an editor to the page that shows a main function + // const main = page.locator("#main"); + // await expect(main).toContainText("hi"); + + // Expect the page to contain an image with the id "some_image" + const image = page.locator("#some_image"); + await expect(image).toBeVisible(); + // Get the image src + const src = await image.getAttribute("src"); + + // Expect the page to contain an image with the id "some_image_with_the_same_url" + const image2 = page.locator("#some_image_with_the_same_url"); + await expect(image2).toBeVisible(); + // Get the image src + const src2 = await image2.getAttribute("src"); + + // Expect the urls to be different + expect(src).not.toEqual(src2); + + // Expect the page to contain an image with the id "some_image_without_hash" + const image3 = page.locator("#some_image_without_hash"); + await expect(image3).toBeVisible(); + // Get the image src + const src3 = await image3.getAttribute("src"); + // Expect the src to be without a hash + expect(src3).toEqual("/assets/toasts.avif"); + }); + + test(`unused external assets are bundled in ${name}`, async ({ page }) => { + await page.goto(`http://localhost:${port}`); + + // Assert http://localhost:{port}/assets/toasts.png is found even though it is not used in the page + const response = await page.request.get( + `http://localhost:${port}/assets/toasts.png` + ); + // Expect the response to be ok + expect(response.status()).toBe(200); + // make sure the response is an image + expect(response.headers()["content-type"]).toBe("image/png"); + }); + + test(`assets are resolved in ${name}`, async ({ page }) => { + await page.goto(`http://localhost:${port}`); + + // Expect the page to contain an element with the id "resolved-data" + const resolvedData = page.locator("#resolved-data"); + await expect(resolvedData).toBeVisible(); + // Expect the element to contain the text "List: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]" + await expect(resolvedData).toContainText( + "List: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]" + ); + }); + +} diff --git a/packages/playwright-tests/cli-optimization/Cargo.toml b/packages/playwright-tests/cli-optimization/Cargo.toml index 29d7ae2e47..cb6d6c1ed9 100644 --- a/packages/playwright-tests/cli-optimization/Cargo.toml +++ b/packages/playwright-tests/cli-optimization/Cargo.toml @@ -15,3 +15,4 @@ serde_json.workspace = true # reqwest = { workspace = true, features = ["blocking"] } # flate2 = "1.1.2" # tar = "0.4.44" + diff --git a/packages/playwright-tests/cli-optimization/src/main.rs b/packages/playwright-tests/cli-optimization/src/main.rs index cd7f590c35..252c3946cd 100644 --- a/packages/playwright-tests/cli-optimization/src/main.rs +++ b/packages/playwright-tests/cli-optimization/src/main.rs @@ -1,5 +1,8 @@ // This test checks the CLI optimizes assets correctly without breaking them +#[cfg(feature = "dioxus_07")] +use dioxus_07 as dioxus; + use dioxus::prelude::*; const SOME_IMAGE: Asset = asset!("/images/toasts.png", AssetOptions::image().with_avif()); diff --git a/packages/playwright-tests/package-lock.json b/packages/playwright-tests/package-lock.json index 69bd204d00..f7e3523c8d 100644 --- a/packages/playwright-tests/package-lock.json +++ b/packages/playwright-tests/package-lock.json @@ -9,17 +9,17 @@ "version": "1.0.0", "license": "ISC", "devDependencies": { - "@playwright/test": "^1.53.1" + "@playwright/test": "^1.56.1" } }, "node_modules/@playwright/test": { - "version": "1.53.1", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.53.1.tgz", - "integrity": 
"sha512-Z4c23LHV0muZ8hfv4jw6HngPJkbbtZxTkxPNIg7cJcTc9C28N/p2q7g3JZS2SiKBBHJ3uM1dgDye66bB7LEk5w==", + "version": "1.56.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.56.1.tgz", + "integrity": "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg==", "dev": true, "license": "Apache-2.0", "dependencies": { - "playwright": "1.53.1" + "playwright": "1.56.1" }, "bin": { "playwright": "cli.js" @@ -44,13 +44,13 @@ } }, "node_modules/playwright": { - "version": "1.53.1", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.53.1.tgz", - "integrity": "sha512-LJ13YLr/ocweuwxyGf1XNFWIU4M2zUSo149Qbp+A4cpwDjsxRPj7k6H25LBrEHiEwxvRbD8HdwvQmRMSvquhYw==", + "version": "1.56.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.56.1.tgz", + "integrity": "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw==", "dev": true, "license": "Apache-2.0", "dependencies": { - "playwright-core": "1.53.1" + "playwright-core": "1.56.1" }, "bin": { "playwright": "cli.js" @@ -63,9 +63,9 @@ } }, "node_modules/playwright-core": { - "version": "1.53.1", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.53.1.tgz", - "integrity": "sha512-Z46Oq7tLAyT0lGoFx4DOuB1IA9D1TPj0QkYxpPVUnGDqHHvDpCftu1J2hM2PiWsNMoZh8+LQaarAWcDfPBc6zg==", + "version": "1.56.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.56.1.tgz", + "integrity": "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==", "dev": true, "license": "Apache-2.0", "bin": { diff --git a/packages/playwright-tests/package.json b/packages/playwright-tests/package.json index 595cfc4c61..b52c1fdee7 100644 --- a/packages/playwright-tests/package.json +++ b/packages/playwright-tests/package.json @@ -12,6 +12,6 @@ "author": "", "license": "ISC", "devDependencies": { - "@playwright/test": "^1.53.1" + "@playwright/test": "^1.56.1" } } diff --git a/packages/playwright-tests/playwright.config.js b/packages/playwright-tests/playwright.config.js index c90c80df3f..edf9d2a861 100644 --- a/packages/playwright-tests/playwright.config.js +++ b/packages/playwright-tests/playwright.config.js @@ -172,6 +172,16 @@ module.exports = defineConfig({ reuseExistingServer: !process.env.CI, stdout: "pipe", }, + { + cwd: path.join(process.cwd(), "cli-optimization-07"), + // Remove the cache folder for the cli-optimization build to force a full cache reset + command: + 'cargo run --package dioxus-cli --release -- run --addr "127.0.0.1" --port 9191', + port: 9191, + timeout: 50 * 60 * 1000, + reuseExistingServer: !process.env.CI, + stdout: "pipe", + }, { cwd: path.join(process.cwd(), "wasm-split-harness"), command: