diff --git a/.claude/settings.local.json b/.claude/settings.local.json new file mode 100644 index 0000000..084fc77 --- /dev/null +++ b/.claude/settings.local.json @@ -0,0 +1,8 @@ +{ + "permissions": { + "allow": [ + "Bash(treefmt:*)", + "Bash(nix flake check:*)" + ] + } +} diff --git a/.envrc b/.envrc new file mode 100644 index 0000000..8392d15 --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +use flake \ No newline at end of file diff --git a/.github/workflows/cargo.yaml b/.github/workflows/cargo.yaml new file mode 100644 index 0000000..4d7bf17 --- /dev/null +++ b/.github/workflows/cargo.yaml @@ -0,0 +1,38 @@ +name: Cargo + +on: + workflow_call: + inputs: + nightly: + type: boolean + components: + type: string + title: + type: string + required: true + run: + type: string + required: true + +env: + RUSTFLAGS: -D warnings + RUSTDOCFLAGS: -D warnings + +jobs: + cargo-run: + name: Run + + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + + - name: Cache dependencies + uses: Swatinem/rust-cache@v2 + + - name: ${{ inputs.title }} + run: cargo ${{ inputs.run }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci-cd.yaml similarity index 57% rename from .github/workflows/ci.yaml rename to .github/workflows/ci-cd.yaml index 0d1239c..766c76b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci-cd.yaml @@ -1,4 +1,4 @@ -name: Continuous Integration +name: CI/CD on: - push @@ -17,6 +17,17 @@ jobs: runs-on: ubuntu-latest + strategy: + matrix: + features: + - name: No Default Features + flag: "" + - name: Default Features + flag: --features default + - name: All Features + flag: --all-features + fail-fast: false + steps: - name: Checkout repository uses: actions/checkout@v4 @@ -28,13 +39,24 @@ jobs: uses: Swatinem/rust-cache@v2 - name: Run tests - run: cargo test --all-features + run: cargo test --tests --no-default-features ${{ matrix.features.flag }} miri: 
name: Miri runs-on: ubuntu-latest + strategy: + matrix: + features: + - name: No Default Features + flag: "" + - name: Default Features + flag: --features default + - name: All Features + flag: --all-features + fail-fast: false + steps: - name: Checkout repository uses: actions/checkout@v4 @@ -51,13 +73,24 @@ jobs: run: cargo miri setup - name: Run Miri - run: cargo miri test --all-features + run: cargo miri test --tests --no-default-features ${{ matrix.features.flag }} - check: - name: Check + clippy: + name: Clippy runs-on: ubuntu-latest + strategy: + matrix: + features: + - name: No Default Features + flag: "" + - name: Default Features + flag: --features default + - name: All Features + flag: --all-features + fail-fast: false + steps: - name: Checkout repository uses: actions/checkout@v4 @@ -70,11 +103,50 @@ jobs: - name: Cache dependencies uses: Swatinem/rust-cache@v2 - - name: Check code - run: cargo clippy --all-features + - name: Run Clippy + run: cargo clippy --no-default-features ${{ matrix.features.flag }} + + test-docs: + name: Test Documentation + + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + + - name: Cache dependencies + uses: Swatinem/rust-cache@v2 + + - name: Run documentation tests + run: cargo test --doc --all-features + + check-docs: + name: Check Documentation + + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Install Rust + uses: dtolnay/rust-toolchain@nightly + + - name: Cache dependencies + uses: Swatinem/rust-cache@v2 + + - name: Install `cargo-docs-rs` + uses: dtolnay/install@cargo-docs-rs + + - name: Check documentation + run: cargo docs-rs - format: - name: Format + check-format: + name: Check Formatting runs-on: ubuntu-latest @@ -104,8 +176,10 @@ jobs: needs: - test - miri - - check - - format + - clippy + - test-docs + - check-docs + - check-format if: 
github.event_name == 'push' && github.ref == 'refs/heads/main' @@ -138,8 +212,10 @@ jobs: needs: - test - miri - - check - - format + - clippy + - test-docs + - check-docs + - check-format if: github.event_name == 'push' && github.ref == 'refs/heads/main' diff --git a/.github/workflows/security-audit.yaml b/.github/workflows/security-audit.yaml index 1fb2303..92dfa30 100644 --- a/.github/workflows/security-audit.yaml +++ b/.github/workflows/security-audit.yaml @@ -7,7 +7,6 @@ on: push: paths: - "**/Cargo.toml" - - "**/Cargo.lock" - "**/deny.toml" pull_request: diff --git a/.gitignore b/.gitignore index 0f004fb..8adaa53 100644 --- a/.gitignore +++ b/.gitignore @@ -1,15 +1,9 @@ -# File created using '.gitignore Generator' for Visual Studio Code: https://bit.ly/vscode-gig -# Created by https://www.toptal.com/developers/gitignore/api/visualstudiocode,macos,rust -# Edit at https://www.toptal.com/developers/gitignore?templates=visualstudiocode,macos,rust - -### macOS ### # General .DS_Store .AppleDouble .LSOverride - -# Icon must end with two \r -Icon +Icon[ +] # Thumbnails ._* @@ -30,19 +24,10 @@ Network Trash Folder Temporary Items .apdisk -### macOS Patch ### -# iCloud generated files -*.icloud - -### Rust ### # Generated by Cargo # will have compiled files and executables -debug/ -target/ - -# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries -# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html -# Cargo.lock +debug +target # These are backup files generated by rustfmt **/*.rs.bk @@ -50,26 +35,31 @@ target/ # MSVC Windows builds of rustc generate these, which store debugging information *.pdb -### VisualStudioCode ### +# Generated by cargo mutants +# Contains mutation testing data +**/mutants.out*/ + +# RustRover +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can 
be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + .vscode/* !.vscode/settings.json !.vscode/tasks.json !.vscode/launch.json !.vscode/extensions.json !.vscode/*.code-snippets - -# Local History for Visual Studio Code -.history/ +!*.code-workspace # Built Visual Studio Code Extensions *.vsix -### VisualStudioCode Patch ### -# Ignore all local history of files -.history -.ionide - -# End of https://www.toptal.com/developers/gitignore/api/visualstudiocode,macos,rust +# Ignore build outputs from performing a nix-build or `nix build` command +result +result-* -# Custom rules (everything added below won't be overriden by 'Generate .gitignore File' if you use 'Update' option) -tmp/ +# Ignore automatically generated direnv output +.direnv diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..aed37df --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,4 @@ +{ + "rust-analyzer.server.path": "rust-analyzer", + "claudeCode.selectedModel": "sonnet" +} \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 3c190d9..5500afe 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -234,9 +234,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.98" +version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" +checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100" [[package]] name = "async-channel" @@ -371,9 +371,9 @@ dependencies = [ [[package]] name = "async-std" -version = "1.13.1" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "730294c1c08c2e0f85759590518f6333f0d5a0a766a27d519c1b244c3dfd8a24" +checksum = "2c8e079a4ab67ae52b7403632e4618815d6db36d2a010cfe41b02c1b1578f93b" dependencies = [ "async-channel 1.9.0", 
"async-global-executor", @@ -415,7 +415,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -426,13 +426,13 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" [[package]] name = "async-trait" -version = "0.1.88" +version = "0.1.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -462,40 +462,6 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" -[[package]] -name = "axum" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5" -dependencies = [ - "axum-core", - "bytes", - "form_urlencoded", - "futures-util", - "http 1.3.1", - "http-body 1.0.1", - "http-body-util", - "hyper 1.6.0", - "hyper-util", - "itoa", - "matchit", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite 0.2.16", - "rustversion", - "serde", - "serde_json", - "serde_path_to_error", - "serde_urlencoded", - "sync_wrapper", - "tokio", - "tower", - "tower-layer", - "tower-service", - "tracing", -] - [[package]] name = "axum-core" version = "0.5.2" @@ -513,17 +479,6 @@ dependencies = [ "sync_wrapper", "tower-layer", "tower-service", - "tracing", -] - -[[package]] -name = "axum-htmx" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2d4a162b7621482903309c0e8a990a866728b6312350147181230f840252314" -dependencies = [ - "axum-core", - "http 1.3.1", ] [[package]] @@ -567,9 +522,9 @@ checksum = 
"383d29d513d8764dcdc42ea295d979eb99c3c9f00607b3692cf68a431f7dca72" [[package]] name = "bitflags" -version = "2.9.1" +version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" +checksum = "6a65b545ab31d687cff52899d4890855fec459eb6afe0da6417b8a18da87aa29" [[package]] name = "block-buffer" @@ -637,9 +592,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.32" +version = "1.2.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2352e5597e9c544d5e6d9c95190d5d27738ade584fa8db0a16e130e5c2b5296e" +checksum = "3ee0f8803222ba5a7e2777dd72ca451868909b1ac410621b676adf07280e9b5f" dependencies = [ "shlex", ] @@ -805,7 +760,7 @@ checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "unicode-xid", ] @@ -839,7 +794,7 @@ dependencies = [ "proc-macro2", "proc-macro2-diagnostics", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -875,7 +830,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -910,7 +865,7 @@ checksum = "67c78a4d8fdf9953a5c9d458f9efe940fd97a0cab0941c075a813ac594733827" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -955,7 +910,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" dependencies = [ "libc", - "windows-sys 0.60.2", + "windows-sys 0.52.0", ] [[package]] @@ -1107,7 +1062,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -1230,9 +1185,9 @@ checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name 
= "glob" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" [[package]] name = "gloo-timers" @@ -1330,18 +1285,6 @@ dependencies = [ "utf8-width", ] -[[package]] -name = "htmx-rsx" -version = "0.0.0" -dependencies = [ - "anyhow", - "axum", - "axum-htmx", - "hypertext", - "tokio", - "tower-http", -] - [[package]] name = "http" version = "0.2.12" @@ -1410,12 +1353,6 @@ dependencies = [ "log", ] -[[package]] -name = "http-range-header" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c" - [[package]] name = "http-types" version = "2.12.0" @@ -1475,13 +1412,14 @@ dependencies = [ [[package]] name = "hyper" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" +checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" dependencies = [ + "atomic-waker", "bytes", "futures-channel", - "futures-util", + "futures-core", "h2", "http 1.3.1", "http-body 1.0.1", @@ -1489,6 +1427,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite 0.2.16", + "pin-utils", "smallvec", "tokio", "want", @@ -1504,10 +1443,9 @@ dependencies = [ "futures-core", "http 1.3.1", "http-body 1.0.1", - "hyper 1.6.0", + "hyper 1.7.0", "pin-project-lite 0.2.16", "tokio", - "tower-service", ] [[package]] @@ -1517,7 +1455,7 @@ dependencies = [ "actix-web", "axum-core", "html-escape", - "hypertext-macros", + "hypertext-proc-macros", "itoa", "ntex", "poem", @@ -1531,11 +1469,28 @@ dependencies = [ [[package]] name = "hypertext-macros" version = "0.12.1" +dependencies = [ + "hypertext-proc-macros", +] + +[[package]] +name = "hypertext-proc-macros" 
+version = "0.12.1" dependencies = [ "html-escape", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", +] + +[[package]] +name = "hypertext-syntax" +version = "0.12.1" +dependencies = [ + "html-escape", + "proc-macro2", + "quote", + "syn 2.0.106", ] [[package]] @@ -1702,7 +1657,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" dependencies = [ "hermit-abi", "libc", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -1744,9 +1699,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.174" +version = "0.2.175" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776" +checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" [[package]] name = "linux-raw-sys" @@ -1820,12 +1775,6 @@ dependencies = [ "regex-automata 0.1.10", ] -[[package]] -name = "matchit" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" - [[package]] name = "memchr" version = "2.7.5" @@ -1959,7 +1908,7 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "thiserror 2.0.12", + "thiserror 2.0.15", "variadics_please", ] @@ -2002,7 +1951,7 @@ dependencies = [ "ntex-service", "ntex-util", "pin-project-lite 0.2.16", - "thiserror 2.0.12", + "thiserror 2.0.15", ] [[package]] @@ -2018,7 +1967,7 @@ dependencies = [ "log", "ntex-bytes", "serde", - "thiserror 2.0.12", + "thiserror 2.0.15", ] [[package]] @@ -2049,9 +1998,9 @@ dependencies = [ [[package]] name = "ntex-net" -version = "2.7.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef3d6829da93773089c38939803dd5cc348d0743b60b1b079e5529e3ee88d1fe" +checksum = "8e25de68e90b2f1f15a765366e170b0d5b2d2fe0f81db03673505998a009f991" dependencies = [ "bitflags", 
"cfg-if", @@ -2063,7 +2012,7 @@ dependencies = [ "ntex-rt", "ntex-service", "ntex-util", - "thiserror 2.0.12", + "thiserror 2.0.15", ] [[package]] @@ -2153,7 +2102,7 @@ dependencies = [ "ntex-service", "pin-project-lite 0.2.16", "slab", - "thiserror 2.0.12", + "thiserror 2.0.15", ] [[package]] @@ -2264,7 +2213,7 @@ dependencies = [ "proc-macro2", "proc-macro2-diagnostics", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -2290,7 +2239,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -2333,7 +2282,7 @@ dependencies = [ "headers", "http 1.3.1", "http-body-util", - "hyper 1.6.0", + "hyper 1.7.0", "hyper-util", "mime", "nix", @@ -2348,7 +2297,7 @@ dependencies = [ "serde_urlencoded", "smallvec", "sync_wrapper", - "thiserror 2.0.12", + "thiserror 2.0.15", "tokio", "tokio-util", "tracing", @@ -2364,7 +2313,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -2433,9 +2382,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.95" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" dependencies = [ "unicode-ident", ] @@ -2448,7 +2397,7 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "version_check", "yansi", ] @@ -2594,7 +2543,7 @@ checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -2705,7 +2654,7 @@ dependencies = [ "proc-macro2", "quote", "rocket_http", - "syn 2.0.104", + "syn 2.0.106", 
"unicode-xid", "version_check", ] @@ -2768,7 +2717,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys", - "windows-sys 0.60.2", + "windows-sys 0.52.0", ] [[package]] @@ -2785,20 +2734,20 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "salvo-serde-util" -version = "0.81.0" +version = "0.82.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b2cd0a2b0073c85f10eefa3722dbae18e3c2c9a2567f9d840ecc712127e30c" +checksum = "290bc58aceb637aa530eddd0e4a01c0d0ca054ba9627ba4e009ad15c0ff68988" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] name = "salvo_core" -version = "0.81.0" +version = "0.82.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b0978458bee0102c6c337040ea0b13c497ff1e31015c49c2cc9a387f813e2c1" +checksum = "e246c1580eb342847eebb7a2faf4042e26b403d36a3184f4b1670751d551fee5" dependencies = [ "async-trait", "base64 0.22.1", @@ -2810,7 +2759,7 @@ dependencies = [ "headers", "http 1.3.1", "http-body-util", - "hyper 1.6.0", + "hyper 1.7.0", "hyper-util", "indexmap", "mime", @@ -2829,7 +2778,7 @@ dependencies = [ "serde_json", "sync_wrapper", "tempfile", - "thiserror 2.0.12", + "thiserror 2.0.15", "tokio", "tokio-util", "tracing", @@ -2837,16 +2786,16 @@ dependencies = [ [[package]] name = "salvo_macros" -version = "0.81.0" +version = "0.82.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b74585140d4b656e51bd95e985c7acb8e57fe3d25f2dbb7c2b21fec30a8a91ef" +checksum = "be541bc31ec81ac1902824e5ecb9daba086b7ee8a4b3ce37c8104d01ac3b3961" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", "regex", "salvo-serde-util", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -2905,7 +2854,7 @@ checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -2919,9 +2868,9 @@ 
dependencies = [ [[package]] name = "serde_json" -version = "1.0.142" +version = "1.0.143" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7" +checksum = "d401abef1d108fbd9cbaebc3e46611f4b1021f714a0597a71f41ee463f5f4a5a" dependencies = [ "itoa", "memchr", @@ -2929,16 +2878,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_path_to_error" -version = "0.1.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59fab13f937fa393d08645bf3a84bdfe86e296747b506ada67bb15f10f218b2a" -dependencies = [ - "itoa", - "serde", -] - [[package]] name = "serde_qs" version = "0.8.5" @@ -3261,9 +3200,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.104" +version = "2.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" dependencies = [ "proc-macro2", "quote", @@ -3287,7 +3226,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -3300,7 +3239,7 @@ dependencies = [ "getrandom 0.3.3", "once_cell", "rustix", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -3314,11 +3253,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.12" +version = "2.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +checksum = "80d76d3f064b981389ecb4b6b7f45a0bf9fdac1d5b9204c7bd6714fecc302850" dependencies = [ - "thiserror-impl 2.0.12", + "thiserror-impl 2.0.15", ] [[package]] @@ -3329,18 +3268,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", 
] [[package]] name = "thiserror-impl" -version = "2.0.12" +version = "2.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +checksum = "44d29feb33e986b6ea906bd9c3559a856983f92371b3eaa5e83782a351623de0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -3479,7 +3418,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -3547,48 +3486,6 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" -[[package]] -name = "tower" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" -dependencies = [ - "futures-core", - "futures-util", - "pin-project-lite 0.2.16", - "sync_wrapper", - "tokio", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower-http" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" -dependencies = [ - "bitflags", - "bytes", - "futures-core", - "futures-util", - "http 1.3.1", - "http-body 1.0.1", - "http-body-util", - "http-range-header", - "httpdate", - "mime", - "mime_guess", - "percent-encoding", - "pin-project-lite 0.2.16", - "tokio", - "tokio-util", - "tower-layer", - "tower-service", - "tracing", -] - [[package]] name = "tower-layer" version = "0.3.3" @@ -3621,7 +3518,7 @@ checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -3802,7 +3699,7 @@ checksum = "41b6d82be61465f97d42bd1d15bf20f3b0a3a0905018f38f9d6f6962055b0b5c" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -3838,7 +3735,7 @@ dependencies = [ "headers", "http 1.3.1", "http-body-util", - "hyper 1.6.0", + "hyper 1.7.0", "log", "mime", "mime_guess", @@ -3897,7 +3794,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "wasm-bindgen-shared", ] @@ -3932,7 +3829,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -4271,7 +4168,7 @@ checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "synstructure", ] @@ -4292,7 +4189,7 @@ checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -4312,7 +4209,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "synstructure", ] @@ -4346,5 +4243,5 @@ checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] diff --git a/Cargo.toml b/Cargo.toml index baca3be..c24f545 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,33 +1,37 @@ [workspace] resolver = "2" -members = ["examples/*", "hypertext", "hypertext-macros"] -default-members = ["hypertext", "hypertext-macros"] +members = ["crates/*"] +default-members = ["crates/*"] [workspace.package] version = "0.12.1" -authors = ["Vidhan Bhatt "] edition = "2024" -description = "A blazing fast type-checked HTML macro crate." +description = "A blazing fast type checked HTML macro crate." 
readme = "README.md" homepage = "https://github.com/vidhanio/hypertext" repository = "https://github.com/vidhanio/hypertext" license = "MIT" -keywords = ["html", "macro"] +keywords = ["html", "macro", "maud", "rsx"] categories = ["template-engine"] +[workspace.metadata.crane] +name = "hypertext" + [workspace.dependencies] +hypertext = { version = "0.12.1", path = "./crates/hypertext" } +hypertext-macros = { version = "0.12.1", path = "./crates/hypertext-macros" } +hypertext-proc-macros = { version = "0.12.1", path = "./crates/hypertext-proc-macros" } +hypertext-syntax = { version = "0.12.1", path = "./crates/hypertext-syntax" } + html-escape = { version = "0.2", default-features = false } -hypertext-macros = { version = "0.12.1", path = "./hypertext-macros" } [workspace.lints] [workspace.lints.clippy] cargo = { level = "warn", priority = -1 } nursery = { level = "warn", priority = -1 } pedantic = { level = "warn", priority = -1 } -too_long_first_doc_paragraph = "allow" [workspace.lints.rust] missing_copy_implementations = "warn" missing_debug_implementations = "warn" missing_docs = "warn" - diff --git a/README.md b/README.md index 94a7813..45dfb69 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,10 @@ # `hypertext` -A blazing fast type-checked HTML macro crate. +A blazing fast type checked HTML macro crate. 
## Features -- Type checking for element names/attributes, including extensible support for custom frameworks like [htmx](https://htmx.org/) and [Alpine.js](https://alpinejs.dev/) +- Compile-time type checking for element names/attributes, including extensible support for custom frameworks like [htmx](https://htmx.org/) and [Alpine.js](https://alpinejs.dev/) - `#![no_std]` support - [Extremely fast](https://github.com/askama-rs/template-benchmark#benchmark-results), using lazy rendering to minimize allocation diff --git a/crates/hypertext-macros/Cargo.toml b/crates/hypertext-macros/Cargo.toml new file mode 100644 index 0000000..8f02e49 --- /dev/null +++ b/crates/hypertext-macros/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "hypertext-macros" +version.workspace = true +edition.workspace = true +description.workspace = true +documentation = "https://docs.rs/hypertext-macros" +readme.workspace = true +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +hypertext-proc-macros.workspace = true + +[lints] +workspace = true diff --git a/crates/hypertext-macros/src/lib.rs b/crates/hypertext-macros/src/lib.rs new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/crates/hypertext-macros/src/lib.rs @@ -0,0 +1 @@ + diff --git a/hypertext-macros/Cargo.toml b/crates/hypertext-proc-macros/Cargo.toml similarity index 80% rename from hypertext-macros/Cargo.toml rename to crates/hypertext-proc-macros/Cargo.toml index d7ee51d..6e00fbb 100644 --- a/hypertext-macros/Cargo.toml +++ b/crates/hypertext-proc-macros/Cargo.toml @@ -1,10 +1,9 @@ [package] -name = "hypertext-macros" +name = "hypertext-proc-macros" version.workspace = true -authors.workspace = true edition.workspace = true description.workspace = true -documentation = "https://docs.rs/hypertext-macros" +documentation = "https://docs.rs/hypertext-proc-macros" readme.workspace = true homepage.workspace = true 
repository.workspace = true @@ -23,4 +22,3 @@ syn = { version = "2", features = ["extra-traits", "full"] } [lints] workspace = true - diff --git a/hypertext-macros/src/derive.rs b/crates/hypertext-proc-macros/src/derive.rs similarity index 64% rename from hypertext-macros/src/derive.rs rename to crates/hypertext-proc-macros/src/derive.rs index e023353..e451830 100644 --- a/hypertext-macros/src/derive.rs +++ b/crates/hypertext-proc-macros/src/derive.rs @@ -3,16 +3,16 @@ use quote::quote; use syn::{DeriveInput, Error, spanned::Spanned}; use crate::{ - AttributeValueNode, Context, Document, Maud, Nodes, Rsx, - html::{self, generate::Generator}, + AttributeValue, Config, Document, Many, Maud, Rsx, Semantics, + html::{Context, generate::Generator}, }; #[allow(clippy::needless_pass_by_value)] pub fn renderable(input: DeriveInput) -> syn::Result { - match (renderable_element(&input), attribute_renderable(&input)) { + match (renderable_node(&input), renderable_attribute(&input)) { (Ok(None), Ok(None)) => Err(Error::new( Span::call_site(), - "expected at least one of `maud`, `rsx`, or `attribute` attributes", + "expected at least one of `#[maud(...)]`, `#[rsx(...)]`, or `#[attribute(...)]`", )), (Ok(element), Ok(attribute)) => Ok(quote! 
{ #element @@ -26,7 +26,7 @@ pub fn renderable(input: DeriveInput) -> syn::Result { } } -fn renderable_element(input: &DeriveInput) -> syn::Result> { +fn renderable_node(input: &DeriveInput) -> syn::Result> { let mut attrs = input .attrs .iter() @@ -34,14 +34,14 @@ fn renderable_element(input: &DeriveInput) -> syn::Result> { if attr.path().is_ident("maud") { Some(( attr, - html::generate::lazy::> - as fn(TokenStream, bool) -> syn::Result, + (|tokens| Config::Lazy(Semantics::Move).generate::>(tokens)) + as fn(_) -> _, )) } else if attr.path().is_ident("rsx") { Some(( attr, - html::generate::lazy::> - as fn(TokenStream, bool) -> syn::Result, + (|tokens| Config::Lazy(Semantics::Move).generate::>(tokens)) + as fn(_) -> _, )) } else { None @@ -49,17 +49,17 @@ fn renderable_element(input: &DeriveInput) -> syn::Result> { }) .peekable(); - let (lazy_fn, tokens) = match (attrs.next(), attrs.peek()) { + let (generate_fn, tokens) = match (attrs.next(), attrs.peek()) { (Some((attr, f)), None) => (f, attr.meta.require_list()?.tokens.clone()), (Some((attr, _)), Some(_)) => { let mut error = Error::new( attr.span(), - "cannot have multiple `maud` or `rsx` attributes", + "cannot have multiple `#[maud(...)]` or `#[rsx(...)]` attributes", ); for (attr, _) in attrs { - error.combine(syn::Error::new( + error.combine(Error::new( attr.span(), - "cannot have multiple `maud` or `rsx` attributes", + "cannot have multiple `#[maud(...)]` or `#[rsx(...)]` attributes", )); } return Err(error); @@ -69,7 +69,7 @@ fn renderable_element(input: &DeriveInput) -> syn::Result> { } }; - let lazy = lazy_fn(tokens, true)?; + let lazy = generate_fn(tokens)?; let name = &input.ident; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); @@ -78,14 +78,14 @@ fn renderable_element(input: &DeriveInput) -> syn::Result> { #[automatically_derived] impl #impl_generics ::hypertext::Renderable for #name #ty_generics #where_clause { fn render_to(&self, #buffer_ident: &mut 
::hypertext::Buffer) { - ::hypertext::Renderable::render_to(&#lazy, #buffer_ident); + #buffer_ident.push(#lazy); } } }; Ok(Some(output)) } -fn attribute_renderable(input: &DeriveInput) -> syn::Result> { +fn renderable_attribute(input: &DeriveInput) -> syn::Result> { let mut attrs = input .attrs .iter() @@ -97,12 +97,12 @@ fn attribute_renderable(input: &DeriveInput) -> syn::Result> (Some(_), Some(_)) => { let mut error = Error::new( Span::call_site(), - "cannot have multiple `attribute` attributes", + "cannot have multiple `#[attribute(...)]` attributes", ); for attr in attrs { - error.combine(syn::Error::new( + error.combine(Error::new( attr.span(), - "cannot have multiple `attribute` attributes", + "cannot have multiple `#[attribute(...)]` attributes", )); } return Err(error); @@ -112,23 +112,20 @@ fn attribute_renderable(input: &DeriveInput) -> syn::Result> } }; - let lazy = html::generate::lazy::>(tokens, true)?; + let lazy = Config::Lazy(Semantics::Move).generate::>(tokens)?; let name = &input.ident; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let buffer_ident = Generator::buffer_ident(); - let context_marker = Context::AttributeValue.marker_type(); + let ctx = AttributeValue::marker_type(); let output = quote! 
{ #[automatically_derived] - impl #impl_generics ::hypertext::Renderable<#context_marker> for #name #ty_generics + impl #impl_generics ::hypertext::Renderable<#ctx> for #name #ty_generics #where_clause { fn render_to( &self, #buffer_ident: &mut ::hypertext::AttributeBuffer, ) { - ::hypertext::Renderable::render_to( - &#lazy, - #buffer_ident, - ); + #buffer_ident.push(#lazy); } } }; diff --git a/hypertext-macros/src/html/basics.rs b/crates/hypertext-proc-macros/src/html/basics.rs similarity index 90% rename from hypertext-macros/src/html/basics.rs rename to crates/hypertext-proc-macros/src/html/basics.rs index 0c68feb..f41cfcf 100644 --- a/hypertext-macros/src/html/basics.rs +++ b/crates/hypertext-proc-macros/src/html/basics.rs @@ -3,14 +3,14 @@ use std::fmt::{self, Display, Formatter, Write}; use proc_macro2::{Span, TokenStream}; use quote::ToTokens; use syn::{ - Ident, LitBool, LitChar, LitFloat, LitInt, LitStr, Token, + Error, Ident, LitBool, LitChar, LitFloat, LitInt, LitStr, Token, ext::IdentExt, parse::{Parse, ParseStream}, spanned::Spanned, }; #[derive(PartialEq, Eq, Clone)] -pub struct UnquotedName(Vec); +pub struct UnquotedName(pub Vec); impl UnquotedName { pub fn ident_string(&self) -> String { @@ -139,7 +139,7 @@ impl Parse for UnquotedName { } #[derive(Clone, PartialEq, Eq)] -enum NameFragment { +pub enum NameFragment { Ident(Ident), Int(LitInt), Hyphen(Token![-]), @@ -213,24 +213,6 @@ impl Literal { Self::Char(lit) => LitStr::new(&lit.value().to_string(), lit.span()), } } - - pub fn parse_any(input: ParseStream) -> syn::Result { - let lookahead = input.lookahead1(); - - if lookahead.peek(LitStr) { - input.parse().map(Self::Str) - } else if lookahead.peek(LitInt) { - input.parse().map(Self::Int) - } else if lookahead.peek(LitBool) { - input.parse().map(Self::Bool) - } else if lookahead.peek(LitFloat) { - input.parse().map(Self::Float) - } else if lookahead.peek(LitChar) { - input.parse().map(Self::Char) - } else { - Err(lookahead.error()) - } - } } impl 
Parse for Literal { @@ -242,7 +224,7 @@ impl Parse for Literal { if !lit.suffix().is_empty() { let suffix = lit.suffix(); let next_quote = if input.peek(LitStr) { r#"\""# } else { "" }; - return Err(syn::Error::new_spanned( + return Err(Error::new_spanned( &lit, format!( r#"string suffixes are not allowed in literals (you probably meant `"...\"{suffix}{next_quote}..."` or `"..." {suffix}`)"#, @@ -254,7 +236,7 @@ impl Parse for Literal { } else if lookahead.peek(LitInt) { let lit = input.parse::()?; if !lit.suffix().is_empty() { - return Err(syn::Error::new_spanned( + return Err(Error::new_spanned( &lit, "integer literals cannot have suffixes", )); @@ -265,7 +247,7 @@ impl Parse for Literal { } else if lookahead.peek(LitFloat) { let lit = input.parse::()?; if !lit.suffix().is_empty() { - return Err(syn::Error::new_spanned( + return Err(Error::new_spanned( &lit, "float literals cannot have suffixes", )); @@ -274,7 +256,7 @@ impl Parse for Literal { } else if lookahead.peek(LitChar) { let lit = input.parse::()?; if !lit.suffix().is_empty() { - return Err(syn::Error::new_spanned( + return Err(Error::new_spanned( &lit, "character literals cannot have suffixes", )); diff --git a/hypertext-macros/src/html/component.rs b/crates/hypertext-proc-macros/src/html/component.rs similarity index 76% rename from hypertext-macros/src/html/component.rs rename to crates/hypertext-proc-macros/src/html/component.rs index d37d758..06034f0 100644 --- a/hypertext-macros/src/html/component.rs +++ b/crates/hypertext-proc-macros/src/html/component.rs @@ -1,14 +1,14 @@ use proc_macro2::TokenStream; use quote::{ToTokens, quote, quote_spanned}; use syn::{ - Ident, LitBool, LitChar, LitFloat, LitInt, LitStr, Token, + Ident, Lit, Token, parse::{Parse, ParseStream}, spanned::Spanned, token::{Brace, Paren}, }; -use super::{ElementBody, Generate, Generator, Literal, ParenExpr, Syntax}; -use crate::{AttributeValueNode, Context}; +use super::{ElementBody, Generate, Generator, ParenExpr, Syntax}; +use 
crate::{AttributeValue, html::Node}; pub struct Component { pub name: Ident, @@ -18,14 +18,13 @@ pub struct Component { } impl Generate for Component { - const CONTEXT: Context = Context::Node; + type Context = Node; fn generate(&self, g: &mut Generator) { let fields = self.attrs.iter().map(|attr| { let name = &attr.name; - let value = &attr.value_expr(); - - quote!(#name: #value,) + attr.value_expr() + .map_or_else(|| quote!(#name,), |value| quote!(#name: #value,)) }); let children = match &self.body { @@ -68,18 +67,18 @@ impl Generate for Component { } }; - g.push_expr(Paren::default(), Self::CONTEXT, &init); + g.push_expr::(Paren::default(), &init); } } pub struct ComponentAttribute { name: Ident, - value: ComponentAttributeValue, + value: Option, } impl ComponentAttribute { - fn value_expr(&self) -> TokenStream { - match &self.value { + fn value_expr(&self) -> Option { + self.value.as_ref().map(|value| match value { ComponentAttributeValue::Literal(lit) => lit.to_token_stream(), ComponentAttributeValue::Ident(ident) => ident.to_token_stream(), ComponentAttributeValue::Expr(expr) => { @@ -91,7 +90,7 @@ impl ComponentAttribute { tokens } - } + }) } } @@ -100,31 +99,30 @@ impl Parse for ComponentAttribute { Ok(Self { name: input.parse()?, value: { - input.parse::()?; + if input.peek(Token![=]) { + input.parse::()?; - input.parse()? + Some(input.parse()?) 
+ } else { + None + } }, }) } } pub enum ComponentAttributeValue { - Literal(Literal), + Literal(Lit), Ident(Ident), - Expr(ParenExpr), + Expr(ParenExpr), } impl Parse for ComponentAttributeValue { fn parse(input: ParseStream) -> syn::Result { let lookahead = input.lookahead1(); - if lookahead.peek(LitStr) - || lookahead.peek(LitInt) - || lookahead.peek(LitBool) - || lookahead.peek(LitFloat) - || lookahead.peek(LitChar) - { - input.call(Literal::parse_any).map(Self::Literal) + if lookahead.peek(Lit) { + input.parse().map(Self::Literal) } else if lookahead.peek(Ident) { input.parse().map(Self::Ident) } else if lookahead.peek(Paren) { diff --git a/hypertext-macros/src/html/control.rs b/crates/hypertext-proc-macros/src/html/control.rs similarity index 68% rename from hypertext-macros/src/html/control.rs rename to crates/hypertext-proc-macros/src/html/control.rs index 4e9dea9..c69416f 100644 --- a/hypertext-macros/src/html/control.rs +++ b/crates/hypertext-proc-macros/src/html/control.rs @@ -1,23 +1,24 @@ +use std::convert::Infallible; + use proc_macro2::TokenStream; use quote::{ToTokens, quote}; use syn::{ - Expr, Local, Pat, Stmt, Token, braced, + Expr, Pat, PatType, Token, braced, parse::{Parse, ParseStream}, token::Brace, }; -use super::{AnyBlock, Generate, Generator, Node, Nodes}; -use crate::Context; +use super::{AnyBlock, Context, Generate, Generator, Many}; -pub enum Control { +pub enum Control { Let(Let), - If(If), - For(For), - While(While), - Match(Match), + If(If), + For(For), + While(While), + Match(Match), } -impl Parse for Control { +impl Parse for Control { fn parse(input: ParseStream) -> syn::Result { input.parse::()?; @@ -39,8 +40,8 @@ impl Parse for Control { } } -impl Generate for Control { - const CONTEXT: Context = N::CONTEXT; +impl Generate for Control { + type Context = C; fn generate(&self, g: &mut Generator) { match self { @@ -53,57 +54,89 @@ impl Generate for Control { } } -pub struct Let(Local); +pub struct Let { + let_token: Token![let], + 
pat: Pat, + init: Option<(Token![=], Expr)>, + semi_token: Token![;], +} impl Parse for Let { fn parse(input: ParseStream) -> syn::Result { - let local = match input.parse()? { - Stmt::Local(local) => local, - stmt => return Err(syn::Error::new_spanned(stmt, "expected `let` statement")), - }; - - Ok(Self(local)) + Ok(Self { + let_token: input.parse()?, + pat: { + let pat = input.call(Pat::parse_single)?; + if input.peek(Token![:]) { + Pat::Type(PatType { + attrs: Vec::new(), + pat: Box::new(pat), + colon_token: input.parse()?, + ty: input.parse()?, + }) + } else { + pat + } + }, + init: if input.peek(Token![=]) { + Some((input.parse()?, input.parse()?)) + } else { + None + }, + semi_token: input.parse()?, + }) } } impl Generate for Let { - const CONTEXT: Context = Context::Node; + type Context = Infallible; fn generate(&self, g: &mut Generator) { - g.push_stmt(&self.0); + let let_token = self.let_token; + let pat = &self.pat; + let (eq_token, expr) = self + .init + .as_ref() + .map(|(eq_token, expr)| (eq_token, expr)) + .unzip(); + let semi_token = self.semi_token; + + g.push_stmt(quote! 
{ + #let_token #pat #eq_token #expr #semi_token + }); } } -pub struct ControlBlock { +pub struct ControlBlock { brace_token: Brace, - nodes: Nodes, + children: Many, } -impl ControlBlock { +impl ControlBlock { fn block(&self, g: &mut Generator) -> AnyBlock { - self.nodes.block(g, self.brace_token) + self.children.block(g, self.brace_token) } } -impl Parse for ControlBlock { +impl Parse for ControlBlock { fn parse(input: ParseStream) -> syn::Result { let content; Ok(Self { brace_token: braced!(content in input), - nodes: content.parse()?, + children: content.parse()?, }) } } -pub struct If { +pub struct If { if_token: Token![if], cond: Expr, - then_block: ControlBlock, - else_branch: Option<(Token![else], Box>)>, + then_block: ControlBlock, + else_branch: Option<(Token![else], Box>)>, } -impl Parse for If { +impl Parse for If { fn parse(input: ParseStream) -> syn::Result { Ok(Self { if_token: input.parse()?, @@ -120,11 +153,11 @@ impl Parse for If { } } -impl Generate for If { - const CONTEXT: Context = N::CONTEXT; +impl Generate for If { + type Context = C; fn generate(&self, g: &mut Generator) { - fn to_expr(if_: &If, g: &mut Generator) -> TokenStream { + fn to_expr(if_: &If, g: &mut Generator) -> TokenStream { let if_token = if_.if_token; let cond = &if_.cond; let then_block = if_.then_block.block(g); @@ -152,12 +185,12 @@ impl Generate for If { } } -pub enum ControlIfOrBlock { - If(If), - Block(ControlBlock), +pub enum ControlIfOrBlock { + If(If), + Block(ControlBlock), } -impl Parse for ControlIfOrBlock { +impl Parse for ControlIfOrBlock { fn parse(input: ParseStream) -> syn::Result { let lookahead = input.lookahead1(); @@ -171,15 +204,15 @@ impl Parse for ControlIfOrBlock { } } -pub struct For { +pub struct For { for_token: Token![for], pat: Pat, in_token: Token![in], expr: Expr, - block: ControlBlock, + block: ControlBlock, } -impl Parse for For { +impl Parse for For { fn parse(input: ParseStream) -> syn::Result { Ok(Self { for_token: input.parse()?, @@ 
-191,8 +224,8 @@ impl Parse for For { } } -impl Generate for For { - const CONTEXT: Context = N::CONTEXT; +impl Generate for For { + type Context = C; fn generate(&self, g: &mut Generator) { let for_token = self.for_token; @@ -208,13 +241,13 @@ impl Generate for For { } } -pub struct While { +pub struct While { while_token: Token![while], cond: Expr, - block: ControlBlock, + block: ControlBlock, } -impl Parse for While { +impl Parse for While { fn parse(input: ParseStream) -> syn::Result { Ok(Self { while_token: input.parse()?, @@ -224,8 +257,8 @@ impl Parse for While { } } -impl Generate for While { - const CONTEXT: Context = N::CONTEXT; +impl Generate for While { + type Context = C; fn generate(&self, g: &mut Generator) { let while_token = self.while_token; @@ -239,14 +272,14 @@ impl Generate for While { } } -pub struct Match { +pub struct Match { match_token: Token![match], expr: Expr, brace_token: Brace, - arms: Vec>, + arms: Vec>, } -impl Parse for Match { +impl Parse for Match { fn parse(input: ParseStream) -> syn::Result { let content; @@ -267,8 +300,8 @@ impl Parse for Match { } } -impl Generate for Match { - const CONTEXT: Context = N::CONTEXT; +impl Generate for Match { + type Context = C; fn generate(&self, g: &mut Generator) { let arms = self @@ -283,8 +316,8 @@ impl Generate for Match { let fat_arrow_token = arm.fat_arrow_token; let block = match &arm.body { MatchNodeArmBody::Block(block) => block.block(g), - MatchNodeArmBody::Node(node) => { - g.block_with(Brace::default(), |g| g.push(node)) + MatchNodeArmBody::Child(child) => { + g.block_with(Brace::default(), |g| g.push(child)) } }; let comma = arm.comma_token; @@ -305,15 +338,15 @@ impl Generate for Match { } } -pub struct MatchNodeArm { +pub struct MatchNodeArm { pat: Pat, guard: Option<(Token![if], Expr)>, fat_arrow_token: Token![=>], - body: MatchNodeArmBody, + body: MatchNodeArmBody, comma_token: Option, } -impl Parse for MatchNodeArm { +impl Parse for MatchNodeArm { fn parse(input: 
ParseStream) -> syn::Result { Ok(Self { pat: input.call(Pat::parse_multi_with_leading_vert)?, @@ -329,17 +362,17 @@ impl Parse for MatchNodeArm { } } -pub enum MatchNodeArmBody { - Block(ControlBlock), - Node(N), +pub enum MatchNodeArmBody { + Block(ControlBlock), + Child(C), } -impl Parse for MatchNodeArmBody { +impl Parse for MatchNodeArmBody { fn parse(input: ParseStream) -> syn::Result { if input.peek(Brace) { input.parse().map(Self::Block) } else { - input.parse().map(Self::Node) + input.parse().map(Self::Child) } } } diff --git a/crates/hypertext-proc-macros/src/html/generate.rs b/crates/hypertext-proc-macros/src/html/generate.rs new file mode 100644 index 0000000..da2a4b2 --- /dev/null +++ b/crates/hypertext-proc-macros/src/html/generate.rs @@ -0,0 +1,514 @@ +use std::{ + convert::Infallible, + env, fs, iter, + ops::{Deref, DerefMut}, + path::PathBuf, +}; + +use proc_macro2::{Ident, Span, TokenStream}; +use quote::{ToTokens, quote, quote_spanned}; +use syn::{ + Error, LitStr, Token, braced, custom_keyword, + parse::{Parse, ParseStream, Parser}, + token::{Brace, Paren}, +}; + +use super::UnquotedName; +use crate::html::Context; + +#[derive(Debug, Clone, Copy)] +pub enum Config { + Lazy(Semantics), + Simple, +} + +impl Config { + pub fn generate(self, tokens: TokenStream) -> syn::Result { + match self { + Self::Lazy(move_) => { + let mut g = Generator::new_closure(); + + let size_estimate = tokens.to_string().len(); + + g.push(parse_maybe_file::.parse2(tokens)?); + + let block = g.finish(); + + let buffer_ident = Generator::buffer_ident(); + + let ctx = T::Context::marker_type(); + + Ok(quote! 
{ + ::hypertext::Lazy::<_, #ctx>::dangerously_create( + #move_ |#buffer_ident: &mut ::hypertext::Buffer<#ctx>| { + #buffer_ident.dangerously_get_string().reserve(#size_estimate); + #block + } + ) + }) + } + Self::Simple => { + let mut g = Generator::new_static(); + + g.push(parse_maybe_file::.parse2(tokens)?); + + let literal = g.finish().to_token_stream(); + + let ctx = T::Context::marker_type(); + + Ok(quote! { + ::hypertext::Raw::<_, #ctx>::dangerously_create(#literal) + }) + } + } + } +} + +fn parse_maybe_file(input: ParseStream) -> syn::Result { + custom_keyword!(file); + + if input.peek(file) && input.peek2(Token![=]) { + input.parse::()?; + input.parse::()?; + let path_lit = input.parse::()?; + let path = PathBuf::from(path_lit.value()); + if path.is_absolute() { + return Err(Error::new_spanned( + path_lit, + "absolute paths are not allowed", + )); + } + let path = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()).join(path); + let contents = fs::read_to_string(&path).map_err(|e| { + Error::new_spanned( + &path_lit, + format!(r#"io error while reading "{}": {e}"#, path.display()), + ) + })?; + let tokens = contents + .parse::() + .map_err(|e| Error::new_spanned(path_lit, e))?; + + syn::parse2(tokens) + } else { + input.parse() + } +} + +#[derive(Debug, Clone, Copy)] +pub enum Semantics { + Move, + Borrow, +} + +impl ToTokens for Semantics { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Move => quote!(move).to_tokens(tokens), + Self::Borrow => {} + } + } +} + +pub struct Generator { + lazy: bool, + brace_token: Brace, + parts: Vec, + checks: Checks, +} + +impl Generator { + pub fn buffer_ident() -> Ident { + Ident::new("__hypertext_buffer", Span::mixed_site()) + } + + fn new_closure() -> Self { + Self::new_with_brace(true, Brace::default()) + } + + fn new_static() -> Self { + Self::new_with_brace(false, Brace::default()) + } + + const fn new_with_brace(lazy: bool, brace_token: Brace) -> Self { + Self { + lazy, + brace_token, 
+ parts: Vec::new(), + checks: Checks::new(), + } + } + + fn finish(self) -> AnyBlock { + let render = if self.lazy { + let buffer_ident = Self::buffer_ident(); + let mut stmts = TokenStream::new(); + + let mut parts = self.parts.into_iter(); + + while let Some(part) = parts.next() { + match part { + Part::Static(lit) => { + let mut dynamic_stmt = None; + let static_parts = + iter::once(lit).chain(parts.by_ref().map_while(|part| match part { + Part::Static(lit) => Some(lit), + Part::Dynamic(stmt) => { + dynamic_stmt = Some(stmt); + None + } + })); + + stmts.extend(quote! { + #buffer_ident.dangerously_get_string().push_str(::core::concat!(#(#static_parts),*)); + }); + stmts.extend(dynamic_stmt); + } + Part::Dynamic(stmt) => { + stmts.extend(stmt); + } + } + } + + stmts + } else { + let mut static_parts = Vec::new(); + let mut errors = TokenStream::new(); + + for part in self.parts { + match part { + Part::Static(lit) => static_parts.push(lit), + Part::Dynamic(stmt) => errors.extend( + Error::new_spanned(stmt, "simple evaluation cannot contain dynamic parts") + .to_compile_error(), + ), + } + } + + quote! { + #errors + ::core::concat!(#(#static_parts),*) + } + }; + + let checks = self.checks; + + AnyBlock { + brace_token: self.brace_token, + stmts: quote! 
{ + #checks + #render + }, + } + } + + pub fn block_with(&mut self, brace_token: Brace, f: impl FnOnce(&mut Self)) -> AnyBlock { + let mut g = Self::new_with_brace(true, brace_token); + + f(&mut g); + + self.checks.append(&mut g.checks); + + g.finish() + } + + pub fn push_in_block(&mut self, brace_token: Brace, f: impl FnOnce(&mut Self)) { + let block = self.block_with(brace_token, f); + self.push_stmt(block); + } + + pub fn push_str(&mut self, s: &'static str) { + self.push_spanned_str(s, Span::mixed_site()); + } + + pub fn push_spanned_str(&mut self, s: &'static str, span: Span) { + self.parts.push(Part::Static(LitStr::new(s, span))); + } + + pub fn push_escaped_lit(&mut self, lit: &LitStr) { + let value = lit.value(); + let escaped_value = C::escape(&value); + + self.parts + .push(Part::Static(LitStr::new(&escaped_value, lit.span()))); + } + + pub fn push_lits(&mut self, literals: Vec) { + for lit in literals { + self.parts.push(Part::Static(lit)); + } + } + + pub fn push_expr(&mut self, paren_token: Paren, expr: impl ToTokens) { + let buffer_ident = Self::buffer_ident(); + let ctx = C::marker_type(); + let buffer_expr = quote!(#buffer_ident.with_context::<#ctx>()); + + let mut paren_expr = TokenStream::new(); + paren_token.surround(&mut paren_expr, |tokens| expr.to_tokens(tokens)); + let reference = quote_spanned!(paren_token.span=> &); + self.push_stmt(quote! { + ::hypertext::Renderable::render_to( + #reference #paren_expr, + #buffer_expr + ); + }); + } + + pub fn push_stmt(&mut self, stmt: impl ToTokens) { + self.parts.push(Part::Dynamic(stmt.to_token_stream())); + } + + pub fn push_conditional(&mut self, cond: impl ToTokens, f: impl FnOnce(&mut Self)) { + let then_block = self.block_with(Brace::default(), f); + self.push_stmt(quote! 
{ + if #cond #then_block + }); + } + + pub fn push(&mut self, value: impl Generate) { + value.generate(self); + } + + pub fn record_element(&mut self, el_checks: ElementCheck) { + self.checks.push(el_checks); + } + + pub fn push_all(&mut self, values: impl IntoIterator) { + for value in values { + self.push(value); + } + } +} + +enum Part { + Static(LitStr), + Dynamic(TokenStream), +} + +pub trait Generate { + type Context: Context; + + fn generate(&self, g: &mut Generator); +} + +impl Generate for Infallible { + type Context = Self; + + fn generate(&self, _: &mut Generator) { + #[expect(clippy::uninhabited_references)] + match *self {} + } +} + +impl Generate for &T { + type Context = T::Context; + + fn generate(&self, g: &mut Generator) { + (*self).generate(g); + } +} + +struct Checks { + elements: Vec, +} + +impl Checks { + const fn new() -> Self { + Self { + elements: Vec::new(), + } + } + + fn append(&mut self, other: &mut Self) { + self.elements.append(&mut other.elements); + } +} + +impl ToTokens for Checks { + fn to_tokens(&self, tokens: &mut TokenStream) { + if self.is_empty() { + return; + } + + let checks = &self.elements; + + quote! 
{ + const _: fn() = || { + #[allow(unused_imports)] + use hypertext_elements::*; + + #[doc(hidden)] + fn check_element< + K: ::hypertext::validation::ElementKind + >(_: impl ::hypertext::validation::Element) {} + + #(#checks)* + }; + } + .to_tokens(tokens); + } +} + +impl Deref for Checks { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.elements + } +} + +impl DerefMut for Checks { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.elements + } +} + +pub struct ElementCheck { + ident: String, + kind: ElementKind, + opening_spans: Vec, + closing_spans: Vec, + attributes: Vec, +} + +impl ElementCheck { + pub fn new(el_name: &UnquotedName, element_kind: ElementKind) -> Self { + Self { + ident: el_name.ident_string(), + kind: element_kind, + opening_spans: el_name.spans(), + closing_spans: Vec::new(), + attributes: Vec::new(), + } + } + + pub fn set_closing_spans(&mut self, spans: Vec) { + self.closing_spans = spans; + } + + pub fn push_attribute(&mut self, attr: AttributeCheck) { + self.attributes.push(attr); + } +} + +impl ToTokens for ElementCheck { + fn to_tokens(&self, tokens: &mut TokenStream) { + let kind = self.kind; + + let el_checks = self + .opening_spans + .iter() + .chain(&self.closing_spans) + .map(|span| { + let el = Ident::new_raw(&self.ident, *span); + + quote! { + check_element::<#kind>(#el); + } + }); + + let el = Ident::new_raw( + &self.ident, + self.opening_spans + .first() + .copied() + .unwrap_or_else(Span::mixed_site), + ); + + let attr_checks = self + .attributes + .iter() + .map(|attr| attr.to_token_stream_with_el(&el)); + + quote! 
{ + #(#el_checks)* + #(#attr_checks)* + } + .to_tokens(tokens); + } +} + +#[derive(Debug, Clone, Copy)] +pub enum ElementKind { + Normal, + Void, +} + +impl ToTokens for ElementKind { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Normal => quote!(::hypertext::validation::Normal), + Self::Void => quote!(::hypertext::validation::Void), + } + .to_tokens(tokens); + } +} + +pub struct AttributeCheck { + kind: AttributeCheckKind, + ident: String, + spans: Vec, +} + +impl AttributeCheck { + pub const fn new(kind: AttributeCheckKind, ident: String, spans: Vec) -> Self { + Self { kind, ident, spans } + } + + fn to_token_stream_with_el(&self, el: &Ident) -> TokenStream { + let kind = &self.kind; + + self.spans + .iter() + .map(|span| { + let ident = Ident::new_raw(&self.ident, *span); + + quote! { + let _: #kind = <#el>::#ident; + } + }) + .collect() + } +} + +pub enum AttributeCheckKind { + Normal, + Namespace, + Symbol, +} + +impl ToTokens for AttributeCheckKind { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Normal => quote!(::hypertext::validation::Attribute), + Self::Namespace => quote!(::hypertext::validation::AttributeNamespace), + Self::Symbol => quote!(::hypertext::validation::AttributeSymbol), + } + .to_tokens(tokens); + } +} + +pub struct AnyBlock { + pub brace_token: Brace, + pub stmts: TokenStream, +} + +impl Parse for AnyBlock { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let content; + + Ok(Self { + brace_token: braced!(content in input), + stmts: content.parse()?, + }) + } +} + +impl ToTokens for AnyBlock { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.brace_token.surround(tokens, |tokens| { + self.stmts.to_tokens(tokens); + }); + } +} diff --git a/hypertext-macros/src/html/mod.rs b/crates/hypertext-proc-macros/src/html/mod.rs similarity index 86% rename from hypertext-macros/src/html/mod.rs rename to crates/hypertext-proc-macros/src/html/mod.rs index 56b410c..e67cce7 100644 
--- a/hypertext-macros/src/html/mod.rs +++ b/crates/hypertext-proc-macros/src/html/mod.rs @@ -6,7 +6,7 @@ mod control; pub mod generate; mod syntaxes; -use std::marker::PhantomData; +use std::{borrow::Cow, convert::Infallible, marker::PhantomData}; use proc_macro2::{Span, TokenStream}; use quote::{ToTokens, quote, quote_spanned}; @@ -30,7 +30,6 @@ use self::{ Generator, }, }; -use crate::Context; mod kw { use syn::LitStr; @@ -62,13 +61,32 @@ mod kw { pub trait Syntax {} -pub type Document = Nodes>; +pub type Document = Many>; -pub trait Node: Generate { +pub trait Context: Generate { fn is_control(&self) -> bool; + + fn marker_type() -> TokenStream; + + fn escape(s: &str) -> Cow<'_, str>; +} + +impl Context for Infallible { + fn is_control(&self) -> bool { + #[expect(clippy::uninhabited_references)] + match *self {} + } + + fn marker_type() -> TokenStream { + TokenStream::new() + } + + fn escape(s: &str) -> Cow<'_, str> { + Cow::Borrowed(s) + } } -pub enum ElementNode { +pub enum Node { Doctype(Doctype), Element(Element), Component(Component), @@ -80,21 +98,29 @@ pub enum ElementNode { Group(Group), } -impl Node for ElementNode { +impl Context for Node { fn is_control(&self) -> bool { matches!(self, Self::Control(_)) } + + fn marker_type() -> TokenStream { + quote!(::hypertext::context::Node) + } + + fn escape(s: &str) -> Cow<'_, str> { + html_escape::encode_text(s) + } } -impl Generate for ElementNode { - const CONTEXT: Context = Context::Node; +impl Generate for Node { + type Context = Self; fn generate(&self, g: &mut Generator) { match self { Self::Doctype(doctype) => g.push(doctype), Self::Element(element) => g.push(element), Self::Component(component) => g.push(component), - Self::Literal(lit) => g.push_escaped_lit(Self::CONTEXT, &lit.lit_str()), + Self::Literal(lit) => g.push_escaped_lit::(&lit.lit_str()), Self::Control(control) => g.push(control), Self::Expr(expr) => g.push(expr), Self::DisplayExpr(display_expr) => g.push(display_expr), @@ -114,7 +140,7 @@ 
pub struct Doctype { } impl Generate for Doctype { - const CONTEXT: Context = Context::Node; + type Context = Node; fn generate(&self, g: &mut Generator) { g.push_lits(vec![ @@ -128,13 +154,13 @@ impl Generate for Doctype { } } -pub struct ParenExpr { +pub struct ParenExpr { paren_token: Paren, expr: TokenStream, - phantom: PhantomData, + phantom: PhantomData, } -impl Parse for ParenExpr { +impl Parse for ParenExpr { fn parse(input: ParseStream) -> syn::Result { let content; @@ -146,15 +172,15 @@ impl Parse for ParenExpr { } } -impl Generate for ParenExpr { - const CONTEXT: Context = N::CONTEXT; +impl Generate for ParenExpr { + type Context = C; fn generate(&self, g: &mut Generator) { - g.push_expr(self.paren_token, Self::CONTEXT, &self.expr); + g.push_expr::(self.paren_token, &self.expr); } } -impl ToTokens for ParenExpr { +impl ToTokens for ParenExpr { fn to_tokens(&self, tokens: &mut TokenStream) { self.paren_token.surround(tokens, |tokens| { self.expr.to_tokens(tokens); @@ -162,12 +188,12 @@ impl ToTokens for ParenExpr { } } -pub struct DisplayExpr { +pub struct DisplayExpr { percent_token: Token![%], - paren_expr: ParenExpr, + paren_expr: ParenExpr, } -impl DisplayExpr { +impl DisplayExpr { fn wrapped_expr(&self) -> TokenStream { let wrapper = quote_spanned!(self.percent_token.span=> Displayed); let mut new_paren_expr = TokenStream::new(); @@ -182,7 +208,7 @@ impl DisplayExpr { } } -impl Parse for DisplayExpr { +impl Parse for DisplayExpr { fn parse(input: ParseStream) -> syn::Result { Ok(Self { percent_token: input.parse()?, @@ -191,24 +217,20 @@ impl Parse for DisplayExpr { } } -impl Generate for DisplayExpr { - const CONTEXT: Context = N::CONTEXT; +impl Generate for DisplayExpr { + type Context = C; fn generate(&self, g: &mut Generator) { - g.push_expr( - self.paren_expr.paren_token, - Self::CONTEXT, - self.wrapped_expr(), - ); + g.push_expr::(self.paren_expr.paren_token, self.wrapped_expr()); } } -pub struct DebugExpr { +pub struct DebugExpr { 
question_token: Token![?], - expr: ParenExpr, + expr: ParenExpr, } -impl DebugExpr { +impl DebugExpr { fn wrapped_expr(&self) -> TokenStream { let wrapper = quote_spanned!(self.question_token.span=> Debugged); let mut new_paren_expr = TokenStream::new(); @@ -223,7 +245,7 @@ impl DebugExpr { } } -impl Parse for DebugExpr { +impl Parse for DebugExpr { fn parse(input: ParseStream) -> syn::Result { Ok(Self { question_token: input.parse()?, @@ -232,17 +254,17 @@ impl Parse for DebugExpr { } } -impl Generate for DebugExpr { - const CONTEXT: Context = N::CONTEXT; +impl Generate for DebugExpr { + type Context = C; fn generate(&self, g: &mut Generator) { - g.push_expr(self.expr.paren_token, Self::CONTEXT, self.wrapped_expr()); + g.push_expr::(self.expr.paren_token, self.wrapped_expr()); } } -pub struct Group(Nodes); +pub struct Group(Many); -impl Parse for Group { +impl Parse for Group { fn parse(input: ParseStream) -> syn::Result { let content; braced!(content in input); @@ -251,17 +273,17 @@ impl Parse for Group { } } -impl Generate for Group { - const CONTEXT: Context = N::CONTEXT; +impl Generate for Group { + type Context = C; fn generate(&self, g: &mut Generator) { g.push(&self.0); } } -pub struct Nodes(Vec); +pub struct Many(Vec); -impl Nodes { +impl Many { fn block(&self, g: &mut Generator, brace_token: Brace) -> AnyBlock { g.block_with(brace_token, |g| { g.push_all(&self.0); @@ -269,25 +291,25 @@ impl Nodes { } } -impl Parse for Nodes { +impl Parse for Many { fn parse(input: ParseStream) -> syn::Result { Ok(Self({ - let mut nodes = Vec::new(); + let mut children = Vec::new(); while !input.is_empty() { - nodes.push(input.parse()?); + children.push(input.parse()?); } - nodes + children })) } } -impl Generate for Nodes { - const CONTEXT: Context = N::CONTEXT; +impl Generate for Many { + type Context = C; fn generate(&self, g: &mut Generator) { - if self.0.iter().any(Node::is_control) { + if self.0.iter().any(Context::is_control) { g.push_in_block(Brace::default(), |g| 
g.push_all(&self.0)); } else { g.push_all(&self.0); @@ -302,7 +324,7 @@ pub struct Element { } impl Generate for Element { - const CONTEXT: Context = Context::Node; + type Context = Node; fn generate(&self, g: &mut Generator) { let mut el_checks = ElementCheck::new(&self.name, self.body.kind()); @@ -343,7 +365,7 @@ impl Generate for Element { pub enum ElementBody { Normal { - children: Nodes>, + children: Many>, closing_name: Option, }, Void, @@ -369,7 +391,7 @@ impl Attribute { Ok(Self { name: parse_quote_spanned!(pound_token.span()=> id), kind: AttributeKind::Value { - value: input.call(AttributeValueNode::parse_unquoted)?, + value: input.call(AttributeValue::parse_unquoted)?, toggle: None, }, }) @@ -413,7 +435,7 @@ impl Parse for Attribute { } impl Generate for Attribute { - const CONTEXT: Context = Context::AttributeValue; + type Context = AttributeValue; fn generate(&self, g: &mut Generator) { match &self.kind { @@ -445,7 +467,7 @@ impl Generate for Attribute { g.push_str(" "); g.push_lits(self.name.lits()); g.push_str("=\""); - g.push_expr(Paren::default(), Self::CONTEXT, &value); + g.push_expr::(Paren::default(), &value); g.push_str("\""); }, ); @@ -655,7 +677,7 @@ impl Parse for AttributeSymbol { pub enum AttributeKind { Value { - value: AttributeValueNode, + value: AttributeValue, toggle: Option, }, Empty(Option), @@ -663,7 +685,7 @@ pub enum AttributeKind { ClassList(Vec), } -pub enum AttributeValueNode { +pub enum AttributeValue { Literal(Literal), Group(Group), Control(Control), @@ -673,10 +695,10 @@ pub enum AttributeValueNode { Ident(Ident), } -impl AttributeValueNode { +impl AttributeValue { fn parse_unquoted(input: ParseStream) -> syn::Result { if input.peek(Ident::peek_any) || input.peek(LitInt) { - Ok(Self::Group(Group(Nodes( + Ok(Self::Group(Group(Many( input .call(UnquotedName::parse_attr_value)? 
.lits() @@ -690,13 +712,21 @@ impl AttributeValueNode { } } -impl Node for AttributeValueNode { +impl Context for AttributeValue { fn is_control(&self) -> bool { matches!(self, Self::Control(_)) } + + fn marker_type() -> TokenStream { + quote!(::hypertext::context::AttributeValue) + } + + fn escape(s: &str) -> Cow<'_, str> { + html_escape::encode_double_quoted_attribute(s) + } } -impl Parse for AttributeValueNode { +impl Parse for AttributeValue { fn parse(input: ParseStream) -> syn::Result { let lookahead = input.lookahead1(); @@ -725,25 +755,25 @@ impl Parse for AttributeValueNode { } } -impl Generate for AttributeValueNode { - const CONTEXT: Context = Context::AttributeValue; +impl Generate for AttributeValue { + type Context = Self; fn generate(&self, g: &mut Generator) { match self { - Self::Literal(lit) => g.push_escaped_lit(Self::CONTEXT, &lit.lit_str()), + Self::Literal(lit) => g.push_escaped_lit::(&lit.lit_str()), Self::Group(block) => g.push(block), Self::Control(control) => g.push(control), Self::Expr(expr) => g.push(expr), Self::DisplayExpr(display_expr) => g.push(display_expr), Self::DebugExpr(debug_expr) => g.push(debug_expr), - Self::Ident(ident) => g.push_expr(Paren::default(), Self::CONTEXT, ident), + Self::Ident(ident) => g.push_expr::(Paren::default(), ident), } } } pub enum Class { Value { - value: AttributeValueNode, + value: AttributeValue, toggle: Option, }, Option(Toggle), @@ -777,7 +807,7 @@ impl Class { if index > 0 { g.push_str(" "); } - g.push_expr(Paren::default(), Context::AttributeValue, &value); + g.push_expr::(Paren::default(), &value); }, ); } @@ -793,7 +823,7 @@ impl Parse for Class { Ok(Self::Option(input.parse()?)) } else { Ok(Self::Value { - value: input.call(AttributeValueNode::parse_unquoted)?, + value: input.call(AttributeValue::parse_unquoted)?, toggle: input.call(Toggle::parse_optional)?, }) } diff --git a/hypertext-macros/src/html/syntaxes/maud.rs b/crates/hypertext-proc-macros/src/html/syntaxes/maud.rs similarity index 
95% rename from hypertext-macros/src/html/syntaxes/maud.rs rename to crates/hypertext-proc-macros/src/html/syntaxes/maud.rs index 0a47781..4e09f57 100644 --- a/hypertext-macros/src/html/syntaxes/maud.rs +++ b/crates/hypertext-proc-macros/src/html/syntaxes/maud.rs @@ -9,15 +9,14 @@ use syn::{ }; use crate::html::{ - Attribute, Component, Doctype, Element, ElementBody, ElementNode, Group, Syntax, UnquotedName, - kw, + Attribute, Component, Doctype, Element, ElementBody, Group, Node, Syntax, UnquotedName, kw, }; pub struct Maud; impl Syntax for Maud {} -impl Parse for ElementNode { +impl Parse for Node { fn parse(input: ParseStream) -> syn::Result { let lookahead = input.lookahead1(); @@ -65,7 +64,7 @@ impl Parse for Doctype { } } -impl Parse for Group> { +impl Parse for Group> { fn parse(input: ParseStream) -> syn::Result { let content; braced!(content in input); diff --git a/hypertext-macros/src/html/syntaxes/mod.rs b/crates/hypertext-proc-macros/src/html/syntaxes/mod.rs similarity index 100% rename from hypertext-macros/src/html/syntaxes/mod.rs rename to crates/hypertext-proc-macros/src/html/syntaxes/mod.rs diff --git a/hypertext-macros/src/html/syntaxes/rsx.rs b/crates/hypertext-proc-macros/src/html/syntaxes/rsx.rs similarity index 79% rename from hypertext-macros/src/html/syntaxes/rsx.rs rename to crates/hypertext-proc-macros/src/html/syntaxes/rsx.rs index 00cf578..dbc1c44 100644 --- a/hypertext-macros/src/html/syntaxes/rsx.rs +++ b/crates/hypertext-proc-macros/src/html/syntaxes/rsx.rs @@ -1,7 +1,7 @@ use std::marker::PhantomData; use syn::{ - Ident, LitBool, LitChar, LitFloat, LitInt, LitStr, Token, custom_punctuation, + Ident, LitBool, LitChar, LitFloat, LitInt, LitStr, Token, ext::IdentExt, parse::{Parse, ParseStream, discouraged::Speculative}, parse_quote, @@ -9,20 +9,14 @@ use syn::{ }; use crate::html::{ - Component, Doctype, Element, ElementBody, ElementNode, Group, Literal, Nodes, Syntax, - UnquotedName, + Component, Doctype, Element, ElementBody, Group, 
Literal, Many, Node, Syntax, UnquotedName, }; pub struct Rsx; impl Syntax for Rsx {} -custom_punctuation!(FragmentOpen, <>); -custom_punctuation!(FragmentClose, ); -custom_punctuation!(OpenTagSolidusEnd, />); -custom_punctuation!(CloseTagStart, { +impl Node { fn parse_component(input: ParseStream) -> syn::Result { input.parse::()?; @@ -30,7 +24,11 @@ impl ElementNode { let mut attrs = Vec::new(); - while !(input.peek(Token![..]) || input.peek(Token![>]) || input.peek(OpenTagSolidusEnd)) { + #[allow(clippy::suspicious_operation_groupings)] + while !(input.peek(Token![..]) + || input.peek(Token![>]) + || (input.peek(Token![/]) && input.peek2(Token![>]))) + { attrs.push(input.parse()?); } @@ -49,7 +47,7 @@ impl ElementNode { } else { let mut children = Vec::new(); - while !input.peek(CloseTagStart) { + while !(input.peek(Token![<]) && input.peek2(Token![/])) { if input.is_empty() { children.insert( 0, @@ -61,14 +59,15 @@ impl ElementNode { }), ); - return Ok(Self::Group(Group(Nodes(children)))); + return Ok(Self::Group(Group(Many(children)))); } children.push(input.parse()?); } let fork = input.fork(); - fork.parse::()?; + fork.parse::()?; + fork.parse::()?; let closing_name = fork.parse::()?; if closing_name == name { input.advance_to(&fork); @@ -83,7 +82,7 @@ impl ElementNode { }), ); - return Ok(Self::Group(Group(Nodes(children)))); + return Ok(Self::Group(Group(Many(children)))); } input.parse::]>()?; @@ -92,7 +91,7 @@ impl ElementNode { attrs, dotdot, body: ElementBody::Normal { - children: Nodes(children), + children: Many(children), closing_name: Some(parse_quote!(#closing_name)), }, })) @@ -106,7 +105,7 @@ impl ElementNode { let mut attrs = Vec::new(); - while !(input.peek(Token![>]) || (input.peek(OpenTagSolidusEnd))) { + while !(input.peek(Token![>]) || (input.peek(Token![/]) && input.peek2(Token![>]))) { attrs.push(input.parse()?); } @@ -122,7 +121,7 @@ impl ElementNode { } else { let mut children = Vec::new(); - while !(input.peek(CloseTagStart)) { + while 
!(input.peek(Token![<]) && input.peek2(Token![/])) { if input.is_empty() { children.insert( 0, @@ -133,13 +132,14 @@ impl ElementNode { }), ); - return Ok(Self::Group(Group(Nodes(children)))); + return Ok(Self::Group(Group(Many(children)))); } children.push(input.parse()?); } let fork = input.fork(); - fork.parse::()?; + fork.parse::()?; + fork.parse::()?; let closing_name = fork.parse()?; if closing_name == name { input.advance_to(&fork); @@ -153,7 +153,7 @@ impl ElementNode { }), ); - return Ok(Self::Group(Group(Nodes(children)))); + return Ok(Self::Group(Group(Many(children)))); } input.parse::]>()?; @@ -161,7 +161,7 @@ impl ElementNode { name, attrs, body: ElementBody::Normal { - children: Nodes(children), + children: Many(children), closing_name: Some(closing_name), }, })) @@ -169,7 +169,7 @@ impl ElementNode { } } -impl Parse for ElementNode { +impl Parse for Node { fn parse(input: ParseStream) -> syn::Result { let lookahead = input.lookahead1(); @@ -241,18 +241,21 @@ impl Parse for Doctype { } } -impl Parse for Group> { +impl Parse for Group> { fn parse(input: ParseStream) -> syn::Result { - input.parse::()?; + input.parse::()?; + input.parse::]>()?; - let mut nodes = Vec::new(); + let mut children = Vec::new(); - while !input.peek(FragmentClose) { - nodes.push(input.parse()?); + while !(input.peek(Token![<]) && input.peek2(Token![/]) && input.peek3(Token![>])) { + children.push(input.parse()?); } - input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; - Ok(Self(Nodes(nodes))) + Ok(Self(Many(children))) } } diff --git a/crates/hypertext-proc-macros/src/lib.rs b/crates/hypertext-proc-macros/src/lib.rs new file mode 100644 index 0000000..1d9775b --- /dev/null +++ b/crates/hypertext-proc-macros/src/lib.rs @@ -0,0 +1,78 @@ +#![expect(missing_docs)] + +mod derive; +mod html; +mod renderable; + +use html::{AttributeValue, Many}; +use proc_macro::TokenStream; +use syn::{parse::Parse, parse_macro_input}; + +use self::html::{Document, Maud, 
Rsx}; +use crate::html::generate::{Config, Generate, Semantics}; + +fn generate(config: Config, tokens: TokenStream) -> TokenStream { + config + .generate::(tokens.into()) + .unwrap_or_else(|err| err.to_compile_error()) + .into() +} + +macro_rules! create_variants { + { + $($Ty:ty { + $lazy_move:ident + $lazy_borrow:ident + $simple:ident + })* + } => { + $(#[proc_macro] + pub fn $lazy_move(tokens: TokenStream) -> TokenStream { + generate::<$Ty>(Config::Lazy(Semantics::Move), tokens) + } + + #[proc_macro] + pub fn $lazy_borrow(tokens: TokenStream) -> TokenStream { + generate::<$Ty>(Config::Lazy(Semantics::Borrow), tokens) + } + + #[proc_macro] + pub fn $simple(tokens: TokenStream) -> TokenStream { + generate::<$Ty>(Config::Simple, tokens) + })* + }; +} + +create_variants! { + Document { + maud + maud_borrow + maud_simple + } + + Document { + rsx + rsx_borrow + rsx_simple + } + + Many { + attribute + attribute_borrow + attribute_simple + } +} + +#[proc_macro_derive(Renderable, attributes(maud, rsx, attribute))] +pub fn derive_renderable(input: TokenStream) -> TokenStream { + derive::renderable(parse_macro_input!(input)) + .unwrap_or_else(|err| err.to_compile_error()) + .into() +} + +#[proc_macro_attribute] +pub fn renderable(attr: TokenStream, item: TokenStream) -> TokenStream { + renderable::generate(parse_macro_input!(attr), parse_macro_input!(item)) + .unwrap_or_else(|err| err.to_compile_error()) + .into() +} diff --git a/hypertext-macros/src/component.rs b/crates/hypertext-proc-macros/src/renderable.rs similarity index 78% rename from hypertext-macros/src/component.rs rename to crates/hypertext-proc-macros/src/renderable.rs index 04dcd7b..65b86c5 100644 --- a/hypertext-macros/src/component.rs +++ b/crates/hypertext-proc-macros/src/renderable.rs @@ -1,15 +1,15 @@ use proc_macro2::TokenStream; use quote::quote; -use syn::{FnArg, Ident, ItemFn, Pat, PatType, Type, Visibility, parse::Parse}; +use syn::{Error, FnArg, Ident, ItemFn, Pat, PatType, Type, Visibility, 
parse::Parse}; use crate::html::generate::Generator; -pub struct ComponentArgs { +pub struct RenderableArgs { visibility: Visibility, ident: Option, } -impl Parse for ComponentArgs { +impl Parse for RenderableArgs { fn parse(input: syn::parse::ParseStream) -> syn::Result { Ok(Self { visibility: input.parse()?, @@ -22,7 +22,8 @@ impl Parse for ComponentArgs { } } -pub fn generate(args: ComponentArgs, fn_item: &ItemFn) -> syn::Result { +#[expect(clippy::needless_pass_by_value)] +pub fn generate(args: RenderableArgs, fn_item: ItemFn) -> syn::Result { let mut fields = Vec::new(); let mut field_names = Vec::new(); let mut field_refs = Vec::new(); @@ -38,18 +39,18 @@ pub fn generate(args: ComponentArgs, fn_item: &ItemFn) -> syn::Result &pat_ident.ident, _ => { - return Err(syn::Error::new_spanned( + return Err(Error::new_spanned( pat, - "component function parameters must be identifiers", + "renderable function parameters must be identifiers", )); } }; let (ty, ref_token) = match &**ty { Type::Reference(ty_ref) => { if ty_ref.mutability.is_some() { - return Err(syn::Error::new_spanned( + return Err(Error::new_spanned( ty_ref, - "component function parameters cannot be mutable references", + "renderable function parameters cannot be mutable references", )); } @@ -67,9 +68,9 @@ pub fn generate(args: ComponentArgs, fn_item: &ItemFn) -> syn::Result syn::Result syn::Result { + let attribute = { + let content; + Attribute { + pound_token: input.parse()?, + style: AttrStyle::Inner(input.parse()?), + bracket_token: bracketed!(content in input), + meta: content.parse()?, + } + }; + + if !attribute.path().is_ident("config") { + return Err(syn::Error::new_spanned( + &attribute, + "expected `#![config(...)]` attribute", + )); + } + + let mut syntax = None::; + let mut quotes = None::; + let mut output = None::; + + attribute.parse_nested_meta(|meta| { + if meta.path.is_ident("syntax") { + if syntax.is_some() { + return Err(meta.error("duplicate `syntax` option")); + } + + syntax = 
Some(meta.value()?.parse()?); + } else if meta.path.is_ident("quotes") { + if quotes.is_some() { + return Err(meta.error("duplicate `quotes` option")); + } + + quotes = Some(meta.value()?.parse()?); + } else if meta.path.is_ident("output") { + if output.is_some() { + return Err(meta.error("duplicate `output` option")); + } + + output = Some(meta.value()?.parse()?); + } else { + return Err( + meta.error("unrecognized option: expected `syntax`, `quotes`, or `output`") + ); + } + + Ok(()) + })?; + + Ok(Self { + syntax: syntax.ok_or_else(|| { + Error::new_spanned(attribute.path(), "expected `syntax` option to be specified") + })?, + quotes: quotes.ok_or_else(|| { + Error::new_spanned(attribute.path(), "expected `quotes` option to be specified") + })?, + output: output.ok_or_else(|| { + Error::new_spanned(attribute.path(), "expected `output` option to be specified") + })?, + }) + } +} + +#[cfg_attr(test, derive(Debug, PartialEq, Eq))] +pub enum MacroSyntax { + Maud, + Rsx, + Attribute, +} + +impl Parse for MacroSyntax { + fn parse(input: ParseStream) -> syn::Result { + custom_keyword!(maud); + custom_keyword!(rsx); + custom_keyword!(attribute); + + let lookahead = input.lookahead1(); + + if lookahead.peek(maud) { + input.parse::()?; + Ok(Self::Maud) + } else if lookahead.peek(rsx) { + input.parse::()?; + Ok(Self::Rsx) + } else if lookahead.peek(attribute) { + input.parse::()?; + Ok(Self::Attribute) + } else { + Err(lookahead.error()) + } + } +} + +#[cfg_attr(test, derive(Debug, PartialEq, Eq))] +pub enum Quotes { + Double, + Single, +} + +impl Parse for Quotes { + fn parse(input: ParseStream) -> syn::Result { + custom_keyword!(double); + custom_keyword!(single); + + let lookahead = input.lookahead1(); + + if lookahead.peek(double) { + input.parse::()?; + Ok(Self::Double) + } else if lookahead.peek(single) { + input.parse::()?; + Ok(Self::Single) + } else { + Err(lookahead.error()) + } + } +} + +#[cfg_attr(test, derive(Debug, PartialEq, Eq))] +pub enum Output { + Simple, 
+ Lazy { move_: bool }, +} + +impl Parse for Output { + fn parse(input: ParseStream) -> syn::Result { + custom_keyword!(simple); + custom_keyword!(lazy); + + let lookahead = input.lookahead1(); + + if lookahead.peek(simple) { + input.parse::()?; + Ok(Self::Simple) + } else if lookahead.peek(lazy) { + let list = input.parse::()?; + + if !list.path.is_ident("lazy") { + return Err(Error::new_spanned(&list.path, "expected `lazy(...)`")); + } + + let mut move_ = None::; + + list.parse_nested_meta(|meta| { + if meta.path.is_ident("move") { + if move_.is_some() { + return Err(meta.error("duplicate `move` option")); + } + + move_ = Some(meta.value()?.parse::()?.value); + } else { + return Err(meta.error("unrecognized option: expected `move`")); + } + + Ok(()) + })?; + + Ok(Self::Lazy { + move_: move_.ok_or_else(|| { + Error::new_spanned(&list.path, "expected `move` option to be specified") + })?, + }) + } else { + Err(lookahead.error()) + } + } +} + +#[cfg(test)] +mod tests { + use quote::quote; + + use super::*; + + #[test] + fn works() { + let attr = quote! 
{ + #![config( + syntax = maud, + quotes = double, + output = lazy(move = false) + )] + }; + + assert_eq!( + syn::parse2::(attr).unwrap(), + Config { + syntax: MacroSyntax::Maud, + quotes: Quotes::Double, + output: Output::Lazy { move_: false } + } + ); + } +} diff --git a/hypertext-macros/src/html/generate.rs b/crates/hypertext-syntax/src/lib.rs similarity index 74% rename from hypertext-macros/src/html/generate.rs rename to crates/hypertext-syntax/src/lib.rs index 08e40cd..7db6818 100644 --- a/hypertext-macros/src/html/generate.rs +++ b/crates/hypertext-syntax/src/lib.rs @@ -1,58 +1,35 @@ +#![expect( + missing_docs, + clippy::missing_errors_doc, + missing_debug_implementations, + clippy::must_use_candidate, + missing_copy_implementations +)] + +pub mod config; +pub mod repr; +pub mod syntaxes; + use std::{ - iter, + borrow::Cow, + convert::Infallible, + env, fs, iter, ops::{Deref, DerefMut}, + path::PathBuf, }; use proc_macro2::{Ident, Span, TokenStream}; use quote::{ToTokens, quote, quote_spanned}; use syn::{ - LitStr, braced, - parse::Parse, + Error, LitStr, Token, braced, custom_keyword, + parse::{Parse, ParseStream, Parser}, token::{Brace, Paren}, }; -use super::UnquotedName; - -pub fn lazy(tokens: TokenStream, move_: bool) -> syn::Result { - let mut g = Generator::new_closure(T::CONTEXT); - - g.push(syn::parse2::(tokens)?); - - let block = g.finish(); - - let buffer_ident = Generator::buffer_ident(); - - let move_token = move_.then(|| quote!(move)); - - let marker_ident = T::CONTEXT.marker_type(); - - Ok(quote! { - ::hypertext::Lazy::<_, #marker_ident>::dangerously_create( - #move_token |#buffer_ident: &mut ::hypertext::Buffer<#marker_ident>| { - - #block - } - ) - }) -} - -pub fn literal(tokens: TokenStream) -> syn::Result { - let mut g = Generator::new_static(T::CONTEXT); - - g.push(syn::parse2::(tokens)?); - - let literal = g.finish().to_token_stream(); - - let marker_ident = T::CONTEXT.marker_type(); - - Ok(quote! 
{ - ::hypertext::Raw::<_, #marker_ident>::dangerously_create(#literal) - }) -} +use self::repr::UnquotedName; pub struct Generator { lazy: bool, - context: Context, brace_token: Brace, parts: Vec, checks: Checks, @@ -63,18 +40,17 @@ impl Generator { Ident::new("__hypertext_buffer", Span::mixed_site()) } - fn new_closure(context: Context) -> Self { - Self::new_with_brace(context, true, Brace::default()) + fn new_closure() -> Self { + Self::new_with_brace(true, Brace::default()) } - fn new_static(context: Context) -> Self { - Self::new_with_brace(context, false, Brace::default()) + fn new_static() -> Self { + Self::new_with_brace(false, Brace::default()) } - const fn new_with_brace(context: Context, lazy: bool, brace_token: Brace) -> Self { + const fn new_with_brace(lazy: bool, brace_token: Brace) -> Self { Self { lazy, - context, brace_token, parts: Vec::new(), checks: Checks::new(), @@ -117,10 +93,7 @@ impl Generator { } } - quote! { - #buffer_ident.dangerously_get_string().reserve(#size_estimate); - #stmts - } + stmts } else { let mut static_parts = Vec::new(); let mut errors = TokenStream::new(); @@ -129,11 +102,8 @@ impl Generator { match part { Part::Static(lit) => static_parts.push(lit), Part::Dynamic(stmt) => errors.extend( - syn::Error::new_spanned( - stmt, - "static evaluation cannot contain dynamic parts", - ) - .to_compile_error(), + Error::new_spanned(stmt, "simple evaluation cannot contain dynamic parts") + .to_compile_error(), ), } } @@ -156,7 +126,7 @@ impl Generator { } pub fn block_with(&mut self, brace_token: Brace, f: impl FnOnce(&mut Self)) -> AnyBlock { - let mut g = Self::new_with_brace(self.context, true, brace_token); + let mut g = Self::new_with_brace(true, brace_token); f(&mut g); @@ -178,12 +148,9 @@ impl Generator { self.parts.push(Part::Static(LitStr::new(s, span))); } - pub fn push_escaped_lit(&mut self, context: Context, lit: &LitStr) { + pub fn push_escaped_lit(&mut self, lit: &LitStr) { let value = lit.value(); - let escaped_value = 
match context { - Context::Node => html_escape::encode_text(&value), - Context::AttributeValue => html_escape::encode_double_quoted_attribute(&value), - }; + let escaped_value = C::escape(&value); self.parts .push(Part::Static(LitStr::new(&escaped_value, lit.span()))); @@ -195,17 +162,10 @@ impl Generator { } } - pub fn push_expr(&mut self, paren_token: Paren, context: Context, expr: impl ToTokens) { + pub fn push_expr(&mut self, paren_token: Paren, expr: impl ToTokens) { let buffer_ident = Self::buffer_ident(); - let buffer_expr = match (self.context, context) { - (Context::Node, Context::Node) | (Context::AttributeValue, Context::AttributeValue) => { - quote!(#buffer_ident) - } - (Context::Node, Context::AttributeValue) => { - quote!(#buffer_ident.as_attribute_buffer()) - } - (Context::AttributeValue, Context::Node) => unreachable!(), - }; + let ctx = C::marker_type(); + let buffer_expr = quote!(#buffer_ident.with_context::<#ctx>()); let mut paren_expr = TokenStream::new(); paren_token.surround(&mut paren_expr, |tokens| expr.to_tokens(tokens)); @@ -249,36 +209,76 @@ enum Part { Dynamic(TokenStream), } -#[derive(Debug, Clone, Copy)] -pub enum Context { - Node, - AttributeValue, +pub struct AnyBlock { + pub brace_token: Brace, + pub stmts: TokenStream, } -impl Context { - pub fn marker_type(self) -> TokenStream { - let ident = match self { - Self::Node => Ident::new("Node", Span::mixed_site()), - Self::AttributeValue => Ident::new("AttributeValue", Span::mixed_site()), - }; +impl Parse for AnyBlock { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let content; - quote!(::hypertext::context::#ident) + Ok(Self { + brace_token: braced!(content in input), + stmts: content.parse()?, + }) + } +} + +impl ToTokens for AnyBlock { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.brace_token.surround(tokens, |tokens| { + self.stmts.to_tokens(tokens); + }); } } pub trait Generate { - const CONTEXT: Context; + type Context: Context; + fn generate(&self, g: 
&mut Generator); } +impl Generate for Infallible { + type Context = Self; + + fn generate(&self, _: &mut Generator) { + #[expect(clippy::uninhabited_references)] + match *self {} + } +} + impl Generate for &T { - const CONTEXT: Context = T::CONTEXT; + type Context = T::Context; fn generate(&self, g: &mut Generator) { (*self).generate(g); } } +pub trait Context: Generate { + fn is_control(&self) -> bool; + + fn marker_type() -> TokenStream; + + fn escape(s: &str) -> Cow<'_, str>; +} + +impl Context for Infallible { + fn is_control(&self) -> bool { + #[expect(clippy::uninhabited_references)] + match *self {} + } + + fn marker_type() -> TokenStream { + TokenStream::new() + } + + fn escape(s: &str) -> Cow<'_, str> { + Cow::Borrowed(s) + } +} + struct Checks { elements: Vec, } @@ -353,8 +353,8 @@ impl ElementCheck { } } - pub fn set_closing_spans(&mut self, spans: Vec) { - self.closing_spans = spans; + pub fn set_closing_tag(&mut self, name: &UnquotedName) { + self.closing_spans = name.spans(); } pub fn push_attribute(&mut self, attr: AttributeCheck) { @@ -364,7 +364,7 @@ impl ElementCheck { impl ToTokens for ElementCheck { fn to_tokens(&self, tokens: &mut TokenStream) { - let kind = self.kind; + let kind = &self.kind; let el_checks = self .opening_spans @@ -399,7 +399,6 @@ impl ToTokens for ElementCheck { } } -#[derive(Debug, Clone, Copy)] pub enum ElementKind { Normal, Void, @@ -458,27 +457,3 @@ impl ToTokens for AttributeCheckKind { .to_tokens(tokens); } } - -pub struct AnyBlock { - pub brace_token: Brace, - pub stmts: TokenStream, -} - -impl Parse for AnyBlock { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let content; - - Ok(Self { - brace_token: braced!(content in input), - stmts: content.parse()?, - }) - } -} - -impl ToTokens for AnyBlock { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.brace_token.surround(tokens, |tokens| { - self.stmts.to_tokens(tokens); - }); - } -} diff --git a/crates/hypertext-syntax/src/mod.rs 
b/crates/hypertext-syntax/src/mod.rs new file mode 100644 index 0000000..5881060 --- /dev/null +++ b/crates/hypertext-syntax/src/mod.rs @@ -0,0 +1,874 @@ +#![expect(clippy::struct_field_names, clippy::large_enum_variant)] + +mod basics; +mod component; +mod control; +pub mod generate; +mod syntaxes; + +use std::{borrow::Cow, convert::Infallible, marker::PhantomData}; + +use proc_macro2::{Span, TokenStream}; +use quote::{ToTokens, quote, quote_spanned}; +use syn::{ + Error, Ident, LitBool, LitChar, LitFloat, LitInt, LitStr, Token, braced, bracketed, + ext::IdentExt, + parenthesized, + parse::{Parse, ParseStream}, + parse_quote_spanned, + spanned::Spanned, + token::{Brace, Bracket, Paren}, +}; + +pub use self::syntaxes::{Maud, Rsx}; +use self::{ + basics::{Literal, UnquotedName}, + component::Component, + control::Control, + generate::{ + AnyBlock, AttributeCheck, AttributeCheckKind, ElementCheck, ElementKind, Generate, + Generator, + }, +}; + +mod kw { + use syn::LitStr; + + syn::custom_keyword!(data); + + impl data { + pub fn lit(self) -> LitStr { + LitStr::new("data", self.span) + } + } + + syn::custom_keyword!(DOCTYPE); + + impl DOCTYPE { + pub fn lit(self) -> LitStr { + LitStr::new("DOCTYPE", self.span) + } + } + + syn::custom_keyword!(html); + + impl html { + pub fn lit(self) -> LitStr { + LitStr::new("html", self.span) + } + } +} + +pub trait Syntax {} + +pub type Document = Many>; + +pub trait Context: Generate { + fn is_control(&self) -> bool; + + fn marker_type() -> TokenStream; + + fn escape(s: &str) -> Cow<'_, str>; +} + +impl Context for Infallible { + fn is_control(&self) -> bool { + #[expect(clippy::uninhabited_references)] + match *self {} + } + + fn marker_type() -> TokenStream { + TokenStream::new() + } + + fn escape(s: &str) -> Cow<'_, str> { + Cow::Borrowed(s) + } +} + +pub enum Node { + Doctype(Doctype), + Element(Element), + Component(Component), + Literal(Literal), + Control(Control), + Expr(ParenExpr), + DisplayExpr(DisplayExpr), + 
DebugExpr(DebugExpr), + Group(Group), +} + +impl Context for Node { + fn is_control(&self) -> bool { + matches!(self, Self::Control(_)) + } + + fn marker_type() -> TokenStream { + quote!(::hypertext::context::Node) + } + + fn escape(s: &str) -> Cow<'_, str> { + html_escape::encode_text(s) + } +} + +impl Generate for Node { + type Context = Self; + + fn generate(&self, g: &mut Generator) { + match self { + Self::Doctype(doctype) => g.push(doctype), + Self::Element(element) => g.push(element), + Self::Component(component) => g.push(component), + Self::Literal(lit) => g.push_escaped_lit::(&lit.lit_str()), + Self::Control(control) => g.push(control), + Self::Expr(expr) => g.push(expr), + Self::DisplayExpr(display_expr) => g.push(display_expr), + Self::DebugExpr(debug_expr) => g.push(debug_expr), + Self::Group(group) => g.push(group), + } + } +} + +pub struct Doctype { + lt_token: Token![<], + bang_token: Token![!], + doctype_token: kw::DOCTYPE, + html_token: kw::html, + gt_token: Token![>], + phantom: PhantomData, +} + +impl Generate for Doctype { + type Context = Node; + + fn generate(&self, g: &mut Generator) { + g.push_lits(vec![ + LitStr::new("<", self.lt_token.span), + LitStr::new("!", self.bang_token.span), + self.doctype_token.lit(), + LitStr::new(" ", Span::mixed_site()), + self.html_token.lit(), + LitStr::new(">", self.gt_token.span), + ]); + } +} + +pub struct ParenExpr { + paren_token: Paren, + expr: TokenStream, + phantom: PhantomData, +} + +impl Parse for ParenExpr { + fn parse(input: ParseStream) -> syn::Result { + let content; + + Ok(Self { + paren_token: parenthesized!(content in input), + expr: content.parse()?, + phantom: PhantomData, + }) + } +} + +impl Generate for ParenExpr { + type Context = C; + + fn generate(&self, g: &mut Generator) { + g.push_expr::(self.paren_token, &self.expr); + } +} + +impl ToTokens for ParenExpr { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.paren_token.surround(tokens, |tokens| { + 
self.expr.to_tokens(tokens); + }); + } +} + +pub struct DisplayExpr { + percent_token: Token![%], + paren_expr: ParenExpr, +} + +impl DisplayExpr { + fn wrapped_expr(&self) -> TokenStream { + let wrapper = quote_spanned!(self.percent_token.span=> Displayed); + let mut new_paren_expr = TokenStream::new(); + self.paren_expr + .paren_token + .surround(&mut new_paren_expr, |tokens| { + quote_spanned!(self.paren_expr.paren_token.span=> &).to_tokens(tokens); + self.paren_expr.to_tokens(tokens); + }); + + quote!(::hypertext::#wrapper #new_paren_expr) + } +} + +impl Parse for DisplayExpr { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + percent_token: input.parse()?, + paren_expr: input.parse()?, + }) + } +} + +impl Generate for DisplayExpr { + type Context = C; + + fn generate(&self, g: &mut Generator) { + g.push_expr::(self.paren_expr.paren_token, self.wrapped_expr()); + } +} + +pub struct DebugExpr { + question_token: Token![?], + expr: ParenExpr, +} + +impl DebugExpr { + fn wrapped_expr(&self) -> TokenStream { + let wrapper = quote_spanned!(self.question_token.span=> Debugged); + let mut new_paren_expr = TokenStream::new(); + self.expr + .paren_token + .surround(&mut new_paren_expr, |tokens| { + quote_spanned!(self.expr.paren_token.span=> &).to_tokens(tokens); + self.expr.to_tokens(tokens); + }); + + quote!(::hypertext::#wrapper #new_paren_expr) + } +} + +impl Parse for DebugExpr { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + question_token: input.parse()?, + expr: input.parse()?, + }) + } +} + +impl Generate for DebugExpr { + type Context = C; + + fn generate(&self, g: &mut Generator) { + g.push_expr::(self.expr.paren_token, self.wrapped_expr()); + } +} + +pub struct Group(Many); + +impl Parse for Group { + fn parse(input: ParseStream) -> syn::Result { + let content; + braced!(content in input); + + Ok(Self(content.parse()?)) + } +} + +impl Generate for Group { + type Context = C; + + fn generate(&self, g: &mut Generator) { + 
g.push(&self.0); + } +} + +pub struct Many(Vec); + +impl Many { + fn block(&self, g: &mut Generator, brace_token: Brace) -> AnyBlock { + g.block_with(brace_token, |g| { + g.push_all(&self.0); + }) + } +} + +impl Parse for Many { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self({ + let mut children = Vec::new(); + + while !input.is_empty() { + children.push(input.parse()?); + } + + children + })) + } +} + +impl Generate for Many { + type Context = C; + + fn generate(&self, g: &mut Generator) { + if self.0.iter().any(Context::is_control) { + g.push_in_block(Brace::default(), |g| g.push_all(&self.0)); + } else { + g.push_all(&self.0); + } + } +} + +pub struct Element { + name: UnquotedName, + attrs: Vec, + body: ElementBody, +} + +impl Generate for Element { + type Context = Node; + + fn generate(&self, g: &mut Generator) { + let mut el_checks = ElementCheck::new(&self.name, self.body.kind()); + + g.push_str("<"); + g.push_lits(self.name.lits()); + + for attr in &self.attrs { + g.push(attr); + if let Some(check) = attr.name.check() { + el_checks.push_attribute(check); + } + } + + g.push_str(">"); + + match &self.body { + ElementBody::Normal { + children, + closing_name, + } => { + let name = closing_name.as_ref().map_or(&self.name, |closing_name| { + el_checks.set_closing_spans(closing_name.spans()); + closing_name + }); + + g.push(children); + g.push_str(""); + } + ElementBody::Void => {} + } + + g.record_element(el_checks); + } +} + +pub enum ElementBody { + Normal { + children: Many>, + closing_name: Option, + }, + Void, +} + +impl ElementBody { + const fn kind(&self) -> ElementKind { + match self { + Self::Normal { .. 
} => ElementKind::Normal, + Self::Void => ElementKind::Void, + } + } +} + +pub struct Attribute { + name: AttributeName, + kind: AttributeKind, +} + +impl Attribute { + fn parse_id(input: ParseStream) -> syn::Result { + let pound_token = input.parse::()?; + Ok(Self { + name: parse_quote_spanned!(pound_token.span()=> id), + kind: AttributeKind::Value { + value: input.call(AttributeValue::parse_unquoted)?, + toggle: None, + }, + }) + } + + fn parse_class_list(input: ParseStream) -> syn::Result { + let dot_token = input.fork().parse::()?; + let mut classes = Vec::new(); + + while input.peek(Token![.]) { + classes.push(input.parse()?); + } + + Ok(Self { + name: parse_quote_spanned!(dot_token.span()=> class), + kind: AttributeKind::ClassList(classes), + }) + } +} + +impl Parse for Attribute { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + name: input.parse()?, + kind: if input.peek(Token![=]) { + input.parse::()?; + + if let Some(toggle) = input.call(Toggle::parse_optional)? { + AttributeKind::Option(toggle) + } else { + AttributeKind::Value { + value: input.parse()?, + toggle: input.call(Toggle::parse_optional)?, + } + } + } else { + AttributeKind::Empty(input.call(Toggle::parse_optional)?) + }, + }) + } +} + +impl Generate for Attribute { + type Context = AttributeValue; + + fn generate(&self, g: &mut Generator) { + match &self.kind { + AttributeKind::Value { value, toggle, .. 
} => { + if let Some(toggle) = toggle { + g.push_conditional(toggle.parenthesized(), |g| { + g.push_str(" "); + g.push_lits(self.name.lits()); + g.push_str("=\""); + g.push(value); + g.push_str("\""); + }); + } else { + g.push_str(" "); + g.push_lits(self.name.lits()); + g.push_str("=\""); + g.push(value); + g.push_str("\""); + } + } + AttributeKind::Option(option) => { + let option_expr = &option.expr; + + let value = Ident::new("value", Span::mixed_site()); + + g.push_conditional( + quote!(let ::core::option::Option::Some(#value) = (#option_expr)), + |g| { + g.push_str(" "); + g.push_lits(self.name.lits()); + g.push_str("=\""); + g.push_expr::(Paren::default(), &value); + g.push_str("\""); + }, + ); + } + AttributeKind::Empty(Some(toggle)) => { + g.push_conditional(toggle.parenthesized(), |g| { + g.push_str(" "); + g.push_lits(self.name.lits()); + }); + } + AttributeKind::Empty(None) => { + g.push_str(" "); + g.push_lits(self.name.lits()); + } + AttributeKind::ClassList(classes) => { + g.push_str(" "); + g.push_lits(self.name.lits()); + g.push_str("=\""); + + for (i, class) in classes.iter().enumerate() { + class.generate(g, i); + } + + g.push_str("\""); + } + } + } +} + +pub enum AttributeName { + Data { + data_token: kw::data, + hyphen: Token![-], + rest: UnquotedName, + }, + Namespace { + namespace: UnquotedName, + colon: Token![:], + rest: UnquotedName, + }, + Symbol { + symbol: AttributeSymbol, + rest: UnquotedName, + }, + Normal(UnquotedName), + Unchecked(LitStr), +} + +impl AttributeName { + fn check(&self) -> Option { + match self { + Self::Data { .. } | Self::Unchecked(_) => None, + Self::Namespace { namespace, .. } => Some(AttributeCheck::new( + AttributeCheckKind::Namespace, + namespace.ident_string(), + namespace.spans(), + )), + Self::Symbol { symbol, .. 
} => Some(AttributeCheck::new( + AttributeCheckKind::Symbol, + symbol.ident_string(), + vec![symbol.span()], + )), + Self::Normal(name) => Some(AttributeCheck::new( + AttributeCheckKind::Normal, + name.ident_string(), + name.spans(), + )), + } + } + + fn lits(&self) -> Vec { + match self { + Self::Data { + data_token, + hyphen, + rest, + } => { + let mut lits = vec![data_token.lit(), LitStr::new("-", hyphen.span)]; + + lits.append(&mut rest.lits()); + + lits + } + Self::Namespace { + namespace, rest, .. + } => { + let mut lits = namespace.lits(); + lits.push(LitStr::new(":", Span::mixed_site())); + lits.append(&mut rest.lits()); + lits + } + Self::Symbol { symbol, rest } => { + let mut lits = vec![symbol.lit()]; + lits.append(&mut rest.lits()); + lits + } + Self::Normal(unquoted_name) => unquoted_name.lits(), + Self::Unchecked(lit) => vec![lit.clone()], + } + } +} + +impl Parse for AttributeName { + fn parse(input: ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(kw::data) && input.peek2(Token![-]) { + Ok(Self::Data { + data_token: input.parse()?, + hyphen: input.parse()?, + rest: input.call(UnquotedName::parse_any)?, + }) + } else if lookahead.peek(Ident::peek_any) || lookahead.peek(LitInt) { + let name = input.parse()?; + if input.peek(Token![:]) { + Ok(Self::Namespace { + namespace: name, + colon: input.parse()?, + rest: input.call(UnquotedName::parse_any)?, + }) + } else { + Ok(Self::Normal(name)) + } + } else if lookahead.peek(Token![@]) || lookahead.peek(Token![:]) { + Ok(Self::Symbol { + symbol: input.parse()?, + rest: input.call(UnquotedName::parse_any)?, + }) + } else if lookahead.peek(LitStr) { + let s = input.parse::()?; + let value = s.value(); + + for c in value.chars() { + if c.is_whitespace() { + return Err(Error::new_spanned( + &s, + "Attribute names cannot contain whitespace", + )); + } else if c.is_control() { + return Err(Error::new_spanned( + &s, + "Attribute names cannot contain control characters", + 
)); + } else if c == '>' || c == '/' || c == '=' { + return Err(Error::new_spanned( + &s, + format!("Attribute names cannot contain '{c}' characters"), + )); + } else if c == '"' || c == '\'' { + return Err(Error::new_spanned( + &s, + "Attribute names cannot contain quotes", + )); + } + } + + Ok(Self::Unchecked(s)) + } else { + Err(lookahead.error()) + } + } +} + +pub enum AttributeSymbol { + At(Token![@]), + Colon(Token![:]), +} + +impl AttributeSymbol { + fn lit(&self) -> LitStr { + match self { + Self::At(token) => LitStr::new("@", token.span()), + Self::Colon(token) => LitStr::new(":", token.span()), + } + } + + fn ident_string(&self) -> String { + match self { + Self::At(_) => "_at".to_string(), + Self::Colon(_) => "_colon".to_string(), + } + } + + fn span(&self) -> Span { + match self { + Self::At(token) => token.span(), + Self::Colon(token) => token.span(), + } + } +} + +impl Parse for AttributeSymbol { + fn parse(input: ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(Token![@]) { + input.parse().map(Self::At) + } else if lookahead.peek(Token![:]) { + input.parse().map(Self::Colon) + } else { + Err(lookahead.error()) + } + } +} + +pub enum AttributeKind { + Value { + value: AttributeValue, + toggle: Option, + }, + Empty(Option), + Option(Toggle), + ClassList(Vec), +} + +pub enum AttributeValue { + Literal(Literal), + Group(Group), + Control(Control), + Expr(ParenExpr), + DisplayExpr(DisplayExpr), + DebugExpr(DebugExpr), + Ident(Ident), +} + +impl AttributeValue { + fn parse_unquoted(input: ParseStream) -> syn::Result { + if input.peek(Ident::peek_any) || input.peek(LitInt) { + Ok(Self::Group(Group(Many( + input + .call(UnquotedName::parse_attr_value)? 
+ .lits() + .into_iter() + .map(|lit| Self::Literal(Literal::Str(lit))) + .collect(), + )))) + } else { + input.parse() + } + } +} + +impl Context for AttributeValue { + fn is_control(&self) -> bool { + matches!(self, Self::Control(_)) + } + + fn marker_type() -> TokenStream { + quote!(::hypertext::context::AttributeValue) + } + + fn escape(s: &str) -> Cow<'_, str> { + html_escape::encode_double_quoted_attribute(s) + } +} + +impl Parse for AttributeValue { + fn parse(input: ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(LitStr) + || lookahead.peek(LitInt) + || lookahead.peek(LitBool) + || lookahead.peek(LitFloat) + || lookahead.peek(LitChar) + { + input.parse().map(Self::Literal) + } else if lookahead.peek(Brace) { + input.parse().map(Self::Group) + } else if lookahead.peek(Token![@]) { + input.parse().map(Self::Control) + } else if lookahead.peek(Paren) { + input.parse().map(Self::Expr) + } else if lookahead.peek(Token![%]) { + input.parse().map(Self::DisplayExpr) + } else if lookahead.peek(Token![?]) { + input.parse().map(Self::DebugExpr) + } else if lookahead.peek(Ident) { + input.parse().map(Self::Ident) + } else { + Err(lookahead.error()) + } + } +} + +impl Generate for AttributeValue { + type Context = Self; + + fn generate(&self, g: &mut Generator) { + match self { + Self::Literal(lit) => g.push_escaped_lit::(&lit.lit_str()), + Self::Group(block) => g.push(block), + Self::Control(control) => g.push(control), + Self::Expr(expr) => g.push(expr), + Self::DisplayExpr(display_expr) => g.push(display_expr), + Self::DebugExpr(debug_expr) => g.push(debug_expr), + Self::Ident(ident) => g.push_expr::(Paren::default(), ident), + } + } +} + +pub enum Class { + Value { + value: AttributeValue, + toggle: Option, + }, + Option(Toggle), +} + +impl Class { + fn generate(&self, g: &mut Generator, index: usize) { + match self { + Self::Value { value, toggle } => { + if let Some(toggle) = toggle { + 
g.push_conditional(toggle.parenthesized(), |g| { + if index > 0 { + g.push_str(" "); + } + g.push(value); + }); + } else { + if index > 0 { + g.push_str(" "); + } + g.push(value); + } + } + Self::Option(option) => { + let option_expr = &option.expr; + let value = Ident::new("value", Span::mixed_site()); + + g.push_conditional( + quote!(let ::core::option::Option::Some(#value) = (#option_expr)), + |g| { + if index > 0 { + g.push_str(" "); + } + g.push_expr::(Paren::default(), &value); + }, + ); + } + } + } +} + +impl Parse for Class { + fn parse(input: ParseStream) -> syn::Result { + input.parse::()?; + + if input.peek(Bracket) { + Ok(Self::Option(input.parse()?)) + } else { + Ok(Self::Value { + value: input.call(AttributeValue::parse_unquoted)?, + toggle: input.call(Toggle::parse_optional)?, + }) + } + } +} + +pub struct Toggle { + bracket_token: Bracket, + expr: TokenStream, +} + +impl Toggle { + fn parenthesized(&self) -> TokenStream { + let paren_token = Paren { + span: self.bracket_token.span, + }; + + let mut tokens = TokenStream::new(); + + paren_token.surround(&mut tokens, |tokens| { + self.expr.to_tokens(tokens); + }); + + quote! 
{ + { + #[allow(unused_parens)] + #tokens + } + } + } + + fn parse_optional(input: ParseStream) -> syn::Result> { + if input.peek(Bracket) { + input.parse().map(Some) + } else { + Ok(None) + } + } +} + +impl Parse for Toggle { + fn parse(input: ParseStream) -> syn::Result { + let content; + + Ok(Self { + bracket_token: bracketed!(content in input), + expr: content.parse()?, + }) + } +} diff --git a/crates/hypertext-syntax/src/repr/attributes.rs b/crates/hypertext-syntax/src/repr/attributes.rs new file mode 100644 index 0000000..d461eb2 --- /dev/null +++ b/crates/hypertext-syntax/src/repr/attributes.rs @@ -0,0 +1,508 @@ +use std::borrow::Cow; + +use proc_macro2::{Span, TokenStream}; +use quote::{ToTokens, quote}; +use syn::{ + Error, Ident, LitBool, LitChar, LitFloat, LitInt, LitStr, Token, bracketed, + ext::IdentExt, + parse::{Parse, ParseStream}, + parse_quote_spanned, + spanned::Spanned, + token::{Brace, Bracket, Paren}, +}; + +use super::{Control, DebugExpr, DisplayExpr, Group, Literal, Many, ParenExpr, UnquotedName, kw}; +use crate::{AttributeCheck, AttributeCheckKind, Context, Generate, Generator}; + +pub struct Attribute { + pub name: AttributeName, + pub kind: AttributeKind, +} + +impl Attribute { + pub fn parse_id(input: ParseStream) -> syn::Result { + let pound_token = input.parse::()?; + Ok(Self { + name: parse_quote_spanned!(pound_token.span()=> id), + kind: AttributeKind::Value { + value: input.call(AttributeValue::parse_unquoted)?, + toggle: None, + }, + }) + } + + pub fn parse_class_list(input: ParseStream) -> syn::Result { + let dot_token = input.fork().parse::()?; + let mut classes = Vec::new(); + + while input.peek(Token![.]) { + classes.push(input.parse()?); + } + + Ok(Self { + name: parse_quote_spanned!(dot_token.span()=> class), + kind: AttributeKind::ClassList(classes), + }) + } +} + +impl Parse for Attribute { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + name: input.parse()?, + kind: if input.peek(Token![=]) { + 
input.parse::()?; + + if let Some(toggle) = input.call(Toggle::parse_optional)? { + AttributeKind::Option(toggle) + } else { + AttributeKind::Value { + value: input.parse()?, + toggle: input.call(Toggle::parse_optional)?, + } + } + } else { + AttributeKind::Empty(input.call(Toggle::parse_optional)?) + }, + }) + } +} + +impl Generate for Attribute { + type Context = AttributeValue; + + fn generate(&self, g: &mut Generator) { + match &self.kind { + AttributeKind::Value { value, toggle, .. } => { + if let Some(toggle) = toggle { + g.push_conditional(toggle.parenthesized(), |g| { + g.push_str(" "); + g.push_lits(self.name.lits()); + g.push_str("=\""); + g.push(value); + g.push_str("\""); + }); + } else { + g.push_str(" "); + g.push_lits(self.name.lits()); + g.push_str("=\""); + g.push(value); + g.push_str("\""); + } + } + AttributeKind::Option(option) => { + let option_expr = &option.expr; + + let value = Ident::new("value", Span::mixed_site()); + + g.push_conditional( + quote!(let ::core::option::Option::Some(#value) = (#option_expr)), + |g| { + g.push_str(" "); + g.push_lits(self.name.lits()); + g.push_str("=\""); + g.push_expr::(Paren::default(), &value); + g.push_str("\""); + }, + ); + } + AttributeKind::Empty(Some(toggle)) => { + g.push_conditional(toggle.parenthesized(), |g| { + g.push_str(" "); + g.push_lits(self.name.lits()); + }); + } + AttributeKind::Empty(None) => { + g.push_str(" "); + g.push_lits(self.name.lits()); + } + AttributeKind::ClassList(classes) => { + g.push_str(" "); + g.push_lits(self.name.lits()); + g.push_str("=\""); + + for (i, class) in classes.iter().enumerate() { + class.generate(g, i); + } + + g.push_str("\""); + } + } + } +} + +pub enum AttributeName { + Data { + data_token: kw::data, + hyphen: Token![-], + rest: UnquotedName, + }, + Namespace { + namespace: UnquotedName, + colon: Token![:], + rest: UnquotedName, + }, + Symbol { + symbol: AttributeSymbol, + rest: UnquotedName, + }, + Normal(UnquotedName), + Unchecked(LitStr), +} + +impl 
AttributeName { + pub fn check(&self) -> Option { + match self { + Self::Data { .. } | Self::Unchecked(_) => None, + Self::Namespace { namespace, .. } => Some(AttributeCheck::new( + AttributeCheckKind::Namespace, + namespace.ident_string(), + namespace.spans(), + )), + Self::Symbol { symbol, .. } => Some(AttributeCheck::new( + AttributeCheckKind::Symbol, + symbol.ident_string(), + vec![symbol.span()], + )), + Self::Normal(name) => Some(AttributeCheck::new( + AttributeCheckKind::Normal, + name.ident_string(), + name.spans(), + )), + } + } + + pub fn lits(&self) -> Vec { + match self { + Self::Data { + data_token, + hyphen, + rest, + } => { + let mut lits = vec![data_token.lit(), LitStr::new("-", hyphen.span)]; + + lits.append(&mut rest.lits()); + + lits + } + Self::Namespace { + namespace, rest, .. + } => { + let mut lits = namespace.lits(); + lits.push(LitStr::new(":", Span::mixed_site())); + lits.append(&mut rest.lits()); + lits + } + Self::Symbol { symbol, rest } => { + let mut lits = vec![symbol.lit()]; + lits.append(&mut rest.lits()); + lits + } + Self::Normal(unquoted_name) => unquoted_name.lits(), + Self::Unchecked(lit) => vec![lit.clone()], + } + } +} + +impl Parse for AttributeName { + fn parse(input: ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(kw::data) && input.peek2(Token![-]) { + Ok(Self::Data { + data_token: input.parse()?, + hyphen: input.parse()?, + rest: input.call(UnquotedName::parse_any)?, + }) + } else if lookahead.peek(Ident::peek_any) || lookahead.peek(LitInt) { + let name = input.parse()?; + if input.peek(Token![:]) { + Ok(Self::Namespace { + namespace: name, + colon: input.parse()?, + rest: input.call(UnquotedName::parse_any)?, + }) + } else { + Ok(Self::Normal(name)) + } + } else if lookahead.peek(Token![@]) || lookahead.peek(Token![:]) { + Ok(Self::Symbol { + symbol: input.parse()?, + rest: input.call(UnquotedName::parse_any)?, + }) + } else if lookahead.peek(LitStr) { + let s = 
input.parse::()?; + let value = s.value(); + + for c in value.chars() { + if c.is_whitespace() { + return Err(Error::new_spanned( + &s, + "Attribute names cannot contain whitespace", + )); + } else if c.is_control() { + return Err(Error::new_spanned( + &s, + "Attribute names cannot contain control characters", + )); + } else if c == '>' || c == '/' || c == '=' { + return Err(Error::new_spanned( + &s, + format!("Attribute names cannot contain '{c}' characters"), + )); + } else if c == '"' || c == '\'' { + return Err(Error::new_spanned( + &s, + "Attribute names cannot contain quotes", + )); + } + } + + Ok(Self::Unchecked(s)) + } else { + Err(lookahead.error()) + } + } +} + +pub enum AttributeSymbol { + At(Token![@]), + Colon(Token![:]), +} + +impl AttributeSymbol { + pub fn lit(&self) -> LitStr { + match self { + Self::At(token) => LitStr::new("@", token.span()), + Self::Colon(token) => LitStr::new(":", token.span()), + } + } + + pub fn ident_string(&self) -> String { + match self { + Self::At(_) => "_at".to_string(), + Self::Colon(_) => "_colon".to_string(), + } + } + + pub fn span(&self) -> Span { + match self { + Self::At(token) => token.span(), + Self::Colon(token) => token.span(), + } + } +} + +impl Parse for AttributeSymbol { + fn parse(input: ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(Token![@]) { + input.parse().map(Self::At) + } else if lookahead.peek(Token![:]) { + input.parse().map(Self::Colon) + } else { + Err(lookahead.error()) + } + } +} + +pub enum AttributeKind { + Value { + value: AttributeValue, + toggle: Option, + }, + Empty(Option), + Option(Toggle), + ClassList(Vec), +} + +pub enum AttributeValue { + Literal(Literal), + Group(Group), + Control(Control), + Expr(ParenExpr), + DisplayExpr(DisplayExpr), + DebugExpr(DebugExpr), + Ident(Ident), +} + +impl AttributeValue { + pub fn parse_unquoted(input: ParseStream) -> syn::Result { + if input.peek(Ident::peek_any) || input.peek(LitInt) { + 
Ok(Self::Group(Group(Many( + input + .call(UnquotedName::parse_attr_value)? + .lits() + .into_iter() + .map(|lit| Self::Literal(Literal::Str(lit))) + .collect(), + )))) + } else { + input.parse() + } + } +} + +impl Context for AttributeValue { + fn is_control(&self) -> bool { + matches!(self, Self::Control(_)) + } + + fn marker_type() -> TokenStream { + quote!(::hypertext::context::AttributeValue) + } + + fn escape(s: &str) -> Cow<'_, str> { + html_escape::encode_double_quoted_attribute(s) + } +} + +impl Parse for AttributeValue { + fn parse(input: ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(LitStr) + || lookahead.peek(LitInt) + || lookahead.peek(LitBool) + || lookahead.peek(LitFloat) + || lookahead.peek(LitChar) + { + input.parse().map(Self::Literal) + } else if lookahead.peek(Brace) { + input.parse().map(Self::Group) + } else if lookahead.peek(Token![@]) { + input.parse().map(Self::Control) + } else if lookahead.peek(Paren) { + input.parse().map(Self::Expr) + } else if lookahead.peek(Token![%]) { + input.parse().map(Self::DisplayExpr) + } else if lookahead.peek(Token![?]) { + input.parse().map(Self::DebugExpr) + } else if lookahead.peek(Ident) { + input.parse().map(Self::Ident) + } else { + Err(lookahead.error()) + } + } +} + +impl Generate for AttributeValue { + type Context = Self; + + fn generate(&self, g: &mut Generator) { + match self { + Self::Literal(lit) => g.push_escaped_lit::(&lit.lit_str()), + Self::Group(block) => g.push(block), + Self::Control(control) => g.push(control), + Self::Expr(expr) => g.push(expr), + Self::DisplayExpr(display_expr) => g.push(display_expr), + Self::DebugExpr(debug_expr) => g.push(debug_expr), + Self::Ident(ident) => g.push_expr::(Paren::default(), ident), + } + } +} + +pub enum Class { + Value { + value: AttributeValue, + toggle: Option, + }, + Option(Toggle), +} + +impl Class { + fn generate(&self, g: &mut Generator, index: usize) { + match self { + Self::Value { value, toggle } 
=> { + if let Some(toggle) = toggle { + g.push_conditional(toggle.parenthesized(), |g| { + if index > 0 { + g.push_str(" "); + } + g.push(value); + }); + } else { + if index > 0 { + g.push_str(" "); + } + g.push(value); + } + } + Self::Option(option) => { + let option_expr = &option.expr; + let value = Ident::new("value", Span::mixed_site()); + + g.push_conditional( + quote!(let ::core::option::Option::Some(#value) = (#option_expr)), + |g| { + if index > 0 { + g.push_str(" "); + } + g.push_expr::(Paren::default(), &value); + }, + ); + } + } + } +} + +impl Parse for Class { + fn parse(input: ParseStream) -> syn::Result { + input.parse::()?; + + if input.peek(Bracket) { + Ok(Self::Option(input.parse()?)) + } else { + Ok(Self::Value { + value: input.call(AttributeValue::parse_unquoted)?, + toggle: input.call(Toggle::parse_optional)?, + }) + } + } +} + +pub struct Toggle { + pub bracket_token: Bracket, + pub expr: TokenStream, +} + +impl Toggle { + pub fn parenthesized(&self) -> TokenStream { + let paren_token = Paren { + span: self.bracket_token.span, + }; + + let mut tokens = TokenStream::new(); + + paren_token.surround(&mut tokens, |tokens| { + self.expr.to_tokens(tokens); + }); + + quote! 
{ + { + #[allow(unused_parens)] + #tokens + } + } + } + + pub fn parse_optional(input: ParseStream) -> syn::Result> { + if input.peek(Bracket) { + input.parse().map(Some) + } else { + Ok(None) + } + } +} + +impl Parse for Toggle { + fn parse(input: ParseStream) -> syn::Result { + let content; + + Ok(Self { + bracket_token: bracketed!(content in input), + expr: content.parse()?, + }) + } +} diff --git a/crates/hypertext-syntax/src/repr/basics.rs b/crates/hypertext-syntax/src/repr/basics.rs new file mode 100644 index 0000000..5b57124 --- /dev/null +++ b/crates/hypertext-syntax/src/repr/basics.rs @@ -0,0 +1,325 @@ +use std::fmt::{self, Display, Formatter, Write}; + +use proc_macro2::{Span, TokenStream}; +use quote::ToTokens; +use syn::{ + Error, Ident, LitBool, LitChar, LitFloat, LitInt, LitStr, Token, + ext::IdentExt, + parse::{Parse, ParseStream}, + spanned::Spanned, +}; + +#[derive(PartialEq, Eq)] +pub struct UnquotedName(pub Vec); + +impl UnquotedName { + pub fn ident_string(&self) -> String { + let mut s = String::new(); + + for fragment in &self.0 { + match fragment { + NameFragment::Ident(ident) => { + _ = write!(s, "{ident}"); + } + NameFragment::Int(num) => { + _ = write!(s, "{num}"); + } + NameFragment::Hyphen(_) => { + s.push('_'); + } + NameFragment::Colon(_) | NameFragment::Dot(_) => { + unreachable!( + "unquoted name idents should only contain identifiers, int literals, and hyphens" + ); + } + } + } + + if s == "super" + || s == "self" + || s == "Self" + || s == "extern" + || s == "crate" + || s == "_" + || s.chars().next().is_some_and(|c| c.is_ascii_digit()) + { + s.insert(0, '_'); + } + + s + } + + pub fn is_component(&self) -> bool { + matches!( + self.0.as_slice(), + [NameFragment::Ident(ident)] + if ident.to_string().chars().next().is_some_and(|c| c.is_ascii_uppercase()) + ) + } + + pub fn spans(&self) -> Vec { + let mut spans = Vec::new(); + + for fragment in &self.0 { + spans.push(fragment.span()); + } + + spans + } + + pub fn lits(&self) -> Vec { 
+ let mut strs = Vec::new(); + + for fragment in &self.0 { + strs.push(LitStr::new(&fragment.to_string(), fragment.span())); + } + + strs + } + + pub fn parse_any(input: ParseStream) -> syn::Result { + let mut name = Vec::new(); + + while input.peek(Token![-]) + || input.peek(Token![:]) + || input.peek(Token![.]) + || (name.last().is_none_or(NameFragment::is_punct) + && (input.peek(Ident::peek_any) || input.peek(LitInt))) + { + name.push(input.parse()?); + } + + Ok(Self(name)) + } + + pub fn parse_attr_value(input: ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + + let mut name = Vec::new(); + + if lookahead.peek(Ident::peek_any) || lookahead.peek(LitInt) { + name.push(input.parse()?); + + while input.peek(Token![-]) + || input.peek(Token![:]) + || (name.last().is_none_or(NameFragment::is_punct) + && (input.peek(Ident::peek_any) || input.peek(LitInt))) + { + name.push(input.parse()?); + } + + Ok(Self(name)) + } else { + Err(lookahead.error()) + } + } +} + +impl Parse for UnquotedName { + fn parse(input: ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + + let mut name = Vec::new(); + + if lookahead.peek(Ident::peek_any) || lookahead.peek(LitInt) { + name.push(input.parse()?); + + while input.peek(Token![-]) + || (name.last().is_none_or(NameFragment::is_punct) + && (input.peek(Ident::peek_any) || input.peek(LitInt))) + { + name.push(input.parse()?); + } + + Ok(Self(name)) + } else { + Err(lookahead.error()) + } + } +} + +#[derive(PartialEq, Eq)] +pub enum NameFragment { + Ident(Ident), + Int(LitInt), + Hyphen(Token![-]), + Colon(Token![:]), + Dot(Token![.]), +} + +impl NameFragment { + fn span(&self) -> Span { + match self { + Self::Ident(ident) => ident.span(), + Self::Int(int) => int.span(), + Self::Hyphen(hyphen) => hyphen.span(), + Self::Colon(colon) => colon.span(), + Self::Dot(dot) => dot.span(), + } + } + + const fn is_punct(&self) -> bool { + matches!(self, Self::Hyphen(_) | Self::Colon(_) | Self::Dot(_)) + } +} + 
+impl Parse for NameFragment { + fn parse(input: ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(Token![-]) { + input.parse().map(Self::Hyphen) + } else if lookahead.peek(Token![:]) { + input.parse().map(Self::Colon) + } else if lookahead.peek(Token![.]) { + input.parse().map(Self::Dot) + } else if lookahead.peek(Ident::peek_any) { + input.call(Ident::parse_any).map(Self::Ident) + } else if lookahead.peek(LitInt) { + input.parse().map(Self::Int) + } else { + Err(lookahead.error()) + } + } +} + +impl Display for NameFragment { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + match self { + Self::Ident(ident) => write!(f, "{ident}"), + Self::Int(num) => write!(f, "{num}"), + Self::Hyphen(_) => f.write_str("-"), + Self::Colon(_) => f.write_str(":"), + Self::Dot(_) => f.write_str("."), + } + } +} + +pub enum Literal { + Str(LitStr), + Int(LitInt), + Bool(LitBool), + Float(LitFloat), + Char(LitChar), +} + +impl Literal { + pub fn lit_str(&self) -> LitStr { + match self { + Self::Str(lit) => lit.clone(), + Self::Int(lit) => LitStr::new(&lit.to_string(), lit.span()), + Self::Bool(lit) => LitStr::new(&lit.value.to_string(), lit.span()), + Self::Float(lit) => LitStr::new(&lit.to_string(), lit.span()), + Self::Char(lit) => LitStr::new(&lit.value().to_string(), lit.span()), + } + } +} + +impl Parse for Literal { + fn parse(input: ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(LitStr) { + let lit = input.parse::()?; + if !lit.suffix().is_empty() { + let suffix = lit.suffix(); + let next_quote = if input.peek(LitStr) { r#"\""# } else { "" }; + return Err(Error::new_spanned( + &lit, + format!( + r#"string suffixes are not allowed in literals (you probably meant `"...\"{suffix}{next_quote}..."` or `"..." 
{suffix}`)"#, + ), + )); + } + let value = unindent(&lit.value()); + Ok(Self::Str(LitStr::new(&value, lit.span()))) + } else if lookahead.peek(LitInt) { + let lit = input.parse::()?; + if !lit.suffix().is_empty() { + return Err(Error::new_spanned( + &lit, + "integer literals cannot have suffixes", + )); + } + Ok(Self::Int(lit)) + } else if lookahead.peek(LitBool) { + input.parse().map(Self::Bool) + } else if lookahead.peek(LitFloat) { + let lit = input.parse::()?; + if !lit.suffix().is_empty() { + return Err(Error::new_spanned( + &lit, + "float literals cannot have suffixes", + )); + } + Ok(Self::Float(lit)) + } else if lookahead.peek(LitChar) { + let lit = input.parse::()?; + if !lit.suffix().is_empty() { + return Err(Error::new_spanned( + &lit, + "character literals cannot have suffixes", + )); + } + Ok(Self::Char(lit)) + } else { + Err(lookahead.error()) + } + } +} + +impl ToTokens for Literal { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Str(lit) => lit.to_tokens(tokens), + Self::Int(lit) => lit.to_tokens(tokens), + Self::Bool(lit) => lit.to_tokens(tokens), + Self::Float(lit) => lit.to_tokens(tokens), + Self::Char(lit) => lit.to_tokens(tokens), + } + } +} + +// from dtolnay/unindent +fn unindent(s: &str) -> String { + const fn is_indent(c: char) -> bool { + c == ' ' || c == '\t' + } + + let mut lines = s.lines().collect::>(); + + // lines() does not include the last line if it ends with a newline + if s.ends_with('\n') { + lines.push(""); + } + + let last_line = lines.len().saturating_sub(1); + + let spaces = lines + .iter() + .skip(1) // skip same line as opening quote + .filter_map(|line| line.chars().position(|ch| !is_indent(ch))) + .min() + .unwrap_or_default(); + + let mut result = String::with_capacity(s.len()); + for (i, line) in lines.iter().enumerate() { + if (i == 1 && !lines[0].is_empty()) + || (1 < i && i < last_line) + || (i == last_line + && last_line != 0 + && (!line.chars().all(is_indent) || line.is_empty())) + { + 
result.push('\n'); + } + if i == 0 { + // Do not un-indent anything on same line as opening quote + result.push_str(line); + } else if line.len() > spaces { + // Whitespace-only lines may have fewer than the number of spaces + // being removed + result.push_str(&line[spaces..]); + } + } + result +} diff --git a/crates/hypertext-syntax/src/repr/component.rs b/crates/hypertext-syntax/src/repr/component.rs new file mode 100644 index 0000000..18fe8c7 --- /dev/null +++ b/crates/hypertext-syntax/src/repr/component.rs @@ -0,0 +1,133 @@ +use proc_macro2::TokenStream; +use quote::{ToTokens, quote, quote_spanned}; +use syn::{ + Ident, Lit, Token, + parse::{Parse, ParseStream}, + spanned::Spanned, + token::{Brace, Paren}, +}; + +use super::{AttributeValue, ElementBody, Generate, Generator, Node, ParenExpr, Syntax}; + +pub struct Component { + pub name: Ident, + pub attrs: Vec, + pub dotdot: Option, + pub body: ElementBody, +} + +impl Generate for Component { + type Context = Node; + + fn generate(&self, g: &mut Generator) { + let fields = self.attrs.iter().map(|attr| { + let name = &attr.name; + attr.value_expr() + .map_or_else(|| quote!(#name,), |value| quote!(#name: #value,)) + }); + + let children = match &self.body { + ElementBody::Normal { children, .. } => { + let buffer_ident = Generator::buffer_ident(); + + let block = g.block_with(Brace::default(), |g| { + g.push(children); + }); + + let lazy = quote! { + ::hypertext::Lazy::dangerously_create( + |#buffer_ident: &mut ::hypertext::Buffer| + #block + ) + }; + + let children_ident = Ident::new("children", self.name.span()); + + quote!( + #children_ident: #lazy, + ) + } + ElementBody::Void => quote!(), + }; + + let name = &self.name; + + let default = self + .dotdot + .as_ref() + .map(|dotdot| quote_spanned!(dotdot.span()=> ..::core::default::Default::default())) + .unwrap_or_default(); + + let init = quote! 
{ + #name { + #(#fields)* + #children + #default + } + }; + + g.push_expr::(Paren::default(), &init); + } +} + +pub struct ComponentAttribute { + pub name: Ident, + pub value: Option, +} + +impl ComponentAttribute { + fn value_expr(&self) -> Option { + self.value.as_ref().map(|value| match value { + ComponentAttributeValue::Literal(lit) => lit.to_token_stream(), + ComponentAttributeValue::Ident(ident) => ident.to_token_stream(), + ComponentAttributeValue::Expr(expr) => { + let mut tokens = TokenStream::new(); + + expr.paren_token.surround(&mut tokens, |tokens| { + expr.expr.to_tokens(tokens); + }); + + tokens + } + }) + } +} + +impl Parse for ComponentAttribute { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + name: input.parse()?, + value: { + if input.peek(Token![=]) { + input.parse::()?; + + Some(input.parse()?) + } else { + None + } + }, + }) + } +} + +pub enum ComponentAttributeValue { + Literal(Lit), + Ident(Ident), + Expr(ParenExpr), +} + +impl Parse for ComponentAttributeValue { + fn parse(input: ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(Lit) { + input.parse().map(Self::Literal) + } else if lookahead.peek(Ident) { + input.parse().map(Self::Ident) + } else if lookahead.peek(Paren) { + input.parse().map(Self::Expr) + } else { + Err(lookahead.error()) + } + } +} diff --git a/crates/hypertext-syntax/src/repr/control.rs b/crates/hypertext-syntax/src/repr/control.rs new file mode 100644 index 0000000..5c57622 --- /dev/null +++ b/crates/hypertext-syntax/src/repr/control.rs @@ -0,0 +1,379 @@ +use std::convert::Infallible; + +use proc_macro2::TokenStream; +use quote::{ToTokens, quote}; +use syn::{ + Expr, Pat, PatType, Token, braced, + parse::{Parse, ParseStream}, + token::Brace, +}; + +use super::Many; +use crate::{AnyBlock, Context, Generate, Generator}; + +pub enum Control { + Let(Let), + If(If), + For(For), + While(While), + Match(Match), +} + +impl Parse for Control { + fn parse(input: ParseStream) 
-> syn::Result { + input.parse::()?; + + let lookahead = input.lookahead1(); + + if lookahead.peek(Token![let]) { + input.parse().map(Self::Let) + } else if lookahead.peek(Token![if]) { + input.parse().map(Self::If) + } else if lookahead.peek(Token![for]) { + input.parse().map(Self::For) + } else if lookahead.peek(Token![while]) { + input.parse().map(Self::While) + } else if lookahead.peek(Token![match]) { + input.parse().map(Self::Match) + } else { + Err(lookahead.error()) + } + } +} + +impl Generate for Control { + type Context = C; + + fn generate(&self, g: &mut Generator) { + match self { + Self::Let(let_) => g.push(let_), + Self::If(if_) => g.push(if_), + Self::For(for_) => g.push(for_), + Self::While(while_) => g.push(while_), + Self::Match(match_) => g.push(match_), + } + } +} + +pub struct Let { + pub let_token: Token![let], + pub pat: Pat, + pub init: Option<(Token![=], Expr)>, + pub semi_token: Token![;], +} + +impl Parse for Let { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + let_token: input.parse()?, + pat: { + let pat = input.call(Pat::parse_single)?; + if input.peek(Token![:]) { + Pat::Type(PatType { + attrs: Vec::new(), + pat: Box::new(pat), + colon_token: input.parse()?, + ty: input.parse()?, + }) + } else { + pat + } + }, + init: if input.peek(Token![=]) { + Some((input.parse()?, input.parse()?)) + } else { + None + }, + semi_token: input.parse()?, + }) + } +} + +impl Generate for Let { + type Context = Infallible; + + fn generate(&self, g: &mut Generator) { + let let_token = self.let_token; + let pat = &self.pat; + let (eq_token, expr) = self + .init + .as_ref() + .map(|(eq_token, expr)| (eq_token, expr)) + .unzip(); + let semi_token = self.semi_token; + + g.push_stmt(quote! 
{ + #let_token #pat #eq_token #expr #semi_token + }); + } +} + +pub struct ControlBlock { + pub brace_token: Brace, + pub children: Many, +} + +impl ControlBlock { + fn block(&self, g: &mut Generator) -> AnyBlock { + self.children.block(g, self.brace_token) + } +} + +impl Parse for ControlBlock { + fn parse(input: ParseStream) -> syn::Result { + let content; + + Ok(Self { + brace_token: braced!(content in input), + children: content.parse()?, + }) + } +} + +pub struct If { + pub if_token: Token![if], + pub cond: Expr, + pub then_block: ControlBlock, + pub else_branch: Option<(Token![else], Box>)>, +} + +impl Parse for If { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + if_token: input.parse()?, + cond: input.call(Expr::parse_without_eager_brace)?, + then_block: input.parse()?, + else_branch: if input.peek(Token![@]) && input.peek2(Token![else]) { + input.parse::()?; + + Some((input.parse()?, input.parse()?)) + } else { + None + }, + }) + } +} + +impl Generate for If { + type Context = C; + + fn generate(&self, g: &mut Generator) { + fn to_expr(if_: &If, g: &mut Generator) -> TokenStream { + let if_token = if_.if_token; + let cond = &if_.cond; + let then_block = if_.then_block.block(g); + let else_branch = if_.else_branch.as_ref().map(|(else_token, if_or_block)| { + let else_block = match &**if_or_block { + ControlIfOrBlock::If(if_) => to_expr(if_, g), + ControlIfOrBlock::Block(block) => block.block(g).to_token_stream(), + }; + + quote! { + #else_token #else_block + } + }); + + quote! 
{ + #if_token #cond + #then_block + #else_branch + } + } + + let expr = to_expr(self, g); + + g.push_stmt(expr); + } +} + +pub enum ControlIfOrBlock { + If(If), + Block(ControlBlock), +} + +impl Parse for ControlIfOrBlock { + fn parse(input: ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(Token![if]) { + input.parse().map(Self::If) + } else if lookahead.peek(Brace) { + input.parse().map(Self::Block) + } else { + Err(lookahead.error()) + } + } +} + +pub struct For { + pub for_token: Token![for], + pub pat: Pat, + pub in_token: Token![in], + pub expr: Expr, + pub block: ControlBlock, +} + +impl Parse for For { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + for_token: input.parse()?, + pat: input.call(Pat::parse_multi_with_leading_vert)?, + in_token: input.parse()?, + expr: input.call(Expr::parse_without_eager_brace)?, + block: input.parse()?, + }) + } +} + +impl Generate for For { + type Context = C; + + fn generate(&self, g: &mut Generator) { + let for_token = self.for_token; + let pat = &self.pat; + let in_token = self.in_token; + let expr = &self.expr; + let block = self.block.block(g); + + g.push_stmt(quote! { + #for_token #pat #in_token #expr + #block + }); + } +} + +pub struct While { + pub while_token: Token![while], + pub cond: Expr, + pub block: ControlBlock, +} + +impl Parse for While { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + while_token: input.parse()?, + cond: input.call(Expr::parse_without_eager_brace)?, + block: input.parse()?, + }) + } +} + +impl Generate for While { + type Context = C; + + fn generate(&self, g: &mut Generator) { + let while_token = self.while_token; + let cond = &self.cond; + let block = self.block.block(g); + + g.push_stmt(quote! 
{ + #while_token #cond + #block + }); + } +} + +pub struct Match { + pub match_token: Token![match], + pub expr: Expr, + pub brace_token: Brace, + pub arms: Vec>, +} + +impl Parse for Match { + fn parse(input: ParseStream) -> syn::Result { + let content; + + Ok(Self { + match_token: input.parse()?, + expr: input.call(Expr::parse_without_eager_brace)?, + brace_token: braced!(content in input), + arms: { + let mut arms = Vec::new(); + + while !content.is_empty() { + arms.push(content.parse()?); + } + + arms + }, + }) + } +} + +impl Generate for Match { + type Context = C; + + fn generate(&self, g: &mut Generator) { + let arms = self + .arms + .iter() + .map(|arm| { + let pat = arm.pat.clone(); + let guard = arm + .guard + .as_ref() + .map(|(if_token, guard)| quote!(#if_token #guard)); + let fat_arrow_token = arm.fat_arrow_token; + let block = match &arm.body { + MatchNodeArmBody::Block(block) => block.block(g), + MatchNodeArmBody::Child(child) => { + g.block_with(Brace::default(), |g| g.push(child)) + } + }; + let comma = arm.comma_token; + + quote!(#pat #guard #fat_arrow_token #block #comma) + }) + .collect::(); + + let match_token = self.match_token; + let expr = &self.expr; + + let mut stmt = quote!(#match_token #expr); + + self.brace_token + .surround(&mut stmt, |tokens| tokens.extend(arms)); + + g.push_stmt(stmt); + } +} + +pub struct MatchNodeArm { + pub pat: Pat, + pub guard: Option<(Token![if], Expr)>, + pub fat_arrow_token: Token![=>], + pub body: MatchNodeArmBody, + pub comma_token: Option, +} + +impl Parse for MatchNodeArm { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + pat: input.call(Pat::parse_multi_with_leading_vert)?, + guard: if input.peek(Token![if]) { + Some((input.parse()?, input.parse()?)) + } else { + None + }, + fat_arrow_token: input.parse()?, + body: input.parse()?, + comma_token: input.parse()?, + }) + } +} + +pub enum MatchNodeArmBody { + Block(ControlBlock), + Child(C), +} + +impl Parse for MatchNodeArmBody { + fn 
parse(input: ParseStream) -> syn::Result { + if input.peek(Brace) { + input.parse().map(Self::Block) + } else { + input.parse().map(Self::Child) + } + } +} diff --git a/crates/hypertext-syntax/src/repr/kw.rs b/crates/hypertext-syntax/src/repr/kw.rs new file mode 100644 index 0000000..5154508 --- /dev/null +++ b/crates/hypertext-syntax/src/repr/kw.rs @@ -0,0 +1,25 @@ +use syn::LitStr; + +syn::custom_keyword!(data); + +impl data { + pub fn lit(self) -> LitStr { + LitStr::new("data", self.span) + } +} + +syn::custom_keyword!(DOCTYPE); + +impl DOCTYPE { + pub fn lit(self) -> LitStr { + LitStr::new("DOCTYPE", self.span) + } +} + +syn::custom_keyword!(html); + +impl html { + pub fn lit(self) -> LitStr { + LitStr::new("html", self.span) + } +} diff --git a/crates/hypertext-syntax/src/repr/mod.rs b/crates/hypertext-syntax/src/repr/mod.rs new file mode 100644 index 0000000..59edbeb --- /dev/null +++ b/crates/hypertext-syntax/src/repr/mod.rs @@ -0,0 +1,318 @@ +#![expect(clippy::large_enum_variant)] + +mod attributes; +mod basics; +mod component; +mod control; +pub mod kw; + +use std::{borrow::Cow, marker::PhantomData}; + +use proc_macro2::{Span, TokenStream}; +use quote::{ToTokens, quote, quote_spanned}; +use syn::{ + LitStr, Token, braced, parenthesized, + parse::{Parse, ParseStream}, + token::{Brace, Paren}, +}; + +pub use self::{attributes::*, basics::*, component::*, control::*}; +use crate::{AnyBlock, Context, ElementCheck, ElementKind, Generate, Generator, syntaxes::Syntax}; + +pub type Document = Many>; + +pub enum Node { + Doctype(Doctype), + Element(Element), + Component(Component), + Literal(Literal), + Control(Control), + Expr(ParenExpr), + DisplayExpr(DisplayExpr), + DebugExpr(DebugExpr), + Group(Group), +} + +impl Context for Node { + fn is_control(&self) -> bool { + matches!(self, Self::Control(_)) + } + + fn marker_type() -> TokenStream { + quote!(::hypertext::context::Node) + } + + fn escape(s: &str) -> Cow<'_, str> { + html_escape::encode_text(s) + } +} + 
+impl Generate for Node { + type Context = Self; + + fn generate(&self, g: &mut Generator) { + match self { + Self::Doctype(doctype) => g.push(doctype), + Self::Element(element) => g.push(element), + Self::Component(component) => g.push(component), + Self::Literal(lit) => g.push_escaped_lit::(&lit.lit_str()), + Self::Control(control) => g.push(control), + Self::Expr(expr) => g.push(expr), + Self::DisplayExpr(display_expr) => g.push(display_expr), + Self::DebugExpr(debug_expr) => g.push(debug_expr), + Self::Group(group) => g.push(group), + } + } +} + +pub struct Doctype { + pub lt_token: Token![<], + pub bang_token: Token![!], + pub doctype_token: kw::DOCTYPE, + pub html_token: kw::html, + pub gt_token: Token![>], + pub phantom: PhantomData, +} + +impl Generate for Doctype { + type Context = Node; + + fn generate(&self, g: &mut Generator) { + g.push_lits(vec![ + LitStr::new("<", self.lt_token.span), + LitStr::new("!", self.bang_token.span), + self.doctype_token.lit(), + LitStr::new(" ", Span::mixed_site()), + self.html_token.lit(), + LitStr::new(">", self.gt_token.span), + ]); + } +} + +pub struct ParenExpr { + pub paren_token: Paren, + pub expr: TokenStream, + pub phantom: PhantomData, +} + +impl Parse for ParenExpr { + fn parse(input: ParseStream) -> syn::Result { + let content; + + Ok(Self { + paren_token: parenthesized!(content in input), + expr: content.parse()?, + phantom: PhantomData, + }) + } +} + +impl Generate for ParenExpr { + type Context = C; + + fn generate(&self, g: &mut Generator) { + g.push_expr::(self.paren_token, &self.expr); + } +} + +impl ToTokens for ParenExpr { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.paren_token.surround(tokens, |tokens| { + self.expr.to_tokens(tokens); + }); + } +} + +pub struct DisplayExpr { + pub percent_token: Token![%], + pub paren_expr: ParenExpr, +} + +impl DisplayExpr { + fn wrapped_expr(&self) -> TokenStream { + let wrapper = quote_spanned!(self.percent_token.span=> Displayed); + let mut 
new_paren_expr = TokenStream::new(); + self.paren_expr + .paren_token + .surround(&mut new_paren_expr, |tokens| { + quote_spanned!(self.paren_expr.paren_token.span=> &).to_tokens(tokens); + self.paren_expr.to_tokens(tokens); + }); + + quote!(::hypertext::#wrapper #new_paren_expr) + } +} + +impl Parse for DisplayExpr { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + percent_token: input.parse()?, + paren_expr: input.parse()?, + }) + } +} + +impl Generate for DisplayExpr { + type Context = C; + + fn generate(&self, g: &mut Generator) { + g.push_expr::(self.paren_expr.paren_token, self.wrapped_expr()); + } +} + +pub struct DebugExpr { + pub question_token: Token![?], + pub expr: ParenExpr, +} + +impl DebugExpr { + fn wrapped_expr(&self) -> TokenStream { + let wrapper = quote_spanned!(self.question_token.span=> Debugged); + let mut new_paren_expr = TokenStream::new(); + self.expr + .paren_token + .surround(&mut new_paren_expr, |tokens| { + quote_spanned!(self.expr.paren_token.span=> &).to_tokens(tokens); + self.expr.to_tokens(tokens); + }); + + quote!(::hypertext::#wrapper #new_paren_expr) + } +} + +impl Parse for DebugExpr { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + question_token: input.parse()?, + expr: input.parse()?, + }) + } +} + +impl Generate for DebugExpr { + type Context = C; + + fn generate(&self, g: &mut Generator) { + g.push_expr::(self.expr.paren_token, self.wrapped_expr()); + } +} + +pub struct Group(pub Many); + +impl Parse for Group { + fn parse(input: ParseStream) -> syn::Result { + let content; + braced!(content in input); + + Ok(Self(content.parse()?)) + } +} + +impl Generate for Group { + type Context = C; + + fn generate(&self, g: &mut Generator) { + g.push(&self.0); + } +} + +pub struct Many(pub Vec); + +impl Many { + fn block(&self, g: &mut Generator, brace_token: Brace) -> AnyBlock { + g.block_with(brace_token, |g| { + g.push_all(&self.0); + }) + } +} + +impl Parse for Many { + fn parse(input: ParseStream) -> 
syn::Result { + Ok(Self({ + let mut children = Vec::new(); + + while !input.is_empty() { + children.push(input.parse()?); + } + + children + })) + } +} + +impl Generate for Many { + type Context = C; + + fn generate(&self, g: &mut Generator) { + if self.0.iter().any(Context::is_control) { + g.push_in_block(Brace::default(), |g| g.push_all(&self.0)); + } else { + g.push_all(&self.0); + } + } +} + +pub struct Element { + pub name: UnquotedName, + pub attrs: Vec, + pub body: ElementBody, +} + +impl Element {} + +impl Generate for Element { + type Context = Node; + + fn generate(&self, g: &mut Generator) { + let mut el_checks = ElementCheck::new(&self.name, self.body.kind()); + + g.push_str("<"); + g.push_lits(self.name.lits()); + + for attr in &self.attrs { + g.push(attr); + if let Some(check) = attr.name.check() { + el_checks.push_attribute(check); + } + } + + g.push_str(">"); + + match &self.body { + ElementBody::Normal { + children, + closing_name, + } => { + let name = closing_name.as_ref().map_or(&self.name, |closing_name| { + el_checks.set_closing_tag(closing_name); + closing_name + }); + + g.push(children); + g.push_str(""); + } + ElementBody::Void => {} + } + + g.record_element(el_checks); + } +} + +pub enum ElementBody { + Normal { + children: Many>, + closing_name: Option, + }, + Void, +} + +impl ElementBody { + const fn kind(&self) -> ElementKind { + match self { + Self::Normal { .. 
} => ElementKind::Normal, + Self::Void => ElementKind::Void, + } + } +} diff --git a/crates/hypertext-syntax/src/syntaxes/maud.rs b/crates/hypertext-syntax/src/syntaxes/maud.rs new file mode 100644 index 0000000..b295fce --- /dev/null +++ b/crates/hypertext-syntax/src/syntaxes/maud.rs @@ -0,0 +1,139 @@ +use std::marker::PhantomData; + +use proc_macro2::Span; +use syn::{ + Ident, LitBool, LitChar, LitFloat, LitInt, LitStr, Token, braced, + ext::IdentExt, + parse::{Parse, ParseStream}, + token::{Brace, Paren}, +}; + +use super::Syntax; +use crate::repr::{ + Attribute, Component, Doctype, Element, ElementBody, Group, Node, UnquotedName, kw, +}; + +pub struct Maud; + +impl Syntax for Maud {} + +impl Parse for Node { + fn parse(input: ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(Ident::peek_any) { + if input.fork().parse::()?.is_component() { + input.parse().map(Self::Component) + } else { + input.parse().map(Self::Element) + } + } else if lookahead.peek(Token![!]) { + input.parse().map(Self::Doctype) + } else if lookahead.peek(LitStr) + || lookahead.peek(LitInt) + || lookahead.peek(LitBool) + || lookahead.peek(LitFloat) + || lookahead.peek(LitChar) + { + input.parse().map(Self::Literal) + } else if lookahead.peek(Token![@]) { + input.parse().map(Self::Control) + } else if lookahead.peek(Paren) { + input.parse().map(Self::Expr) + } else if lookahead.peek(Token![%]) { + input.parse().map(Self::DisplayExpr) + } else if lookahead.peek(Token![?]) { + input.parse().map(Self::DebugExpr) + } else if lookahead.peek(Brace) { + input.parse().map(Self::Group) + } else { + Err(lookahead.error()) + } + } +} + +impl Parse for Doctype { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + lt_token: Token![<](Span::mixed_site()), + bang_token: input.parse()?, + doctype_token: input.parse()?, + html_token: kw::html(Span::mixed_site()), + gt_token: Token![>](Span::mixed_site()), + phantom: PhantomData, + }) + } +} + +impl Parse for 
Group> { + fn parse(input: ParseStream) -> syn::Result { + let content; + braced!(content in input); + + Ok(Self(content.parse()?)) + } +} + +impl Parse for Element { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + name: input.parse()?, + attrs: { + let mut attrs = Vec::new(); + + if input.peek(Token![#]) { + attrs.push(input.call(Attribute::parse_id)?); + } + + if input.peek(Token![.]) { + attrs.push(input.call(Attribute::parse_class_list)?); + } + + while !(input.peek(Token![;]) || input.peek(Brace)) { + attrs.push(input.parse()?); + } + + attrs + }, + body: input.parse()?, + }) + } +} + +impl Parse for ElementBody { + fn parse(input: ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(Brace) { + let content; + braced!(content in input); + content.parse().map(|children| Self::Normal { + children, + closing_name: None, + }) + } else if lookahead.peek(Token![;]) { + input.parse::().map(|_| Self::Void) + } else { + Err(lookahead.error()) + } + } +} + +impl Parse for Component { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + name: input.parse()?, + attrs: { + let mut attrs = Vec::new(); + + while !(input.peek(Token![..]) || input.peek(Token![;]) || input.peek(Brace)) { + attrs.push(input.parse()?); + } + + attrs + }, + dotdot: input.parse()?, + body: input.parse()?, + }) + } +} diff --git a/crates/hypertext-syntax/src/syntaxes/mod.rs b/crates/hypertext-syntax/src/syntaxes/mod.rs new file mode 100644 index 0000000..56a658a --- /dev/null +++ b/crates/hypertext-syntax/src/syntaxes/mod.rs @@ -0,0 +1,6 @@ +mod maud; +mod rsx; + +pub use self::{maud::Maud, rsx::Rsx}; + +pub trait Syntax {} diff --git a/crates/hypertext-syntax/src/syntaxes/rsx.rs b/crates/hypertext-syntax/src/syntaxes/rsx.rs new file mode 100644 index 0000000..7255d15 --- /dev/null +++ b/crates/hypertext-syntax/src/syntaxes/rsx.rs @@ -0,0 +1,262 @@ +use std::marker::PhantomData; + +use syn::{ + Ident, LitBool, LitChar, LitFloat, LitInt, 
LitStr, Token, + ext::IdentExt, + parse::{Parse, ParseStream, discouraged::Speculative}, + parse_quote, + token::Paren, +}; + +use super::Syntax; +use crate::repr::{ + Component, Doctype, Element, ElementBody, Group, Literal, Many, Node, UnquotedName, +}; + +pub struct Rsx; + +impl Syntax for Rsx {} + +impl Node { + fn parse_component(input: ParseStream) -> syn::Result { + input.parse::()?; + + let name = input.parse::()?; + + let mut attrs = Vec::new(); + + #[allow(clippy::suspicious_operation_groupings)] + while !(input.peek(Token![..]) + || input.peek(Token![>]) + || (input.peek(Token![/]) && input.peek2(Token![>]))) + { + attrs.push(input.parse()?); + } + + let dotdot = input.parse()?; + + let solidus = input.parse::>()?; + input.parse::]>()?; + + if solidus.is_some() { + Ok(Self::Component(Component { + name, + attrs, + dotdot, + body: ElementBody::Void, + })) + } else { + let mut children = Vec::new(); + + while !(input.peek(Token![<]) && input.peek2(Token![/])) { + if input.is_empty() { + children.insert( + 0, + Self::Component(Component { + name, + attrs, + dotdot, + body: ElementBody::Void, + }), + ); + + return Ok(Self::Group(Group(Many(children)))); + } + + children.push(input.parse()?); + } + + let fork = input.fork(); + fork.parse::()?; + fork.parse::()?; + let closing_name = fork.parse::()?; + if closing_name == name { + input.advance_to(&fork); + } else { + children.insert( + 0, + Self::Component(Component { + name, + attrs, + dotdot, + body: ElementBody::Void, + }), + ); + + return Ok(Self::Group(Group(Many(children)))); + } + input.parse::]>()?; + + Ok(Self::Component(Component { + name, + attrs, + dotdot, + body: ElementBody::Normal { + children: Many(children), + closing_name: Some(parse_quote!(#closing_name)), + }, + })) + } + } + + fn parse_element(input: ParseStream) -> syn::Result { + input.parse::()?; + + let name = input.parse()?; + + let mut attrs = Vec::new(); + + while !(input.peek(Token![>]) || (input.peek(Token![/]) && 
input.peek2(Token![>]))) { + attrs.push(input.parse()?); + } + + let solidus = input.parse::>()?; + input.parse::]>()?; + + if solidus.is_some() { + Ok(Self::Element(Element { + name, + attrs, + body: ElementBody::Void, + })) + } else { + let mut children = Vec::new(); + + while !(input.peek(Token![<]) && input.peek2(Token![/])) { + if input.is_empty() { + children.insert( + 0, + Self::Element(Element { + name, + attrs, + body: ElementBody::Void, + }), + ); + + return Ok(Self::Group(Group(Many(children)))); + } + children.push(input.parse()?); + } + + let fork = input.fork(); + fork.parse::()?; + fork.parse::()?; + let closing_name = fork.parse()?; + if closing_name == name { + input.advance_to(&fork); + } else { + children.insert( + 0, + Self::Element(Element { + name, + attrs, + body: ElementBody::Void, + }), + ); + + return Ok(Self::Group(Group(Many(children)))); + } + input.parse::]>()?; + + Ok(Self::Element(Element { + name, + attrs, + body: ElementBody::Normal { + children: Many(children), + closing_name: Some(closing_name), + }, + })) + } + } +} + +impl Parse for Node { + fn parse(input: ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(Token![<]) { + let fork = input.fork(); + fork.parse::()?; + let lookahead = fork.lookahead1(); + if lookahead.peek(Token![>]) { + input.parse().map(Self::Group) + } else if lookahead.peek(Ident::peek_any) { + if fork.parse::()?.is_component() { + input.call(Self::parse_component) + } else { + input.call(Self::parse_element) + } + } else if lookahead.peek(Token![!]) { + input.parse().map(Self::Doctype) + } else { + Err(lookahead.error()) + } + } else if lookahead.peek(Token![@]) { + input.parse().map(Self::Control) + } else if lookahead.peek(Paren) { + input.parse().map(Self::Expr) + } else if lookahead.peek(Token![%]) { + input.parse().map(Self::DisplayExpr) + } else if lookahead.peek(Token![?]) { + input.parse().map(Self::DebugExpr) + } else if lookahead.peek(LitStr) + || 
lookahead.peek(LitInt) + || lookahead.peek(LitBool) + || lookahead.peek(LitFloat) + || lookahead.peek(LitChar) + { + input.parse().map(Self::Literal) + } else if lookahead.peek(Ident::peek_any) { + let ident = input.call(Ident::parse_any)?; + + let ident_string = if input.peek(Ident::peek_any) + || input.peek(LitInt) + || input.peek(LitBool) + || input.peek(LitFloat) + { + format!("{ident} ") + } else { + ident.to_string() + }; + + Ok(Self::Literal(Literal::Str(LitStr::new( + &ident_string, + ident.span(), + )))) + } else { + Err(lookahead.error()) + } + } +} + +impl Parse for Doctype { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + lt_token: input.parse()?, + bang_token: input.parse()?, + doctype_token: input.parse()?, + html_token: input.parse()?, + gt_token: input.parse()?, + phantom: PhantomData, + }) + } +} + +impl Parse for Group> { + fn parse(input: ParseStream) -> syn::Result { + input.parse::()?; + input.parse::]>()?; + + let mut children = Vec::new(); + + while !(input.peek(Token![<]) && input.peek2(Token![/]) && input.peek3(Token![>])) { + children.push(input.parse()?); + } + + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + + Ok(Self(Many(children))) + } +} diff --git a/hypertext/Cargo.toml b/crates/hypertext/Cargo.toml similarity index 92% rename from hypertext/Cargo.toml rename to crates/hypertext/Cargo.toml index c52918b..6484f67 100644 --- a/hypertext/Cargo.toml +++ b/crates/hypertext/Cargo.toml @@ -1,7 +1,6 @@ [package] name = "hypertext" version.workspace = true -authors.workspace = true edition.workspace = true description.workspace = true documentation = "https://docs.rs/hypertext" @@ -19,13 +18,13 @@ all-features = true actix-web = { version = "4", default-features = false, optional = true } axum-core = { version = "0.5", default-features = false, optional = true } html-escape = { workspace = true, optional = true } -hypertext-macros.workspace = true +hypertext-proc-macros.workspace = true itoa = { version = "1", 
optional = true } ntex = { version = "2", default-features = false, optional = true } poem = { version = "3", default-features = false, optional = true } rocket = { version = "0.5", default-features = false, optional = true } ryu = { version = "1", optional = true } -salvo_core = { version = "0.81", default-features = false, optional = true } +salvo_core = { version = "0.82", default-features = false, optional = true } tide = { version = "0.16", default-features = false, optional = true } warp = { version = "0.4", default-features = false, optional = true } @@ -47,4 +46,3 @@ warp = ["alloc", "dep:warp"] [lints] workspace = true - diff --git a/hypertext/src/context.rs b/crates/hypertext/src/context.rs similarity index 100% rename from hypertext/src/context.rs rename to crates/hypertext/src/context.rs diff --git a/hypertext/src/lib.rs b/crates/hypertext/src/lib.rs similarity index 58% rename from hypertext/src/lib.rs rename to crates/hypertext/src/lib.rs index 634d8c4..8cf1332 100644 --- a/hypertext/src/lib.rs +++ b/crates/hypertext/src/lib.rs @@ -1,4 +1,4 @@ -//! A blazing fast type-checked HTML macro crate. +//! A blazing fast type checked HTML macro crate. //! //! # Features //! @@ -9,51 +9,66 @@ //! into one if there is no dynamic content between them. //! //! The entire crate is `#![no_std]` compatible, and allocation is completely -//! optional if you don't use any dynamic content. +//! optional (via [`maud::simple!`] and [`rsx::simple!`]) if you don't use any +//! dynamic content. //! //! The crate gives extreme importance to lazy rendering and minimizing //! allocation, so it will only render the HTML to a string when you finally -//! call [`Renderable::render`] at the end. This makes composing nested HTML -//! elements extremely cheap. +//! call [`.render()`][RenderableExt::render] at the end. This makes composing +//! nested HTML elements extremely cheap. //! //! ## Type-Checking //! //! 
All macro invocations are validated at compile time, so you can't ever -//! misspell an element/attribute or use invalid attributes. +//! misspell an element/attribute, use an element or attribute that doesn't +//! exist, use an attribute on an element that doesn't support it, or +//! accidentally use [void elements](validation::Void) as [normal +//! elements](validation::Normal) or vice versa. //! -//! It does this by looking in your current namespace, or a module named -//! `hypertext_elements` (all the valid HTML elements are defined in this crate -//! already in [`hypertext_elements`](validation::hypertext_elements), but it -//! doesn't hard-code this module so you can define your own elements). -//! -//! It then imports each element you use in your macro invocation, and then -//! attempts to access the corresponding associated type for each attribute you -//! use. +//! More details on how this works can be found in the [`validation` +//! module-level documentation](validation). //! //! # Example //! -//! ```rust +//! ``` //! use hypertext::prelude::*; -//! # use hypertext::{Lazy, context::Node, Buffer, validation::{Element, ElementKind, Attribute, Normal}}; +//! # use hypertext::{Lazy, context::{AttributeValue, Node}, Buffer, validation::{Attribute, Element, ElementKind, Normal}}; //! -//! # assert_eq!( +//! # let maud_result = //! maud! { //! div #main title="Main Div" { -//! h1.important { +//! h1 .important.blue { //! "Hello, world!" //! } //! //! @for i in 1..=3 { -//! p.{ "p-" (i) } { +//! p #(i) style="background: gray"[i % 2 == 0] { //! "This is paragraph number " (i) //! } //! } //! } //! } -//! # .render(), +//! # .render(); +//! +//! // or alternatively: +//! +//! # let rsx_result = +//! rsx! { +//!
+//!

"Hello, world!"

+//! @for i in 1..=3 { +//!

+//! "This is paragraph number " (i) +//!

+//! } +//!
+//! } +//! # .render(); //! -//! // expands to (roughly): +//! // expands to: //! +//! # assert_eq!(maud_result, rsx_result); +//! # assert_eq!(maud_result, //! Lazy::<_, Node>::dangerously_create(move |buffer: &mut Buffer| { //! const _: fn() = || { //! use hypertext_elements::*; @@ -73,14 +88,20 @@ //! { //! buffer //! .dangerously_get_string() -//! .push_str(r#"

Hello, world!

"#); +//! .push_str(r#"

Hello, world!

"#); //! for i in 1..=3 { -//! buffer.dangerously_get_string().push_str("

()); +//! buffer.dangerously_get_string().push_str(r#"""#); +//! if i % 2 == 0 { +//! buffer +//! .dangerously_get_string() +//! .push_str(r#" style="background: gray""#); +//! } //! buffer //! .dangerously_get_string() -//! .push_str(r#"">This is paragraph number "#); -//! i.render_to(buffer); +//! .push_str(">This is paragraph number "); +//! i.render_to(buffer.with_context::()); //! buffer.dangerously_get_string().push_str("

"); //! } //! } @@ -88,129 +109,33 @@ //! }) //! # .render()); //! ``` -//! -//! This approach is also extremely extensible, as you can define your own -//! traits to add attributes for your favourite libraries! In fact, this is -//! exactly what [`GlobalAttributes`] does, and why it is required in the above -//! example, as it defines the attributes that can be used on any element, for -//! example [`id`], [`class`], and [`title`]. This library comes with built-in -//! support for many popular frontend attribute-based frameworks in -//! [`validation::attributes`], such as [`HtmxAttributes`] and -//! [`AlpineJsAttributes`] -//! -//! Here's an example of how you could define your own attributes for use with -//! the wonderful frontend library [htmx](https://htmx.org): -//! -//! ```rust -//! use hypertext::{ -//! prelude::*, -//! validation::{Attribute, AttributeNamespace}, -//! }; -//! -//! trait HtmxAttributes: GlobalAttributes { -//! const hx_get: Attribute = Attribute; -//! const hx_on: AttributeNamespace = AttributeNamespace; -//! // ... -//! } -//! -//! impl HtmxAttributes for T {} -//! -//! assert_eq!( -//! maud! { -//! div hx-get="/api/endpoint" hx-on:click="alert('Hello, world!')" { -//! // ^^^^^^ note that it converts `-` to `_` for you during checking! -//! "Hello, world!" -//! } -//! } -//! .render() -//! .as_inner(), -//! r#"
Hello, world!
"#, -//! ); -//! ``` -//! -//! Wrapping an attribue name in quotes will bypass the type-checking, so you -//! can use any attribute you want, even if it doesn't exist in the current -//! context. -//! -//! ```rust -//! use hypertext::prelude::*; -//! -//! assert_eq!( -//! maud! { -//! div "custom-attribute"="value" { "Hello, world!" } -//! } -//! .render() -//! .as_inner(), -//! r#"
Hello, world!
"#, -//! ); -//! ``` -//! -//! This library also supports component structs, which are simply structs that -//! implement [`Renderable`]. If an element name is capitalized, it will be -//! treated as a component, with attributes representing the struct fields. The -//! [`#[component]`](component) macro can be used to easily turn functions into -//! components. -//! -//! ```rust -//! use hypertext::{Buffer, prelude::*}; -//! -//! struct Repeater { -//! count: usize, -//! children: R, -//! } -//! -//! impl Renderable for Repeater { -//! fn render_to(&self, buffer: &mut Buffer) { -//! maud! { -//! @for i in 0..self.count { -//! (self.children) -//! } -//! } -//! .render_to(buffer); -//! } -//! } -//! -//! assert_eq!( -//! maud! { -//! div { -//! Repeater count=3 { -//! // children are passed as a `Lazy` to the `children` field -//! p { "Hi!" } -//! } -//! } -//! } -//! .render() -//! .as_inner(), -//! "

Hi!

Hi!

Hi!

" -//! ); -//! ``` -//! -//! [`GlobalAttributes`]: validation::attributes::GlobalAttributes -//! [`id`]: validation::attributes::GlobalAttributes::id -//! [`class`]: validation::attributes::GlobalAttributes::class -//! [`title`]: validation::attributes::GlobalAttributes::title -//! [`HtmxAttributes`]: validation::attributes::HtmxAttributes -//! [`AlpineJsAttributes`]: validation::attributes::AlpineJsAttributes #![no_std] #![warn(clippy::missing_inline_in_public_items)] -#![cfg_attr(docsrs, expect(internal_features))] -#![cfg_attr(docsrs, feature(rustdoc_internals, doc_cfg, doc_auto_cfg))] +#![cfg_attr(all(docsrs, not(doctest)), expect(internal_features))] +#![cfg_attr( + all(docsrs, not(doctest)), + feature(rustdoc_internals, doc_cfg, doc_auto_cfg) +)] #[cfg(feature = "alloc")] -mod alloc; +extern crate alloc; + pub mod context; mod macros; pub mod prelude; +#[cfg(feature = "alloc")] +mod renderable; pub mod validation; +#[cfg(feature = "alloc")] mod web_frameworks; use core::{fmt::Debug, marker::PhantomData}; -#[cfg(feature = "alloc")] -pub use self::alloc::*; use self::context::{AttributeValue, Context, Node}; pub use self::macros::*; +#[cfg(feature = "alloc")] +pub use self::renderable::*; /// A raw pre-escaped string. /// @@ -230,7 +155,7 @@ pub use self::macros::*; /// /// # Example /// -/// ```rust +/// ``` /// use hypertext::{Raw, prelude::*}; /// /// fn get_some_html() -> String { @@ -321,8 +246,8 @@ pub type RawAttribute = Raw; /// A rendered HTML string. /// -/// This type is returned by [`Renderable::render`] ([`Rendered`]), as -/// well as [`Raw::rendered`] ([`Rendered`]). +/// This type is returned by [`RenderableExt::render`] ([`Rendered`]), +/// as well as [`Raw::rendered`] ([`Rendered`]). /// /// This type intentionally does **not** implement [`Renderable`] to discourage /// anti-patterns such as rendering to a string then embedding that HTML string @@ -365,4 +290,4 @@ macro_rules! 
const_precise_live_drops_hack { (&raw const (*(&raw const this).cast::()).$field).read() }}; } -pub(crate) use const_precise_live_drops_hack; +use const_precise_live_drops_hack; diff --git a/crates/hypertext/src/macros/attribute.rs b/crates/hypertext/src/macros/attribute.rs new file mode 100644 index 0000000..867e378 --- /dev/null +++ b/crates/hypertext/src/macros/attribute.rs @@ -0,0 +1,31 @@ +//! Variants of the [`attribute!`](crate::attribute!) macro. + +/// Generates an attribute value, borrowing the environment. +/// +/// This is identical to [`attribute!`](crate::attribute!), except that it does +/// not take ownership of the environment. This is useful when you want to build +/// a [`LazyAttribute`](crate::LazyAttribute) using some captured variables, but +/// you still want to be able to use the captured variables after the +/// invocation. +#[cfg(feature = "alloc")] +#[cfg_attr(all(docsrs, not(doctest)), doc(cfg(feature = "alloc")))] +pub use hypertext_proc_macros::attribute_borrow as borrow; +/// Generates static HTML attributes. +/// +/// This will return a [`RawAttribute<&'static str>`](crate::RawAttribute), +/// which can be used in `const` contexts. +/// +/// Note that the macro cannot process any dynamic content, so you cannot +/// use any expressions inside the macro. +/// +/// # Example +/// +/// ``` +/// use hypertext::prelude::*; +/// +/// assert_eq!( +/// attribute::simple! { "my attribute " 1 }.into_inner(), +/// "my attribute 1" +/// ); +/// ``` +pub use hypertext_proc_macros::attribute_simple as simple; diff --git a/crates/hypertext/src/macros/maud.rs b/crates/hypertext/src/macros/maud.rs new file mode 100644 index 0000000..51cf25a --- /dev/null +++ b/crates/hypertext/src/macros/maud.rs @@ -0,0 +1,36 @@ +//! Variants of the [`maud!`](crate::maud!) macro. + +/// Generates HTML using [`maud!`](crate::maud!) syntax, borrowing the +/// environment. 
+/// +/// This is identical to [`maud!`](crate::maud!), except that it does not take +/// ownership of the environment. This is useful when you want to build a +/// [`Lazy`](crate::Lazy) using some captured variables, but you still want to +/// be able to use the captured variables after the invocation. +#[cfg(feature = "alloc")] +#[cfg_attr(all(docsrs, not(doctest)), doc(cfg(feature = "alloc")))] +pub use hypertext_proc_macros::maud_borrow as borrow; +/// Generates static HTML using [`maud!`](crate::maud!) syntax. +/// +/// This will return a [`Raw<&'static str>`](crate::Raw), which can be used +/// in `const` contexts. +/// +/// Note that the macro cannot process any dynamic content, so you cannot +/// use any expressions inside the macro. +/// +/// # Example +/// +/// ``` +/// use hypertext::prelude::*; +/// +/// assert_eq!( +/// maud::simple! { +/// div #profile title="Profile" { +/// h1 { "Alice" } +/// } +/// } +/// .into_inner(), +/// r#"

Alice

"#, +/// ); +/// ``` +pub use hypertext_proc_macros::maud_simple as simple; diff --git a/crates/hypertext/src/macros/mod.rs b/crates/hypertext/src/macros/mod.rs new file mode 100644 index 0000000..b90b81d --- /dev/null +++ b/crates/hypertext/src/macros/mod.rs @@ -0,0 +1,141 @@ +pub mod attribute; +pub mod maud; +#[cfg(feature = "alloc")] +mod renderable; +pub mod rsx; + +/// Generates an attribute value, returning a +/// [`LazyAttribute`](crate::LazyAttribute). +/// +/// # Example +/// +/// ``` +/// use hypertext::prelude::*; +/// +/// let attr = attribute! { "x" @for i in 0..5 { (i) } }; +/// +/// assert_eq!( +/// maud! { div title=attr { "Hi!" } }.render().as_inner(), +/// r#"
Hi!
"# +/// ); +/// ``` +#[cfg(feature = "alloc")] +#[cfg_attr(all(docsrs, not(doctest)), doc(cfg(feature = "alloc")))] +pub use hypertext_proc_macros::attribute; +/// Generates HTML using Maud syntax, returning a [`Lazy`](crate::Lazy). +/// +/// Note that this is not a complete 1:1 port of [Maud](https://maud.lambda.xyz)'s +/// syntax as it is stricter in some cases to prevent anti-patterns. +/// +/// Some key differences are: +/// - `#` ([`id`](crate::validation::attributes::GlobalAttributes::id) +/// shorthand), if present, must be the first attribute. +/// - `.` ([`class`](crate::validation::attributes::GlobalAttributes::class) +/// shorthand), if present, come after `#` (if present) and before other +/// attributes. +/// +/// Additionally, the `DOCTYPE` constant present in maud is replaced +/// with a new `!DOCTYPE` syntax, which will render `` in its +/// place. +/// +/// For more details on the rest of Maud's syntax, see the [Maud Book](https://maud.lambda.xyz). +/// +/// # Example +/// +/// ``` +/// use hypertext::prelude::*; +/// +/// let name = "Alice"; +/// +/// assert_eq!( +/// maud! { +/// div #profile title="Profile" { +/// h1 { (name) } +/// } +/// } +/// .render() +/// .as_inner(), +/// r#"

Alice

"# +/// ); +/// ``` +/// +/// ## Using `file` +/// +/// If the named argument `file` is provided, the contents of the file will +/// be interpreted at compile time as input to this macro. The path is +/// interpreted relative to the `CARGO_MANIFEST_DIR` environment +/// variable, which is usually the root of your crate. +/// +/// `static.maud`: +/// ```text +/// div #profile title="Profile" { +/// h1 { (name) } +/// } +/// ``` +/// +/// ``` +/// use hypertext::prelude::*; +/// # macro_rules! maud { (file = "static.maud") => { hypertext::maud! { div #profile title="Profile" { h1 { "Alice" } } } }; } +/// +/// let name = "Alice"; +/// +/// assert_eq!( +/// maud!(file = "static.maud").render().as_inner(), +/// r#"

Alice

"#, +/// ); +/// ``` +#[cfg(feature = "alloc")] +#[cfg_attr(all(docsrs, not(doctest)), doc(cfg(feature = "alloc")))] +pub use hypertext_proc_macros::maud; +/// Generates HTML using RSX syntax, returning a [`Lazy`](crate::Lazy). +/// +/// # Examples +/// +/// ``` +/// use hypertext::prelude::*; +/// +/// let name = "Alice"; +/// +/// assert_eq!( +/// rsx! { +///
+///

(name)

+///
+/// } +/// .render() +/// .as_inner(), +/// r#"

Alice

"# +/// ); +/// ``` +/// +/// ## Using `file` +/// +/// If the named argument `file` is provided, the contents of the file will be +/// interpreted at compile time as input to this macro. The path is interpreted +/// relative to the `CARGO_MANIFEST_DIR` environment variable, which is usually +/// the root of your crate. +/// +/// `static.html`: +/// ```html +///
+///

(name)

+///
+/// ``` +/// +/// ``` +/// use hypertext::prelude::*; +/// # macro_rules! rsx { (file = "static.html") => { hypertext::rsx! {

Alice

} }; } +/// +/// let name = "Alice"; +/// +/// assert_eq!( +/// rsx!(file = "static.html").render().as_inner(), +/// r#"

Alice

"#, +/// ); +/// ``` +#[cfg(feature = "alloc")] +#[cfg_attr(all(docsrs, not(doctest)), doc(cfg(feature = "alloc")))] +pub use hypertext_proc_macros::rsx; + +#[cfg(feature = "alloc")] +pub use self::renderable::*; diff --git a/crates/hypertext/src/macros/renderable.rs b/crates/hypertext/src/macros/renderable.rs new file mode 100644 index 0000000..ca297b1 --- /dev/null +++ b/crates/hypertext/src/macros/renderable.rs @@ -0,0 +1,149 @@ +#![expect(clippy::doc_markdown)] + +/// Derives [`Renderable`](crate::Renderable) for a type. +/// +/// This is used in conjunction with `#[maud]`/`#[rsx]`, as well as +/// `#[attribute]`. +/// +/// # Examples +/// +/// ## `#[maud(...)]` +/// +/// Derives [`Renderable`](crate::Renderable) via the contents of +/// `#[maud(...)]`, which will be interpreted as input to +/// [`maud!`](crate::maud!). +/// +/// This is mutually exclusive with `#[rsx(...)]`. +/// +/// ``` +/// use hypertext::prelude::*; +/// +/// #[derive(Renderable)] +/// #[maud(span { "My name is " (self.name) "!" })] +/// pub struct Person { +/// name: String, +/// } +/// +/// assert_eq!( +/// maud! { div { (Person { name: "Alice".into() }) } } +/// .render() +/// .as_inner(), +/// "
My name is Alice!
" +/// ); +/// ``` +/// +/// ## `#[rsx(...)]` +/// +/// Derives [`Renderable`](crate::Renderable) via the contents of `#[rsx(...)]`, +/// which will be interpreted as input to [`rsx!`](crate::rsx!). +/// +/// This is mutually exclusive with `#[maud(...)]`. +/// +/// ``` +/// use hypertext::prelude::*; +/// +/// #[derive(Renderable)] +/// #[rsx( +/// "My name is " (self.name) "!" +/// )] +/// pub struct Person { +/// name: String, +/// } +/// +/// assert_eq!( +/// rsx! {
(Person { name: "Alice".into() })
} +/// .render() +/// .as_inner(), +/// "
My name is Alice!
" +/// ); +/// ``` +/// +/// ## `#[attribute(...)]` +/// +/// Derives [`Renderable`](crate::Renderable) +/// via the contents of `#[attribute(...)]`, which will be interpreted as input +/// to [`attribute!`](crate::attribute!). +/// +/// This can be used in conjunction with `#[rsx]`/`#[maud]`, as this will +/// derive the [`Renderable`](crate::Renderable) implementation, +/// whereas `#[maud(...)]`/`#[rsx(...)]` will derive the +/// [`Renderable`](crate::Renderable) implementation. +/// +/// ``` +/// use hypertext::prelude::*; +/// +/// #[derive(Renderable)] +/// #[attribute((self.x) "," (self.y))] +/// pub struct Coordinates { +/// x: i32, +/// y: i32, +/// } +/// +/// assert_eq!( +/// maud! { div title=(Coordinates { x: 10, y: 20 }) { "Location" } } +/// .render() +/// .as_inner(), +/// r#"
Location
"# +/// ); +/// ``` +#[cfg_attr(all(docsrs, not(doctest)), doc(cfg(feature = "alloc")))] +pub use hypertext_proc_macros::Renderable; +/// Turns a function returning a [`Renderable`](crate::Renderable) into a +/// struct that implements [`Renderable`](crate::Renderable). +/// +/// This macro generates a struct that has fields corresponding to the +/// function's parameters, and implements [`Renderable`](crate::Renderable) +/// by calling the function with the struct's fields as arguments. +/// +/// There are three types of parameters that are supported, described in +/// the table below: +/// +/// | Parameter Type | Stored As | Example Types | +/// |----------------|-----------|---------------| +/// | `T` | `T` | [`bool`], integers, floats, other [`Copy`] types | +/// | `&T` | `T` | [`&String`](crate::alloc::string::String) | +/// | `&'a T` | `&'a T` | [`&'a str`][str], [`&'a [T]`](slice), other cheap borrowed types | +/// +/// The name of the generated struct is derived from the function name by +/// converting it to PascalCase. If you would like to set a different name, +/// you can specify it as `#[renderable(MyStructName)]` on the function. +/// +/// The visibility of the generated struct is determined by the visibility +/// of the function. If you would like to set a different visibility, +/// you can specify it as `#[renderable(pub)]`, +/// `#[renderable(pub(crate))]`, etc. on the function. +/// +/// You can combine both of these by setting an attribute like +/// `#[renderable(pub MyStructName)]`. +/// +/// # Example +/// +/// ``` +/// use hypertext::prelude::*; +/// +/// #[renderable] +/// fn nav_bar<'a>(title: &'a str, subtitle: &String, add_smiley: bool) -> impl Renderable { +/// maud! { +/// nav { +/// h1 { (title) } +/// h2 { (subtitle) } +/// @if add_smiley { +/// span { ":)" } +/// } +/// } +/// } +/// } +/// +/// assert_eq!( +/// maud! 
{ +/// div { +/// NavBar title="My Nav Bar" subtitle=("My Subtitle".to_owned()) add_smiley=true; +/// } +/// } +/// .render() +/// .as_inner(), +/// "
" +/// ); +/// ``` +#[cfg_attr(all(docsrs, not(doctest)), doc(cfg(feature = "alloc")))] +pub use hypertext_proc_macros::renderable; diff --git a/crates/hypertext/src/macros/rsx.rs b/crates/hypertext/src/macros/rsx.rs new file mode 100644 index 0000000..d0f4dfe --- /dev/null +++ b/crates/hypertext/src/macros/rsx.rs @@ -0,0 +1,36 @@ +//! Variants of the [`rsx!`](crate::rsx!) macro. + +/// Generates HTML using [`rsx!`](crate::rsx!) syntax, borrowing the +/// environment. +/// +/// This is identical to [`rsx!`](crate::rsx!), except that it does not take +/// ownership of the environment. This is useful when you want to build a +/// [`Lazy`](crate::Lazy) using some captured variables, but you still want to +/// be able to use the captured variables after the invocation. +#[cfg(feature = "alloc")] +#[cfg_attr(all(docsrs, not(doctest)), doc(cfg(feature = "alloc")))] +pub use hypertext_proc_macros::rsx_borrow as borrow; +/// Generates static HTML using [`rsx!`](crate::rsx!) syntax. +/// +/// This will return a [`Raw<&'static str>`](crate::Raw), which can be used +/// in `const` contexts. +/// +/// Note that the macro cannot process any dynamic content, so you cannot +/// use any expressions inside the macro. +/// +/// # Examples +/// +/// ``` +/// use hypertext::prelude::*; +/// +/// assert_eq!( +/// rsx::simple! { +///
+///

Alice

+///
+/// } +/// .into_inner(), +/// r#"

Alice

"#, +/// ); +/// ``` +pub use hypertext_proc_macros::rsx_simple as simple; diff --git a/crates/hypertext/src/prelude.rs b/crates/hypertext/src/prelude.rs new file mode 100644 index 0000000..aa9293d --- /dev/null +++ b/crates/hypertext/src/prelude.rs @@ -0,0 +1,24 @@ +//! Re-exported items for convenience. +//! +//! This module re-exports all the commonly used items from the crate, +//! so you can use them without having to import them individually. It also +//! re-exports the [`hypertext_elements`] module, and any [framework-specific +//! attribute traits](crate::validation::attributes) that have been enabled, as +//! well as the [`GlobalAttributes`] trait. +#[cfg(feature = "alpine")] +pub use crate::validation::attributes::AlpineJsAttributes; +#[cfg(feature = "htmx")] +pub use crate::validation::attributes::HtmxAttributes; +#[cfg(feature = "hyperscript")] +pub use crate::validation::attributes::HyperscriptAttributes; +#[cfg(feature = "mathml")] +pub use crate::validation::attributes::MathMlGlobalAttributes; +#[cfg(feature = "alloc")] +pub use crate::{Renderable, RenderableExt as _, Rendered}; +pub use crate::{ + macros::*, + validation::{ + attributes::{AriaAttributes, EventHandlerAttributes, GlobalAttributes}, + hypertext_elements, + }, +}; diff --git a/crates/hypertext/src/renderable/buffer.rs b/crates/hypertext/src/renderable/buffer.rs new file mode 100644 index 0000000..e770bf3 --- /dev/null +++ b/crates/hypertext/src/renderable/buffer.rs @@ -0,0 +1,192 @@ +use core::{ + fmt::{self, Debug, Formatter}, + marker::PhantomData, + ptr, +}; + +use super::String; +use crate::{ + Renderable, Rendered, + context::{AttributeValue, Context, Node}, +}; + +/// A buffer used for rendering HTML. +/// +/// This is a wrapper around [`String`] that prevents accidental XSS +/// vulnerabilities by disallowing direct rendering of raw HTML into the buffer +/// without clearly opting into the risk of doing so. 
+#[derive(Clone, PartialEq, Eq)] +#[repr(transparent)] +pub struct Buffer { + inner: String, + context: PhantomData, +} + +/// A buffer used for rendering attribute values. +/// +/// This is a type alias for [`Buffer`]. +pub type AttributeBuffer = Buffer; + +#[expect( + clippy::missing_const_for_fn, + reason = "`Buffer` does not make sense in `const` contexts" +)] +impl Buffer { + /// Creates a new, empty [`Buffer`]. + #[inline] + #[must_use] + pub fn new() -> Self { + // XSS SAFETY: The buffer is empty and does not contain any HTML. + Self::dangerously_from_string(String::new()) + } + + /// Creates a new [`Buffer`] from the given [`String`]. + /// + /// It is recommended to add a `// XSS SAFETY` comment above the usage of + /// this function to indicate why the original string is safe to be used in + /// this context. + #[inline] + #[must_use] + pub fn dangerously_from_string(string: String) -> Self { + Self { + inner: string, + context: PhantomData, + } + } + + /// Creates a new [`&mut Buffer`](Buffer) from the given [`&mut + /// String`](String). + /// + /// It is recommended to add a `// XSS SAFETY` comment above the usage of + /// this function to indicate why the original string is safe to be used in + /// this context. + #[inline] + #[must_use] + pub fn dangerously_from_string_mut(string: &mut String) -> &mut Self { + // SAFETY: + // - `Buffer` is a `#[repr(transparent)]` wrapper around `String`, differing + // only in the zero-sized `PhantomData` marker type. + // - `PhantomData` does not affect memory layout, so the layout of `Buffer` + // and `String` is guaranteed to be identical by Rust's type system. + // - The lifetime of the reference is preserved, and there are no aliasing or + // validity issues, as both types are functionally identical at runtime. + unsafe { &mut *ptr::from_mut(string).cast::() } + } + + /// Pushes a [`Renderable`] value to the buffer. 
+ /// + /// This is a convenience method that calls + /// [`value.render_to(self)`](Renderable::render_to). + #[inline] + pub fn push(&mut self, value: impl Renderable) { + value.render_to(self); + } + + /// Gets a mutable reference to the inner [`String`]. + /// + /// For [`Buffer`] (a.k.a. [`Buffer`]) writes, the caller must push + /// complete HTML nodes. If rendering string-like types, the pushed contents + /// must escape `&` to `&`, `<` to `<`, and `>` to `>`. + /// + /// For [`Buffer`] (a.k.a. [`AttributeBuffer`]) writes, the + /// caller must push attribute values which will eventually be surrounded by + /// double quotes. The pushed contents must escape `&` to `&`, `<` to + /// `<`, `>` to `>`, and `"` to `"`. + /// + /// This should only be needed in very specific cases, such as manually + /// constructing raw HTML, usually within a [`Renderable::render_to`] + /// implementation. + /// + /// It is recommended to add a `// XSS SAFETY` comment above the usage of + /// this method to indicate why it is safe to directly write to the + /// underlying buffer. + /// + /// # Example + /// + /// ``` + /// use hypertext::{Buffer, prelude::*}; + /// + /// fn get_some_html() -> String { + /// // get html from some source, such as a CMS + /// "

Some HTML from the CMS

".into() + /// } + /// + /// let mut buffer = Buffer::new(); + /// + /// buffer.push(maud! { + /// h1 { "My Document!" } + /// }); + /// + /// // XSS SAFETY: The CMS sanitizes the HTML before returning it. + /// buffer.dangerously_get_string().push_str(&get_some_html()); + /// + /// assert_eq!( + /// buffer.rendered().as_inner(), + /// "

My Document!

Some HTML from the CMS

" + /// ) + /// ``` + #[inline] + pub fn dangerously_get_string(&mut self) -> &mut String { + &mut self.inner + } + + /// Extracts the inner [`String`] from the buffer. + #[inline] + #[must_use] + pub fn into_inner(self) -> String { + self.inner + } + + /// Converts this into an [`&mut Buffer`](Buffer), where `Self: + /// AsMut>`. + /// + /// This is mostly used for converting from [`Buffer`] to + /// [`AttributeBuffer`]. + #[inline] + pub fn with_context(&mut self) -> &mut Buffer + where + Self: AsMut>, + { + self.as_mut() + } +} + +impl Buffer { + /// Renders the buffer to a [`Rendered`]. + #[inline] + #[must_use] + pub fn rendered(self) -> Rendered { + Rendered(self.inner) + } +} + +impl AsMut for Buffer { + #[inline] + fn as_mut(&mut self) -> &mut Self { + self + } +} + +impl AsMut for Buffer { + #[inline] + fn as_mut(&mut self) -> &mut AttributeBuffer { + // SAFETY: Both `Buffer` and `AttributeBuffer` are `#[repr(transparent)]` + // wrappers around `String`, differing only in the zero-sized `PhantomData` + // marker type. 
+ unsafe { &mut *ptr::from_mut(self).cast::() } + } +} + +impl Default for Buffer { + #[inline] + fn default() -> Self { + Self::new() + } +} + +impl Debug for Buffer { + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_tuple("Buffer").field(&self.inner).finish() + } +} diff --git a/hypertext/src/alloc/impls.rs b/crates/hypertext/src/renderable/impls.rs similarity index 65% rename from hypertext/src/alloc/impls.rs rename to crates/hypertext/src/renderable/impls.rs index 1b1b6b8..d8ce42b 100644 --- a/hypertext/src/alloc/impls.rs +++ b/crates/hypertext/src/renderable/impls.rs @@ -1,15 +1,15 @@ use core::fmt::{self, Write}; -use super::alloc::{ - borrow::{Cow, ToOwned}, - boxed::Box, - rc::Rc, - string::String, - sync::Arc, - vec::Vec, -}; use crate::{ - AttributeBuffer, Buffer, Raw, Renderable, Rendered, + AttributeBuffer, Buffer, Raw, Renderable, + alloc::{ + borrow::{Cow, ToOwned}, + boxed::Box, + rc::Rc, + string::String, + sync::Arc, + vec::Vec, + }, context::{AttributeValue, Context, Node}, }; @@ -22,44 +22,33 @@ impl, C: Context> Renderable for Raw { } #[inline] - fn render(&self) -> Rendered { - Rendered(self.as_str().into()) - } -} - -impl Renderable for fmt::Arguments<'_> { - #[inline] - fn render_to(&self, buffer: &mut Buffer) { - struct ElementEscaper<'a>(&'a mut String); - - impl Write for ElementEscaper<'_> { - #[inline] - fn write_str(&mut self, s: &str) -> fmt::Result { - html_escape::encode_text_to_string(s, self.0); - Ok(()) - } - } - - // XSS SAFETY: `ElementEscaper` will escape special characters. - _ = ElementEscaper(buffer.dangerously_get_string()).write_fmt(*self); + fn to_buffer(&self) -> Buffer { + // XSS SAFETY: `Raw` values are expected to be pre-escaped for + // their respective rendering context. 
+ Buffer::dangerously_from_string(self.as_str().into()) } } -impl Renderable for fmt::Arguments<'_> { +impl Renderable for fmt::Arguments<'_> +where + str: Renderable, +{ #[inline] - fn render_to(&self, buffer: &mut AttributeBuffer) { - struct AttributeEscaper<'a>(&'a mut String); + fn render_to(&self, buffer: &mut Buffer) { + struct Escaper<'a, C: Context>(&'a mut Buffer); - impl Write for AttributeEscaper<'_> { + impl Write for Escaper<'_, C> + where + str: Renderable, + { #[inline] fn write_str(&mut self, s: &str) -> fmt::Result { - html_escape::encode_double_quoted_attribute_to_string(s, self.0); + s.render_to(self.0); Ok(()) } } - // XSS SAFETY: `AttributeEscaper` will escape special characters. - _ = AttributeEscaper(buffer.dangerously_get_string()).write_fmt(*self); + _ = Escaper(buffer).write_fmt(*self); } } @@ -76,8 +65,9 @@ impl Renderable for char { } #[inline] - fn render(&self) -> Rendered { - Rendered(match *self { + fn to_buffer(&self) -> Buffer { + // XSS SAFETY: we are manually performing escaping here + Buffer::dangerously_from_string(match *self { '&' => "&".into(), '<' => "<".into(), '>' => ">".into(), @@ -100,6 +90,18 @@ impl Renderable for char { c => s.push(c), } } + + #[inline] + fn to_buffer(&self) -> AttributeBuffer { + // XSS SAFETY: we are manually performing escaping here + AttributeBuffer::dangerously_from_string(match *self { + '&' => "&".into(), + '<' => "<".into(), + '>' => ">".into(), + '"' => """.into(), + c => c.into(), + }) + } } impl Renderable for str { @@ -110,8 +112,9 @@ impl Renderable for str { } #[inline] - fn render(&self) -> Rendered { - Rendered(html_escape::encode_text(self).into_owned()) + fn to_buffer(&self) -> Buffer { + // XSS SAFETY: we use `html_escape` to ensure the text is properly escaped + Buffer::dangerously_from_string(html_escape::encode_text(self).into_owned()) } } @@ -124,24 +127,28 @@ impl Renderable for str { buffer.dangerously_get_string(), ); } -} -impl Renderable for String { #[inline] - fn 
render_to(&self, buffer: &mut Buffer) { - self.as_str().render_to(buffer); + fn to_buffer(&self) -> AttributeBuffer { + // XSS SAFETY: we use `html_escape` to ensure the text is properly escaped + AttributeBuffer::dangerously_from_string( + html_escape::encode_double_quoted_attribute(self).into_owned(), + ) } +} +impl Renderable for String +where + str: Renderable, +{ #[inline] - fn render(&self) -> Rendered { - Renderable::::render(self.as_str()) + fn render_to(&self, buffer: &mut Buffer) { + self.as_str().render_to(buffer); } -} -impl Renderable for String { #[inline] - fn render_to(&self, buffer: &mut AttributeBuffer) { - self.as_str().render_to(buffer); + fn to_buffer(&self) -> Buffer { + self.as_str().to_buffer() } } @@ -155,8 +162,9 @@ impl Renderable for bool { } #[inline] - fn render(&self) -> Rendered { - Rendered(if *self { "true" } else { "false" }.into()) + fn to_buffer(&self) -> Buffer { + // XSS SAFETY: "true" and "false" are safe strings + Buffer::dangerously_from_string(if *self { "true" } else { "false" }.into()) } } @@ -171,8 +179,9 @@ macro_rules! render_via_itoa { } #[inline] - fn render(&self) -> Rendered { - Rendered(itoa::Buffer::new().format(*self).into()) + fn to_buffer(&self) -> Buffer { + // XSS SAFETY: integers are safe + Buffer::dangerously_from_string(itoa::Buffer::new().format(*self).into()) } } )* @@ -195,8 +204,9 @@ macro_rules! render_via_ryu { } #[inline] - fn render(&self) -> Rendered { - Rendered(ryu::Buffer::new().format(*self).into()) + fn to_buffer(&self) -> Buffer { + // XSS SAFETY: floats are safe + Buffer::dangerously_from_string(ryu::Buffer::new().format(*self).into()) } } )* @@ -210,22 +220,16 @@ render_via_ryu! { macro_rules! 
render_via_deref { ($($Ty:ty)*) => { $( - impl Renderable for $Ty { - #[inline] - fn render_to(&self, buffer: &mut Buffer) { - T::render_to(&**self, buffer); - } - + impl + ?Sized, C: Context> Renderable for $Ty { #[inline] - fn render(&self) -> Rendered { - T::render(&**self) + fn render_to(&self, buffer: &mut Buffer) { + // T::render_to(&**self, buffer); + (**self).render_to(buffer); } - } - impl + ?Sized> Renderable for $Ty { #[inline] - fn render_to(&self, buffer: &mut AttributeBuffer) { - T::render_to(&**self, buffer); + fn to_buffer(&self) -> Buffer { + (**self).to_buffer() } } )* @@ -240,24 +244,15 @@ render_via_deref! { Arc } -impl<'a, B: 'a + Renderable + ToOwned + ?Sized> Renderable for Cow<'a, B> { - #[inline] - fn render_to(&self, buffer: &mut Buffer) { - B::render_to(&**self, buffer); - } - +impl<'a, B: 'a + Renderable + ToOwned + ?Sized, C: Context> Renderable for Cow<'a, B> { #[inline] - fn render(&self) -> Rendered { - B::render(&**self) + fn render_to(&self, buffer: &mut Buffer) { + (**self).render_to(buffer); } -} -impl<'a, B: 'a + Renderable + ToOwned + ?Sized> Renderable - for Cow<'a, B> -{ #[inline] - fn render_to(&self, buffer: &mut AttributeBuffer) { - B::render_to(&**self, buffer); + fn to_buffer(&self) -> Buffer { + (**self).to_buffer() } } @@ -311,8 +306,8 @@ macro_rules! impl_tuple { } }; (($i:tt $T:ident)) => { - #[cfg_attr(docsrs, doc(fake_variadic))] - #[cfg_attr(docsrs, doc = "This trait is implemented for tuples up to twelve items long.")] + #[cfg_attr(all(docsrs, not(doctest)), doc(fake_variadic))] + #[cfg_attr(all(docsrs, not(doctest)), doc = "This trait is implemented for tuples up to twelve items long.")] impl<$T: Renderable, C: Context> Renderable for ($T,) { #[inline] fn render_to(&self, buffer: &mut Buffer) { @@ -321,7 +316,7 @@ macro_rules! 
impl_tuple { } }; (($i0:tt $T0:ident) $(($i:tt $T:ident))+) => { - #[cfg_attr(docsrs, doc(hidden))] + #[cfg_attr(all(docsrs, not(doctest)), doc(hidden))] impl<$T0: Renderable, $($T: Renderable),*, C: Context> Renderable for ($T0, $($T,)*) { #[inline] fn render_to(&self, buffer: &mut Buffer) { diff --git a/crates/hypertext/src/renderable/mod.rs b/crates/hypertext/src/renderable/mod.rs new file mode 100644 index 0000000..1a2ca49 --- /dev/null +++ b/crates/hypertext/src/renderable/mod.rs @@ -0,0 +1,404 @@ +#![allow(clippy::doc_markdown)] + +mod buffer; +mod impls; + +use core::{ + fmt::{self, Debug, Display, Formatter}, + marker::PhantomData, +}; + +pub use self::buffer::*; +use crate::{ + Raw, Rendered, + alloc::string::String, + const_precise_live_drops_hack, + context::{AttributeValue, Context, Node}, +}; + +/// A type that can be rendered as an HTML node. +/// +/// For [`Renderable`] (a.k.a. [`Renderable`]) implementations, this +/// must render complete HTML nodes. If rendering string-like types, the +/// implementation must escape `&` to `&`, `<` to `<`, and `>` to `>`. +/// +/// For [`Renderable`] implementations, this must render an +/// attribute value which will eventually be surrounded by double quotes. The +/// implementation must escape `&` to `&`, `<` to `<`, `>` to `>`, and +/// `"` to `"`. +/// +/// +/// # Examples +/// +/// ## Implementing [`Renderable`] +/// +/// There are 3 ways to implement this trait. +/// +/// ### Manual [`impl Renderable`](Renderable) +/// +/// ``` +/// use hypertext::{Buffer, prelude::*}; +/// +/// pub struct Person { +/// name: String, +/// age: u8, +/// } +/// +/// impl Renderable for Person { +/// fn render_to(&self, buffer: &mut Buffer) { +/// buffer.push(maud! { +/// div { +/// h1 { (self.name) } +/// p { "Age: " (self.age) } +/// } +/// }); +/// } +/// } +/// +/// let person = Person { +/// name: "Alice".into(), +/// age: 20, +/// }; +/// +/// assert_eq!( +/// maud! { main { (person) } }.render().as_inner(), +/// "

Alice

Age: 20

", +/// ); +/// ``` +/// +/// ### [`#[derive(Renderable)]`](derive@crate::Renderable) +/// +/// ``` +/// use hypertext::prelude::*; +/// +/// #[derive(Renderable)] +/// #[maud( +/// div { +/// h1 { (self.name) } +/// p { "Age: " (self.age) } +/// } +/// )] +/// struct Person { +/// name: String, +/// age: u8, +/// } +/// +/// let person = Person { +/// name: "Alice".into(), +/// age: 20, +/// }; +/// +/// assert_eq!( +/// maud! { main { (person) } }.render().as_inner(), +/// "

Alice

Age: 20

", +/// ); +/// ``` +/// +/// ### [`#[renderable]`](crate::renderable) +/// +/// ``` +/// use hypertext::prelude::*; +/// #[renderable] +/// fn person(name: &String, age: u8) -> impl Renderable { +/// maud! { +/// div { +/// h1 { (name) } +/// p { "Age: " (age) } +/// } +/// } +/// } +/// +/// assert_eq!( +/// maud! { main { (Person { name: "Alice".into(), age: 20 }) } } +/// .render() +/// .as_inner(), +/// "

Alice

Age: 20

", +/// ); +/// ``` +/// +/// ## Component Syntax +/// +/// In addition to the standard way of rendering a [`Renderable`] struct inside +/// a `(...)` node, you can also use the "component" syntax to make using these +/// types more like popular frontend frameworks such as React.js. +/// +/// ### [`maud!`](crate::maud!) +/// +/// ``` +/// use hypertext::prelude::*; +/// +/// #[renderable] +/// fn person(name: &String, age: u8) -> impl Renderable { +/// maud! { +/// div { +/// h1 { (name) } +/// p { "Age: " (age) } +/// } +/// } +/// } +/// +/// assert_eq!( +/// maud! { main { Person name=("Alice".into()) age=20; } } +/// .render() +/// .as_inner(), +/// "

Alice

Age: 20

", +/// ); +/// ``` +/// +/// ### [`rsx!`](crate::rsx!) +/// +/// ``` +/// use hypertext::prelude::*; +/// +/// #[renderable] +/// fn person(name: &String, age: u8) -> impl Renderable { +/// rsx! { +///
+///

(name)

+///

"Age: " (age)

+///
+/// } +/// } +/// +/// assert_eq!( +/// rsx! { +///
+/// +///
+/// } +/// .render() +/// .as_inner(), +/// "

Alice

Age: 20

", +/// ); +/// ``` +/// +/// ### `children` +/// +/// If you add children to the component node, the macro will pass them to the +/// `children` field of the type as a [`Lazy`]. +/// +/// ``` +/// use hypertext::prelude::*; +/// +/// #[renderable] +/// fn person(name: &String, age: u8, children: &R) -> impl Renderable { +/// maud! { +/// div { +/// h1 { (name) } +/// p { "Age: " (age) } +/// (children) +/// } +/// } +/// } +/// +/// assert_eq!( +/// maud! { +/// main { +/// Person name=("Alice".into()) age=20 { +/// p { "Pronouns: she/her" } +/// } +/// } +/// } +/// .render() +/// .as_inner(), +/// "

Alice

Age: 20

Pronouns: she/her

", +/// ); +/// ``` +pub trait Renderable { + /// Renders this value to the buffer. + fn render_to(&self, buffer: &mut Buffer); + + /// Creates a new [`Buffer`] from this value. + /// + /// This is a convenience method that creates a new [`Buffer`], + /// [`push`](Buffer::push)es `self` to it, then returns it. + /// + /// This may be overridden if `Self` is a string-like pre-escaped type that + /// can more efficiently be turned into a [`Buffer`] via + /// [`Buffer::dangerously_from_string`]. If overriden, the + /// implementation must match what [`render_to`](Renderable::render_to) + /// would produce. + #[inline] + fn to_buffer(&self) -> Buffer { + let mut buffer = Buffer::::new(); + buffer.push(self); + buffer + } +} + +/// An extension trait for [`Renderable`] types. +/// +/// This trait provides an additional method for rendering and memoizing values. +pub trait RenderableExt: Renderable { + /// Renders this value to a [`Rendered`]. + /// + /// This is usually the final step in rendering a value, converting it + /// into a [`Rendered`](Rendered) that can be returned as an HTTP + /// response or written to a file. + #[inline] + fn render(&self) -> Rendered { + self.to_buffer().rendered() + } + + /// Pre-renders the value and stores it in a [`Raw`] so that it can be + /// re-used among multiple renderings without re-computing the value. + /// + /// This should generally be avoided to prevent unnecessary allocations, but + /// may be useful if it is more expensive to compute and render the value. + #[inline] + fn memoize(&self) -> Raw { + // XSS SAFETY: The value has already been rendered and is assumed as safe. + Raw::dangerously_create(self.to_buffer().into_inner()) + } +} + +impl RenderableExt for T {} + +/// A value lazily rendered via a closure. +/// +/// For [`Lazy`] (a.k.a. [`Lazy`]), this must render complete +/// HTML nodes. If rendering string-like types, the closure must escape `&` to +/// `&`, `<` to `<`, and `>` to `>`. +/// +/// For [`Lazy`] (a.k.a. 
[`LazyAttribute`]), this must +/// render an attribute value which will eventually be surrounded by double +/// quotes. The closure must escape `&` to `&`, `<` to `<`, `>` to +/// `>`, and `"` to `"`. +#[derive(Clone, Copy)] +#[must_use = "`Lazy` does nothing unless `.render()` or `.render_to()` is called"] +pub struct Lazy), C: Context = Node> { + f: F, + context: PhantomData, +} + +/// An attribute value lazily rendered via a closure. +/// +/// This is a type alias for [`Lazy`]. +pub type LazyAttribute = Lazy; + +impl), C: Context> Lazy { + /// Creates a new [`Lazy`] from the given closure. + /// + /// It is recommended to add a `// XSS SAFETY` comment above the usage of + /// this function to indicate why it is safe to assume that the closure will + /// not write possibly unsafe HTML to the buffer. + #[inline] + pub const fn dangerously_create(f: F) -> Self { + Self { + f, + context: PhantomData, + } + } + + /// Extracts the inner closure. + #[inline] + pub const fn into_inner(self) -> F { + // SAFETY: `Lazy` has exactly one non-zero-sized field, which is `f`. + unsafe { const_precise_live_drops_hack!(self.f) } + } + + /// Gets a reference to the inner closure. + #[inline] + pub const fn as_inner(&self) -> &F { + &self.f + } +} + +impl), C: Context> Renderable for Lazy { + #[inline] + fn render_to(&self, buffer: &mut Buffer) { + (self.f)(buffer); + } +} + +impl), C: Context> Debug for Lazy { + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_tuple("Lazy").finish_non_exhaustive() + } +} + +/// A value rendered via its [`Display`] implementation. +/// +/// This will handle escaping special characters for you. +/// +/// This can be created more easily via the `%(...)` syntax in +/// [`maud!`](crate::maud!), [`rsx!`](crate::rsx!), and +/// [`attribute!`](crate::attribute!) which will automatically wrap the +/// expression in this type. 
+/// +/// # Example +/// +/// ``` +/// use std::fmt::{self, Display, Formatter}; +/// +/// use hypertext::prelude::*; +/// +/// struct Greeting(&'static str); +/// +/// impl Display for Greeting { +/// fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { +/// write!(f, "Hello, {}!