Merge branch 'main' into debug-subscriptions

Jonathan Kelley, 2024-02-29 12:30:27 -08:00, committed by GitHub
commit d61c3fab20
No known key found for this signature in database (GPG key ID: B5690EEEBB952194)
45 changed files with 960 additions and 662 deletions

12 .github/free_space.sh vendored

@ -1,12 +0,0 @@
df -h
sudo rm -rf ${GITHUB_WORKSPACE}/.git
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
sudo rm -rf /usr/share/dotnet
sudo apt-get remove -y '^ghc-8.*'
sudo apt-get remove -y '^dotnet-.*'
sudo apt-get remove -y '^llvm-.*'
sudo apt-get remove -y 'php.*'
sudo apt-get remove -y azure-cli google-cloud-sdk hhvm google-chrome-stable firefox powershell mono-devel
sudo apt-get autoremove -y
sudo apt-get clean
df -h


@ -1,56 +0,0 @@
name: Build CLI for Release
# Will run automatically on every new release
on:
release:
types: [published]
jobs:
build-and-upload:
permissions:
contents: write
runs-on: ${{ matrix.platform.os }}
strategy:
matrix:
platform:
- {
target: x86_64-pc-windows-msvc,
os: windows-latest,
toolchain: "1.70.0",
}
- {
target: x86_64-apple-darwin,
os: macos-latest,
toolchain: "1.70.0",
}
- {
target: x86_64-unknown-linux-gnu,
os: ubuntu-latest,
toolchain: "1.70.0",
}
steps:
- uses: actions/checkout@v4
- name: Install stable
uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ matrix.platform.toolchain }}
targets: ${{ matrix.platform.target }}
- uses: ilammy/setup-nasm@v1
# Setup the Github Actions Cache for the CLI package
- name: Setup cache
uses: Swatinem/rust-cache@v2
with:
workspaces: packages/cli -> ../../target
# This neat action can build and upload the binary in one go!
- name: Build and upload binary
uses: taiki-e/upload-rust-binary-action@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
target: ${{ matrix.platform.target }}
bin: dx
archive: dx-${{ matrix.platform.target }}
checksum: sha256
manifest_path: packages/cli/Cargo.toml


@ -1,43 +0,0 @@
name: docs stable
on:
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
build-deploy:
runs-on: ubuntu-latest
environment: docs
steps:
# NOTE: Comment out when https://github.com/rust-lang/mdBook/pull/1306 is merged and released
# - name: Setup mdBook
# uses: peaceiris/actions-mdbook@v1
# with:
# mdbook-version: "0.4.10"
# NOTE: Delete when the previous one is enabled
- name: Setup mdBook
run: |
cargo install mdbook --git https://github.com/Demonthos/mdBook.git --branch master
- uses: actions/checkout@v4
- name: Build
run: cd docs &&
cd guide && mdbook build -d ../nightly/guide && cd .. &&
cd router && mdbook build -d ../nightly/router && cd ..
# cd reference && mdbook build -d ../nightly/reference && cd .. &&
# cd fermi && mdbook build -d ../nightly/fermi && cd ..
- name: Deploy 🚀
uses: JamesIves/github-pages-deploy-action@v4.5.0
with:
branch: gh-pages # The branch the action should deploy to.
folder: docs/nightly # The folder the action should deploy.
target-folder: docs
repository-name: dioxuslabs/docsite
clean: false
token: ${{ secrets.DEPLOY_KEY }} # let's pretend I don't need it for now


@ -1,38 +0,0 @@
name: Deploy Nightly Docs
on:
push:
branches:
- master
jobs:
deploy:
name: Build & Deploy
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/checkout@v4
- run: sudo apt-get update
- run: sudo apt install libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev libxdo-dev
- uses: dtolnay/rust-toolchain@nightly
with:
toolchain: nightly-2024-02-01
- uses: Swatinem/rust-cache@v2
with:
cache-all-crates: "true"
save-if: ${{ github.ref == 'refs/heads/master' }}
- uses: ilammy/setup-nasm@v1
- name: cargo doc
run: cargo doc --no-deps --workspace --all-features
- name: Deploy
uses: JamesIves/github-pages-deploy-action@v4.5.0
with:
branch: gh-pages
folder: target/doc
target-folder: api-docs/nightly
repository-name: dioxuslabs/docsite
clean: false
token: ${{ secrets.DEPLOY_KEY }}


@ -1,9 +1,13 @@
# Whenever an open PR is updated, the workflow will be triggered
#
# This can get expensive, so we do a lot of caching and checks to prevent unnecessary runs
name: Rust CI
on:
push:
branches:
- master
- main
paths:
- packages/**
- examples/**
@ -17,7 +21,7 @@ on:
pull_request:
types: [opened, synchronize, reopened, ready_for_review]
branches:
- master
- main
paths:
- packages/**
- examples/**
@ -48,7 +52,7 @@ jobs:
- uses: Swatinem/rust-cache@v2
with:
cache-all-crates: "true"
save-if: ${{ github.ref == 'refs/heads/master' }}
save-if: ${{ github.ref == 'refs/heads/main' }}
- uses: ilammy/setup-nasm@v1
- run: cargo check --all --examples --tests --all-features --all-targets
@ -64,7 +68,7 @@ jobs:
- uses: Swatinem/rust-cache@v2
with:
cache-all-crates: "true"
save-if: ${{ github.ref == 'refs/heads/master' }}
save-if: ${{ github.ref == 'refs/heads/main' }}
- uses: ilammy/setup-nasm@v1
- uses: davidB/rust-cargo-make@v1
- uses: browser-actions/setup-firefox@latest
@ -75,7 +79,6 @@ jobs:
large-packages: false
docker-images: false
swap-storage: false
- run: cargo make tests
fmt:
@ -91,7 +94,7 @@ jobs:
- uses: Swatinem/rust-cache@v2
with:
cache-all-crates: "true"
save-if: ${{ github.ref == 'refs/heads/master' }}
save-if: ${{ github.ref == 'refs/heads/main' }}
- run: cargo fmt --all -- --check
clippy:
@ -109,9 +112,36 @@ jobs:
- uses: Swatinem/rust-cache@v2
with:
cache-all-crates: "true"
save-if: ${{ github.ref == 'refs/heads/master' }}
save-if: ${{ github.ref == 'refs/heads/main' }}
- run: cargo clippy --workspace --examples --tests --all-features --all-targets -- -D warnings
# Only run semver checks if the PR is not a draft and does not have the breaking label
# Breaking PRs don't need to follow semver since they are breaking changes
# However, this means we won't attempt to backport them, so you should be careful about using this label, as it will
# likely make future backporting difficult
#
# todo: fix this so even if breaking changes have been merged, the fix can be backported
#
# This will stop working once the first breaking change has been merged, so we should really try to just backport the fix
# and *then* run the semver checks. Basically "would backporting this PR cause a breaking change on stable?"
#
# semver:
# if: github.event.pull_request.draft == false && !contains(github.event.pull_request.labels.*.name, 'breaking')
# name: Semver Check
# runs-on: ubuntu-latest
# steps:
# - uses: actions/checkout@v4
# - uses: dtolnay/rust-toolchain@stable
# - uses: Swatinem/rust-cache@v2
# with:
# cache-all-crates: "true"
# save-if: ${{ github.ref == 'refs/heads/main' }}
# - name: Check semver
# uses: obi1kenobi/cargo-semver-checks-action@v2
# with:
# manifest-path: ./Cargo.toml
# exclude: "dioxus-cli, dioxus-ext"
playwright:
if: github.event.pull_request.draft == false
name: Playwright Tests
@ -137,7 +167,7 @@ jobs:
- uses: Swatinem/rust-cache@v2
with:
cache-all-crates: "true"
save-if: ${{ github.ref == 'refs/heads/master' }}
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Install dependencies
run: npm ci
@ -230,29 +260,8 @@ jobs:
with:
key: "${{ matrix.platform.target }}"
cache-all-crates: "true"
save-if: ${{ github.ref == 'refs/heads/master' }}
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: test
run: |
${{ env.RUST_CARGO_COMMAND }} ${{ matrix.platform.command }} ${{ matrix.platform.args }} --target ${{ matrix.platform.target }}
# Coverage is disabled until we can fix it
# coverage:
# name: Coverage
# runs-on: ubuntu-latest
# container:
# image: xd009642/tarpaulin:develop-nightly
# options: --security-opt seccomp=unconfined
# steps:
# - name: Checkout repository
# uses: actions/checkout@v4
# - name: Generate code coverage
# run: |
# apt-get update &&\
# apt-get install build-essential &&\
# apt install libwebkit2gtk-4.0-dev libgtk-3-dev libayatana-appindicator3-dev -y &&\
# cargo +nightly tarpaulin --verbose --all-features --workspace --timeout 120 --out Xml
# - name: Upload to codecov.io
# uses: codecov/codecov-action@v2
# with:
# fail_ci_if_error: false

92 .github/workflows/merge.yml vendored Normal file

@ -0,0 +1,92 @@
# Runs whenever a PR is merged:
# - attempt to backport fixes
# - upload nightly docs
#
# Future:
# - upload nightly CLI builds
# - upload nightly vscode extension
# - upload benchmarks
# - compute coverage
#
# Note that direct commits to master circumvent this workflow!
name: Backport merged pull request
on:
pull_request_target:
types: [closed]
permissions:
contents: write # so it can comment
pull-requests: write # so it can create pull requests
jobs:
# Attempt to backport a merged pull request to the latest stable release
backport:
name: Backport pull request
runs-on: ubuntu-latest
# Don't run on closed unmerged pull requests, or pull requests with the "breaking" label
if: github.event.pull_request.merged && !contains(github.event.pull_request.labels.*.name, 'breaking')
steps:
- uses: actions/checkout@v4
- name: Create backport pull requests
uses: korthout/backport-action@v2
# Upload nightly docs to the website
docs:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/checkout@v4
- run: sudo apt-get update
- run: sudo apt install libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev libxdo-dev
- uses: dtolnay/rust-toolchain@nightly
with:
toolchain: nightly-2024-02-01
- uses: Swatinem/rust-cache@v2
with:
cache-all-crates: "true"
save-if: ${{ github.ref == 'refs/heads/main' }}
- uses: ilammy/setup-nasm@v1
- name: cargo doc
run: cargo doc --no-deps --workspace --all-features
- name: Deploy
uses: JamesIves/github-pages-deploy-action@v4.5.0
with:
branch: gh-pages
folder: target/doc
target-folder: api-docs/nightly
repository-name: dioxuslabs/docsite
clean: false
token: ${{ secrets.DEPLOY_KEY }}
# Attempt to backport a merged pull request to the latest stable release
#
# If the backported PR is successfully merged
# Any PR without the "breaking" label will be attempted to be backported to the latest stable release
# Coverage is disabled until we can fix it
# coverage:
# name: Coverage
# runs-on: ubuntu-latest
# container:
# image: xd009642/tarpaulin:develop-nightly
# options: --security-opt seccomp=unconfined
# steps:
# - name: Checkout repository
# uses: actions/checkout@v4
# - name: Generate code coverage
# run: |
# apt-get update &&\
# apt-get install build-essential &&\
# apt install libwebkit2gtk-4.0-dev libgtk-3-dev libayatana-appindicator3-dev -y &&\
# cargo +nightly tarpaulin --verbose --all-features --workspace --timeout 120 --out Xml
# - name: Upload to codecov.io
# uses: codecov/codecov-action@v2
# with:
# fail_ci_if_error: false

48 .github/workflows/promote.yml vendored Normal file

@ -0,0 +1,48 @@
# Promote the current main branch to a stable release.
# This will not actually release anything, so you need to run the release workflow after this.
#
# i.e., if the current master version is 0.4.0-rc.7, this will create a PR to promote it to 0.4.0
#
# - update the version in the Cargo.toml to v0.4.0
# - generate a v0.4 branch
# - push the branch to the repository
# - then bump 0.4.0 to 0.5.0-rc.0
#
# This means main will never be a "stable" release, and we can always merge breaking changes to main
# and backport them to the latest stable release
#
# This is configured to be run manually, but could honestly just be a release workflow
name: Promote main to stable branch
on:
workflow_dispatch:
permissions:
actions: write
jobs:
promote:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Publish the next pre-release
run: |
git config --global user.email "github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
# go from eg 0.4.0-rc.7 to 0.4.0, committing the change
cargo workspaces version -y minor
# create a new branch for the release
RELEASE_BRANCH=$(cargo metadata --no-deps --format-version 1 | jq -r '.packages[0].version')
RELEASE_BRANCH=v$(echo $RELEASE_BRANCH | sed 's/\.[0-9]*$//')
git branch $RELEASE_BRANCH
# go from 0.4.0 to 0.5.0-rc.0
cargo workspaces version -y preminor --pre-id rc
# push the new branch to the repository
git push origin $RELEASE_BRANCH
# push the new version to the repository
git push origin main

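The script above derives the release branch by stripping the final version component with sed (e.g. 0.4.0 becomes v0.4). As a purely illustrative sketch of that string manipulation in Rust, with a made-up function name and no connection to the workflow itself:

fn release_branch(version: &str) -> String {
    // Drop the trailing ".patch" component: "0.4.0" -> "0.4", then prefix with "v".
    let major_minor = version.rsplit_once('.').map(|(head, _)| head).unwrap_or(version);
    format!("v{major_minor}")
}

fn main() {
    assert_eq!(release_branch("0.4.0"), "v0.4");
    assert_eq!(release_branch("0.5.0"), "v0.5");
}
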
155 .github/workflows/publish.yml vendored Normal file

@ -0,0 +1,155 @@
# Release workflow
#
# We parallelize builds, dump all the artifacts into a release, and then publish the release
# This guarantees everything is properly built and cached in case anything goes wrong
#
# The artifacts also need to get pushed to the various places
# - the CLI goes to the releases page for binstall
# - the extension goes to the marketplace
# - the docs go to the website
#
# We need to be aware of the channel we're releasing
# - prerelease is master
# - stable is whatever the latest stable release is (ie 0.4 or 0.5 or 0.6 etc)
#
# It's intended that this workflow is run manually, and only when we're ready to release
name: Publish
on:
workflow_dispatch:
inputs:
channel:
type: choice
name: "Branch to publish"
required: true
description: Choose the branch to publish.
options:
- main
- v0.4
- v0.5
- v0.6
env:
# make sure we have the right version
# main is always a prepatch until we hit 1.0, and then this script needs to be updated
# note that we need to promote the prepatch to a minor bump when we actually do a release
# this means the version in git will always be one minor bump ahead of the actual release - basically meaning once
# we release a version, it's fair game to merge breaking changes to main since all semver-compatible changes will be
# backported automatically
SEMVER: ${{ github.event.inputs.channel == 'main' && 'prerelease' || 'patch' }}
PRERELEASE_TAG: ${{ github.event.inputs.channel == 'main' && '-pre' || '' }}
jobs:
# First, run checks (clippy, tests, etc) and then publish the crates to crates.io
release-crates:
steps:
# Checkout the right branch, and the nightly stuff
- uses: actions/checkout@v4
with:
ref: ${{ github.event.inputs.channel }}
- run: sudo apt-get update
- run: sudo apt install libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev libxdo-dev
- uses: dtolnay/rust-toolchain@nightly
with:
toolchain: nightly-2024-02-01
- uses: Swatinem/rust-cache@v2
with:
cache-all-crates: "true"
- uses: ilammy/setup-nasm@v1
- name: Free Disk Space (Ubuntu)
uses: jlumbroso/free-disk-space@v1.3.1
with: # speed things up a bit
large-packages: false
docker-images: false
swap-storage: false
# Just make sure clippy is happy before doing anything else
# Don't publish versions with clippy errors!
- name: Clippy
run: cargo clippy --workspace --all --examples --tests --all-features --all-targets -- -D warnings
# Build the docs here too before publishing, to ensure they're up to date
- name: cargo doc
run: RUSTDOCFLAGS="--cfg docsrs" cargo doc --no-deps --workspace --all-features
- name: Publish to crates.io
run: |
git config --global user.email "github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
cargo workspaces version -y ${{ env.SEMVER }} --pre-id rc --no-git-commit
# todo: actually just publish!
# cargo workspaces publish -y ${{ github.event.inputs.semver }}
# this will be more useful when we publish the website with updated docs
# Build the docs.rs docs and publish them to the website under the right folder
# v0.4.x -> docs/0.4
# v0.5.x -> docs/0.5 etc
# main -> docs/nightly
# strip the v from the channel, and the .x from the end, and replace main with nightly
# - name: determine docs folder by channel
# id: determine_docs_folder
# run: echo "::set-output name=folder::$(echo ${{ github.event.inputs.channel }} | sed 's/v//g' | sed 's/\.x//g' | sed 's/main/nightly/g')"
# Build the CLI for all platforms, uploading the artifacts to our releases page
release-cli:
needs: release-crates
permissions:
contents: write
strategy:
matrix:
include:
- target: x86_64-pc-windows-msvc
os: windows-latest
- target: aarch64-pc-windows-msvc
os: windows-latest
- target: x86_64-apple-darwin
os: macos-latest
- target: aarch64-apple-darwin
os: macos-latest
- target: x86_64-unknown-linux-gnu
os: ubuntu-latest
- target: aarch64-unknown-linux-gnu
os: ubuntu-latest
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
ref: ${{ github.event.inputs.channel }}
- name: Install stable
uses: dtolnay/rust-toolchain@master
with:
toolchain: "1.70.0"
targets: ${{ matrix.target }}
- uses: ilammy/setup-nasm@v1
- name: Setup cache
uses: Swatinem/rust-cache@v2
with:
workspaces: packages/cli -> ../../target
- name: Free Disk Space
uses: jlumbroso/free-disk-space@v1.3.1
with: # speed things up a bit
large-packages: false
docker-images: false
swap-storage: false
# Todo: we want `cargo install dx` to actually just use a prebuilt binary instead of building it
- name: Build and upload CLI binaries
uses: taiki-e/upload-rust-binary-action@v1
with:
bin: dx
token: ${{ secrets.GITHUB_TOKEN }}
target: ${{ matrix.target }}
archive: dx-${{ matrix.target }}${{ env.PRERELEASE_TAG }}
checksum: sha256
manifest_path: packages/cli/Cargo.toml
# todo: these things
# Run benchmarks, which we'll use to display on the website
# release-benchmarks:
# Build the vscode extension, uploading the artifact to the marketplace
# release-extension:


@ -1,37 +0,0 @@
name: Clear cache
on:
workflow_dispatch:
permissions:
actions: write
jobs:
clear-cache:
runs-on: ubuntu-latest
steps:
- name: Clear cache
uses: actions/github-script@v7
with:
github-token: ${{ secrets.cache_controller }}
script: |
console.log("About to clear")
while (true) {
const caches = await github.rest.actions.getActionsCacheList({
owner: context.repo.owner,
repo: context.repo.repo
})
if (caches.data.actions_caches.length === 0) {
break
}
for (const cache of caches.data.actions_caches) {
console.log(cache)
github.rest.actions.deleteActionsCacheById({
owner: context.repo.owner,
repo: context.repo.repo,
cache_id: cache.id,
})
}
}
console.log("Clear completed")

450 Cargo.lock generated

File diff suppressed because it is too large.


@ -51,40 +51,41 @@ members = [
exclude = ["examples/mobile_demo", "examples/openid_connect_demo",]
[workspace.package]
version = "0.4.3"
version = "0.5.0-alpha.0"
# dependencies that are shared across packages
[workspace.dependencies]
dioxus = { path = "packages/dioxus", version = "0.4.0" }
dioxus-lib = { path = "packages/dioxus-lib", version = "0.4.0" }
dioxus-core = { path = "packages/core", version = "0.4.2" }
dioxus-core-macro = { path = "packages/core-macro", version = "0.4.0" }
dioxus-config-macro = { path = "packages/config-macro", version = "0.4.0" }
dioxus-router = { path = "packages/router", version = "0.4.1" }
dioxus-router-macro = { path = "packages/router-macro", version = "0.4.1" }
dioxus-html = { path = "packages/html", version = "0.4.0" }
dioxus-html-internal-macro = { path = "packages/html-internal-macro", version = "0.4.0" }
dioxus-hooks = { path = "packages/hooks", version = "0.4.0" }
dioxus-web = { path = "packages/web", version = "0.4.0" }
dioxus-ssr = { path = "packages/ssr", version = "0.4.0", default-features = false }
dioxus-desktop = { path = "packages/desktop", version = "0.4.0" }
dioxus-mobile = { path = "packages/mobile", version = "0.4.0" }
dioxus-interpreter-js = { path = "packages/interpreter", version = "0.4.0" }
dioxus-liveview = { path = "packages/liveview", version = "0.4.0" }
dioxus-autofmt = { path = "packages/autofmt", version = "0.4.0" }
dioxus-check = { path = "packages/check", version = "0.4.0" }
dioxus-rsx = { path = "packages/rsx", version = "0.4.0" }
dioxus-tui = { path = "packages/dioxus-tui", version = "0.4.0" }
plasmo = { path = "packages/plasmo", version = "0.4.0" }
dioxus-native-core = { path = "packages/native-core", version = "0.4.0" }
dioxus-native-core-macro = { path = "packages/native-core-macro", version = "0.4.0" }
rsx-rosetta = { path = "packages/rsx-rosetta", version = "0.4.0" }
dioxus-signals = { path = "packages/signals" }
dioxus-cli-config = { path = "packages/cli-config", version = "0.4.1" }
generational-box = { path = "packages/generational-box", version = "0.4.3" }
dioxus-hot-reload = { path = "packages/hot-reload", version = "0.4.0" }
dioxus-fullstack = { path = "packages/fullstack", version = "0.4.1" }
dioxus_server_macro = { path = "packages/server-macro", version = "0.4.1" }
dioxus = { path = "packages/dioxus", version = "0.5.0-alpha.0" }
dioxus-lib = { path = "packages/dioxus-lib", version = "0.5.0-alpha.0" }
dioxus-core = { path = "packages/core", version = "0.5.0-alpha.0" }
dioxus-core-macro = { path = "packages/core-macro", version = "0.5.0-alpha.0" }
dioxus-config-macro = { path = "packages/config-macro", version = "0.5.0-alpha.0" }
dioxus-router = { path = "packages/router", version = "0.5.0-alpha.0" }
dioxus-router-macro = { path = "packages/router-macro", version = "0.5.0-alpha.0" }
dioxus-html = { path = "packages/html", version = "0.5.0-alpha.0" }
dioxus-html-internal-macro = { path = "packages/html-internal-macro", version = "0.5.0-alpha.0" }
dioxus-hooks = { path = "packages/hooks", version = "0.5.0-alpha.0" }
dioxus-web = { path = "packages/web", version = "0.5.0-alpha.0" }
dioxus-ssr = { path = "packages/ssr", version = "0.5.0-alpha.0", default-features = false }
dioxus-desktop = { path = "packages/desktop", version = "0.5.0-alpha.0" }
dioxus-mobile = { path = "packages/mobile", version = "0.5.0-alpha.0" }
dioxus-interpreter-js = { path = "packages/interpreter", version = "0.5.0-alpha.0" }
dioxus-liveview = { path = "packages/liveview", version = "0.5.0-alpha.0" }
dioxus-autofmt = { path = "packages/autofmt", version = "0.5.0-alpha.0" }
dioxus-check = { path = "packages/check", version = "0.5.0-alpha.0" }
dioxus-rsx = { path = "packages/rsx", version = "0.5.0-alpha.0" }
dioxus-tui = { path = "packages/dioxus-tui", version = "0.5.0-alpha.0" }
plasmo = { path = "packages/plasmo", version = "0.5.0-alpha.0" }
dioxus-native-core = { path = "packages/native-core", version = "0.5.0-alpha.0" }
dioxus-native-core-macro = { path = "packages/native-core-macro", version = "0.5.0-alpha.0" }
rsx-rosetta = { path = "packages/rsx-rosetta", version = "0.5.0-alpha.0" }
dioxus-signals = { path = "packages/signals", version = "0.5.0-alpha.0" }
dioxus-cli-config = { path = "packages/cli-config", version = "0.5.0-alpha.0" }
generational-box = { path = "packages/generational-box", version = "0.5.0-alpha.0" }
dioxus-hot-reload = { path = "packages/hot-reload", version = "0.5.0-alpha.0" }
dioxus-fullstack = { path = "packages/fullstack", version = "0.5.0-alpha.0" }
dioxus_server_macro = { path = "packages/server-macro", version = "0.5.0-alpha.0", default-features = false}
dioxus-ext = { path = "packages/extension", version = "0.4.0" }
tracing = "0.1.37"
tracing-futures = "0.2.5"
toml = "0.8"
@ -99,16 +100,16 @@ thiserror = "1.0.40"
prettyplease = { package = "prettier-please", version = "0.2", features = [
"verbatim",
] }
manganis-cli-support = { git = "https://github.com/DioxusLabs/collect-assets", rev = "f982698", features = [
manganis-cli-support = { version = "0.1.0", features = [
"webp",
"html",
] }
manganis = { git = "https://github.com/DioxusLabs/collect-assets", rev = "f982698" }
manganis = { version = "0.1.0" }
lru = "0.12.2"
async-trait = "0.1.77"
axum = "0.7.0"
axum-server = "0.6.0"
axum-server = {version = "0.6.0", default-features = false}
tower = "0.4.13"
http = "1.0.0"
tower-http = "0.5.1"
@ -125,7 +126,7 @@ reqwest = "0.11.24"
# It is not meant to be published, but is used so "cargo run --example XYZ" works properly
[package]
name = "dioxus-examples"
version = "0.4.3"
version = "0.5.0-alpha.0"
authors = ["Jonathan Kelley"]
edition = "2021"
description = "Top level crate for the Dioxus repository"
@ -192,4 +193,4 @@ required-features = ["http"]
[[example]]
name = "image_generator_openai"
required-features = ["http"]
required-features = ["http"]


@ -8,8 +8,8 @@ publish = false
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
dioxus = { path = "../../packages/dioxus", version = "*" }
dioxus-web = { path = "../../packages/web", version = "*" }
dioxus = { workspace = true }
dioxus-web = { workspace = true }
log = "0.4.6"


@ -1,7 +1,7 @@
//! A simple example that shows how to use the use_future hook to run a background task.
//!
//! use_future assumes your future will never complete - it won't return a value.
//! If you want to return a value, use use_resource instead.
//! use_future won't return a value, analogous to use_effect.
//! If you want to return a value from a future, use use_resource instead.
use dioxus::prelude::*;
use std::time::Duration;

1 examples/tailwind/.gitignore vendored Normal file

@ -0,0 +1 @@
dist


@ -1,6 +1,6 @@
[package]
name = "dioxus-cli-config"
version = "0.4.1"
version = { workspace = true }
authors = ["Jonathan Kelley"]
edition = "2021"
description = "Configuration for the Dioxus CLI"


@ -1,6 +1,6 @@
[package]
name = "dioxus-cli"
version = "0.4.3"
version = { workspace = true }
authors = ["Jonathan Kelley"]
edition = "2021"
description = "CLI tool for developing, testing, and publishing Dioxus apps"


@ -2,7 +2,9 @@
use dioxus::prelude::*;
use dioxus_core::{AttributeValue, DynamicNode, NoOpMutations, VComponent, VNode, *};
use std::{cfg, collections::HashSet, default::Default};
use std::{
cfg, collections::HashSet, default::Default, sync::atomic::AtomicUsize, sync::atomic::Ordering,
};
fn random_ns() -> Option<&'static str> {
let namespace = rand::random::<u8>() % 2;
@ -220,20 +222,14 @@ fn create_random_dynamic_attr() -> Attribute {
)
}
static mut TEMPLATE_COUNT: usize = 0;
static TEMPLATE_COUNT: AtomicUsize = AtomicUsize::new(0);
fn create_template_location() -> &'static str {
Box::leak(
format!(
"{}{}",
concat!(file!(), ":", line!(), ":", column!(), ":"),
{
unsafe {
let old = TEMPLATE_COUNT;
TEMPLATE_COUNT += 1;
old
}
}
TEMPLATE_COUNT.fetch_add(1, Ordering::Relaxed)
)
.into_boxed_str(),
)

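The hunk above replaces a `static mut` counter and its `unsafe` block with an `AtomicUsize`. A minimal standalone sketch of that pattern, assuming only the standard library rather than the surrounding fuzzing helper:

use std::sync::atomic::{AtomicUsize, Ordering};

static TEMPLATE_COUNT: AtomicUsize = AtomicUsize::new(0);

// Hands out unique, monotonically increasing ids without `unsafe` or data races.
fn next_template_id() -> usize {
    TEMPLATE_COUNT.fetch_add(1, Ordering::Relaxed)
}

fn main() {
    assert_eq!(next_template_id(), 0);
    assert_eq!(next_template_id(), 1);
}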

@ -14,8 +14,9 @@ pub(crate) fn check_app_exits(app: fn() -> Element) {
let should_panic = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(true));
let should_panic_clone = should_panic.clone();
std::thread::spawn(move || {
std::thread::sleep(std::time::Duration::from_secs(30));
std::thread::sleep(std::time::Duration::from_secs(60));
if should_panic_clone.load(std::sync::atomic::Ordering::SeqCst) {
eprintln!("App did not exit in time");
std::process::exit(exitcode::SOFTWARE);
}
});
@ -31,7 +32,7 @@ pub(crate) fn check_app_exits(app: fn() -> Element) {
fn mock_event(id: &'static str, value: &'static str) {
use_hook(move || {
spawn(async move {
tokio::time::sleep(std::time::Duration::from_millis(2000)).await;
tokio::time::sleep(std::time::Duration::from_millis(5000)).await;
let js = format!(
r#"


@ -43,6 +43,7 @@ fn handle_edits_code() -> String {
}"#;
let polling_request = format!(
r#"// Poll for requests
window.interpreter = new JSChannel();
window.interpreter.wait_for_request = (headless) => {{
fetch(new Request("{EDITS_PATH}"))
.then(response => {{
@ -50,11 +51,11 @@ fn handle_edits_code() -> String {
.then(bytes => {{
// In headless mode, the requestAnimationFrame callback is never called, so we need to run the bytes directly
if (headless) {{
run_from_bytes(bytes);
window.interpreter.run_from_bytes(bytes);
}}
else {{
requestAnimationFrame(() => {{
run_from_bytes(bytes);
window.interpreter.run_from_bytes(bytes);
}});
}}
window.interpreter.wait_for_request(headless);
@ -74,7 +75,7 @@ fn handle_edits_code() -> String {
interpreter.replace_range(import_start..import_end, "");
}
format!("{interpreter}\nconst config = new InterpreterConfig(true);")
format!("{interpreter}\nconst intercept_link_redirects = true;")
}
static DEFAULT_INDEX: &str = include_str!("./index.html");


@ -1,6 +1,6 @@
[package]
name = "dioxus-ext"
version = "0.1.0"
version = { workspace = true }
edition = "2021"
publish = false
@ -12,6 +12,5 @@ dioxus-autofmt = { workspace = true }
rsx-rosetta = { workspace = true }
html_parser = { workspace = true }
[lib]
crate-type = ["cdylib", "rlib"]


@ -2,14 +2,14 @@
"name": "dioxus",
"displayName": "Dioxus",
"description": "Useful tools for working with Dioxus",
"version": "0.0.2",
"version": "0.4.0",
"publisher": "DioxusLabs",
"private": true,
"license": "MIT",
"icon": "static/icon.png",
"repository": {
"type": "git",
"url": "https://github.com/DioxusLabs/cli"
"url": "https://github.com/DioxusLabs/dioxus"
},
"engines": {
"vscode": "^1.68.1"


@ -13,10 +13,10 @@ resolver = "2"
[dependencies]
# server functions
server_fn = { version = "0.6.5", features = ["json", "url", "browser"], default-features = false }
dioxus_server_macro = { workspace = true, version = "0.6.5", default-features = false }
dioxus_server_macro = { workspace = true }
# axum
axum = { workspace = true, features = ["ws", "macros"], default-features = false, optional = true }
axum = { workspace = true, features = ["ws", "macros"], optional = true }
tower-http = { workspace = true, optional = true, features = ["fs", "compression-gzip"] }
dioxus-lib = { workspace = true }
@ -44,7 +44,7 @@ anymap = { version = "0.12.1", optional = true }
serde = "1.0.159"
serde_json = { version = "1.0.95", optional = true }
tokio-stream = { version = "0.1.12", features = ["sync"], optional = true }
futures-util = { workspace = true, default-features = false }
futures-util = { workspace = true }
ciborium = "0.2.1"
base64 = "0.21.0"


@ -1,7 +1,7 @@
[package]
name = "generational-box"
authors = ["Evan Almloff"]
version = "0.4.3"
version = { workspace = true }
edition = "2021"
description = "A box backed by a generational runtime"
license = "MIT OR Apache-2.0"


@ -14,6 +14,18 @@ pub fn try_use_context<T: 'static + Clone>() -> Option<T> {
/// Consume some context in the tree, providing a sharable handle to the value
///
/// Does not regenerate the value if the value is changed at the parent.
/// ```rust
/// fn Parent() -> Element {
/// use_context_provider(|| Theme::Dark);
/// rsx! { Child {} }
/// }
/// #[component]
/// fn Child() -> Element {
/// //gets context provided by parent element with use_context_provider
/// let user_theme = use_context::<Theme>();
/// rsx! { "user using dark mode: {user_theme == Theme::Dark}" }
/// }
/// ```
#[must_use]
pub fn use_context<T: 'static + Clone>() -> T {
use_hook(|| consume_context::<T>())
@ -22,6 +34,24 @@ pub fn use_context<T: 'static + Clone>() -> T {
/// Provide some context via the tree and return a reference to it
///
/// Once the context has been provided, it is immutable. Mutations should be done via interior mutability.
/// Context can be read by any child components of the context provider, and is a solution to prop
/// drilling, using a context provider with a Signal inside is a good way to provide global/shared
/// state in your app:
/// ```rust
///fn app() -> Element {
/// use_context_provider(|| Signal::new(0));
/// rsx! { Child {} }
///}
/// // This component does read from the signal, so when the signal changes it will rerun
///#[component]
///fn Child() -> Element {
/// let signal: Signal<i32> = use_context();
/// rsx! {
/// button { onclick: move |_| signal += 1, "increment context" }
/// p {"{signal}"}
/// }
///}
/// ```
pub fn use_context_provider<T: 'static + Clone>(f: impl FnOnce() -> T) -> T {
use_hook(|| {
let val = f();


@ -93,7 +93,8 @@ where
}
/// Get a handle to a coroutine higher in the tree
///
/// Analogous to use_context_provider and use_context,
/// but used for coroutines specifically
/// See the docs for [`use_coroutine`] for more details.
#[must_use]
pub fn use_coroutine_handle<M: 'static>() -> Coroutine<M> {

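The doc comment above likens `use_coroutine_handle` to the `use_context_provider`/`use_context` pair. A rough usage sketch, assuming the Dioxus 0.5 hook signatures (with `UnboundedReceiver` re-exported from the prelude) and a made-up `Msg` type that is not part of this diff:

use dioxus::prelude::*;
use futures_util::StreamExt;

enum Msg {
    Increment,
}

fn app() -> Element {
    let mut count = use_signal(|| 0);
    // The parent owns the coroutine and reacts to messages sent into it.
    use_coroutine(move |mut rx: UnboundedReceiver<Msg>| async move {
        while let Some(Msg::Increment) = rx.next().await {
            count += 1;
        }
    });
    rsx! {
        p { "count: {count}" }
        Child {}
    }
}

#[component]
fn Child() -> Element {
    // Any descendant can grab a handle to the coroutine, much like reading context.
    let handle = use_coroutine_handle::<Msg>();
    rsx! { button { onclick: move |_| handle.send(Msg::Increment), "increment" } }
}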

@ -1,10 +1,22 @@
use dioxus_core::prelude::*;
use dioxus_signals::ReactiveContext;
/// Create a new effect. The effect will be run immediately and whenever any signal it reads changes.
/// The signal will be owned by the current component and will be dropped when the component is dropped.
///
/// `use_effect` will subscribe to any changes in the signal values it captures.
/// Effects will always run after the first mount and then whenever the signal values change.
/// If the use_effect call was skipped due to an early return, the effect will no longer activate.
/// ```rust
/// fn app() -> Element {
/// let mut count = use_signal(|| 0);
/// //the effect runs again each time count changes
/// use_effect(move || println!("Count changed to {count}"));
///
/// rsx! {
/// h1 { "High-Five counter: {count}" }
/// button { onclick: move |_| count += 1, "Up high!" }
/// button { onclick: move |_| count -= 1, "Down low!" }
/// }
/// }
/// ```
#[track_caller]
pub fn use_effect(mut callback: impl FnMut() + 'static) {
// let mut run_effect = use_hook(|| CopyValue::new(true));


@ -8,10 +8,34 @@ use dioxus_signals::*;
use dioxus_signals::{Readable, Writable};
use std::future::Future;
/// A hook that allows you to spawn a future
///
/// A hook that allows you to spawn a future.
/// This future will **not** run on the server
/// The future is spawned on the next call to `flush_sync` which means that it will not run on the server.
/// To run a future on the server, you should use `spawn` directly.
/// `use_future` **won't return a value**.
/// If you want to return a value from a future, use `use_resource` instead.
/// ```rust
/// fn app() -> Element {
/// let mut count = use_signal(|| 0);
/// let mut running = use_signal(|| true);
/// // `use_future` will spawn an infinitely running future that can be started and stopped
/// use_future(move || async move {
/// loop {
/// if running() {
/// count += 1;
/// }
/// tokio::time::sleep(Duration::from_millis(400)).await;
/// }
/// });
/// rsx! {
/// div {
/// h1 { "Current count: {count}" }
/// button { onclick: move |_| running.toggle(), "Start/Stop the count"}
/// button { onclick: move |_| count.set(0), "Reset the count" }
/// }
/// }
/// }
/// ```
pub fn use_future<F>(mut future: impl FnMut() -> F + 'static) -> UseFuture
where
F: Future + 'static,


@ -10,8 +10,32 @@ use futures_util::{future, pin_mut, FutureExt};
use std::future::Future;
/// A memo that resolves to a value asynchronously.
/// Unlike `use_future`, `use_resource` runs on the **server**
/// See [`Resource`] for more details.
/// ```rust
///fn app() -> Element {
/// let country = use_signal(|| WeatherLocation {
/// city: "Berlin".to_string(),
/// country: "Germany".to_string(),
/// coordinates: (52.5244, 13.4105)
/// });
///
/// This runs on the server
/// let current_weather = //run a future inside the use_resource hook
/// use_resource(move || async move { get_weather(&country.read().clone()).await });
///
/// rsx! {
/// //the value of the future can be polled to
/// //conditionally render elements based off if the future
/// //finished (Some(Ok(_)), errored Some(Err(_)),
/// //or is still finishing (None)
/// match current_weather.value() {
/// Some(Ok(weather)) => WeatherElement { weather },
/// Some(Err(e)) => p { "Loading weather failed, {e}" }
/// None => p { "Loading..." }
/// }
/// }
///}
/// ```
#[must_use = "Consider using `cx.spawn` to run a future without reading its value"]
pub fn use_resource<T, F>(future: impl Fn() -> F + 'static) -> Resource<T>
where


@ -25,4 +25,4 @@ name = "tests"
path = "tests/progress.rs"
[dev-dependencies]
trybuild = { version = "1.0.82", features = ["diff"] }
trybuild = { version = "1.0.82", features = ["diff"] }


@ -2285,8 +2285,8 @@ trait_methods! {
/// <https://developer.mozilla.org/en-US/docs/Web/SVG/Attribute/transform-origin>
transform_origin: "transform-origin";
/// <https://developer.mozilla.org/en-US/docs/Web/SVG/Attribute/_type>
r#type: "_type";
/// <https://developer.mozilla.org/en-US/docs/Web/SVG/Attribute/type>
r#type: "type";
/// <https://developer.mozilla.org/en-US/docs/Web/SVG/Attribute/u1>
u1: "u1";


@ -17,7 +17,7 @@ web-sys = { version = "0.3.56", optional = true, features = [
"Element",
"Node",
] }
sledgehammer_bindgen = { version = "0.3.1", default-features = false, optional = true }
sledgehammer_bindgen = { version = "0.4.0", default-features = false, optional = true }
sledgehammer_utils = { version = "0.2", optional = true }
serde = { version = "1.0", features = ["derive"], optional = true }


@ -1,4 +1,4 @@
export function setAttributeInner(node, field, value, ns) {
this.setAttributeInner = function (node, field, value, ns) {
const name = field;
if (ns === "style") {
// ????? why do we need to do this


@ -0,0 +1,79 @@
export function setAttributeInner(node, field, value, ns) {
const name = field;
if (ns === "style") {
// ????? why do we need to do this
if (node.style === undefined) {
node.style = {};
}
node.style[name] = value;
} else if (!!ns) {
node.setAttributeNS(ns, name, value);
} else {
switch (name) {
case "value":
if (value !== node.value) {
node.value = value;
}
break;
case "initial_value":
node.defaultValue = value;
break;
case "checked":
node.checked = truthy(value);
break;
case "initial_checked":
node.defaultChecked = truthy(value);
break;
case "selected":
node.selected = truthy(value);
break;
case "initial_selected":
node.defaultSelected = truthy(value);
break;
case "dangerous_inner_html":
node.innerHTML = value;
break;
default:
// https://github.com/facebook/react/blob/8b88ac2592c5f555f315f9440cbb665dd1e7457a/packages/react-dom/src/shared/DOMProperty.js#L352-L364
if (!truthy(value) && bool_attrs.hasOwnProperty(name)) {
node.removeAttribute(name);
} else {
node.setAttribute(name, value);
}
}
}
}
const bool_attrs = {
allowfullscreen: true,
allowpaymentrequest: true,
async: true,
autofocus: true,
autoplay: true,
checked: true,
controls: true,
default: true,
defer: true,
disabled: true,
formnovalidate: true,
hidden: true,
ismap: true,
itemscope: true,
loop: true,
multiple: true,
muted: true,
nomodule: true,
novalidate: true,
open: true,
playsinline: true,
readonly: true,
required: true,
reversed: true,
selected: true,
truespeed: true,
webkitdirectory: true,
};
function truthy(val) {
return val === "true" || val === true;
}


@ -1,12 +1,6 @@
class InterpreterConfig {
constructor(intercept_link_redirects) {
this.intercept_link_redirects = intercept_link_redirects;
}
}
// this handler is only provided on the desktop and liveview implementations since this
// method is not used by the web implementation
async function handler(event, name, bubbles, config) {
this.handler = async function (event, name, bubbles) {
let target = event.target;
if (target != null) {
let preventDefaultRequests = null;
@ -17,7 +11,7 @@ async function handler(event, name, bubbles, config) {
if (event.type === "click") {
// todo call prevent default if it's the right type of event
if (config.intercept_link_redirects) {
if (intercept_link_redirects) {
let a_element = target.closest("a");
if (a_element != null) {
event.preventDefault();
@ -35,7 +29,7 @@ async function handler(event, name, bubbles, config) {
const href = a_element.getAttribute("href");
if (href !== "" && href !== null && href !== undefined) {
window.ipc.postMessage(
window.interpreter.serializeIpcMessage("browser_open", { href })
this.serializeIpcMessage("browser_open", { href })
);
}
}
@ -142,7 +136,7 @@ async function handler(event, name, bubbles, config) {
return;
}
window.ipc.postMessage(
window.interpreter.serializeIpcMessage("user_event", {
this.serializeIpcMessage("user_event", {
name: name,
element: parseInt(realId),
data: contents,
@ -223,43 +217,40 @@ class ListenerMap {
delete this.local[id];
}
}
function LoadChild(array) {
this.LoadChild = function (array) {
// iterate through each number and get that child
node = stack[stack.length - 1];
let node = this.stack[this.stack.length - 1];
for (let i = 0; i < array.length; i++) {
end = array[i];
for (node = node.firstChild; end > 0; end--) {
this.end = array[i];
for (node = node.firstChild; this.end > 0; this.end--) {
node = node.nextSibling;
}
}
return node;
}
const listeners = new ListenerMap();
let nodes = [];
let stack = [];
let root;
const templates = {};
let node, els, end, k;
this.listeners = new ListenerMap();
this.nodes = [];
this.stack = [];
this.templates = {};
this.end = null;
function AppendChildren(id, many) {
root = nodes[id];
els = stack.splice(stack.length - many);
for (k = 0; k < many; k++) {
this.AppendChildren = function (id, many) {
let root = this.nodes[id];
let els = this.stack.splice(this.stack.length - many);
for (let k = 0; k < many; k++) {
root.appendChild(els[k]);
}
}
window.interpreter = {}
window.interpreter.initialize = function (root) {
nodes = [root];
stack = [root];
listeners.root = root;
this.initialize = function (root) {
this.nodes = [root];
this.stack = [root];
this.listeners.root = root;
}
window.interpreter.getClientRect = function (id) {
const node = nodes[id];
this.getClientRect = function (id) {
const node = this.nodes[id];
if (!node) {
return;
}
@ -271,8 +262,8 @@ window.interpreter.getClientRect = function (id) {
};
}
window.interpreter.scrollTo = function (id, behavior) {
const node = nodes[id];
this.scrollTo = function (id, behavior) {
const node = this.nodes[id];
if (!node) {
return false;
}
@ -283,8 +274,8 @@ window.interpreter.scrollTo = function (id, behavior) {
}
/// Set the focus on the element
window.interpreter.setFocus = function (id, focus) {
const node = nodes[id];
this.setFocus = function (id, focus) {
const node = this.nodes[id];
if (!node) {
return false;
}
@ -579,7 +570,7 @@ async function serialize_event(event) {
}
}
}
window.interpreter.serializeIpcMessage = function (method, params = {}) {
this.serializeIpcMessage = function (method, params = {}) {
return JSON.stringify({ method, params });
}


@ -20,7 +20,7 @@ pub use write_native_mutations::*;
#[cfg(all(feature = "minimal_bindings", feature = "webonly"))]
pub mod minimal_bindings {
use wasm_bindgen::{prelude::wasm_bindgen, JsValue};
#[wasm_bindgen(module = "/src/common.js")]
#[wasm_bindgen(module = "/src/common_exported.js")]
extern "C" {
pub fn setAttributeInner(node: JsValue, name: &str, value: JsValue, ns: Option<&str>);
}


@ -19,7 +19,7 @@ mod js {
this.global = {};
// non bubbling events listen at the element the listener was created at
this.local = {};
this.root = null;
this.root = root;
this.handler = null;
}
@ -65,35 +65,33 @@ mod js {
delete this.local[id];
}
}
function LoadChild(ptr, len) {
this.LoadChild = function(ptr, len) {
// iterate through each number and get that child
node = stack[stack.length - 1];
ptr_end = ptr + len;
let node = this.stack[this.stack.length - 1];
let ptr_end = ptr + len;
for (; ptr < ptr_end; ptr++) {
end = m.getUint8(ptr);
let end = this.m.getUint8(ptr);
for (node = node.firstChild; end > 0; end--) {
node = node.nextSibling;
}
}
return node;
}
const listeners = new ListenerMap();
let nodes = [];
let stack = [];
let root;
const templates = {};
let node, els, end, ptr_end, k;
export function save_template(nodes, tmpl_id) {
templates[tmpl_id] = nodes;
this.listeners = new ListenerMap();
this.nodes = [];
this.stack = [];
this.templates = {};
this.save_template = function(nodes, tmpl_id) {
this.templates[tmpl_id] = nodes;
}
export function hydrate(ids) {
this.hydrate = function (ids) {
const hydrateNodes = document.querySelectorAll('[data-node-hydration]');
for (let i = 0; i < hydrateNodes.length; i++) {
const hydrateNode = hydrateNodes[i];
const hydration = hydrateNode.getAttribute('data-node-hydration');
const split = hydration.split(',');
const id = ids[parseInt(split[0])];
nodes[id] = hydrateNode;
this.nodes[id] = hydrateNode;
if (split.length > 1) {
hydrateNode.listening = split.length - 1;
hydrateNode.setAttribute('data-dioxus-id', id);
@ -102,7 +100,7 @@ mod js {
const split2 = listener.split(':');
const event_name = split2[0];
const bubbles = split2[1] === '1';
listeners.create(event_name, hydrateNode, bubbles);
this.listeners.create(event_name, hydrateNode, bubbles);
}
}
}
@ -115,91 +113,77 @@ mod js {
const id = currentNode.textContent;
const split = id.split('node-id');
if (split.length > 1) {
nodes[ids[parseInt(split[1])]] = currentNode.nextSibling;
this.nodes[ids[parseInt(split[1])]] = currentNode.nextSibling;
}
currentNode = treeWalker.nextNode();
}
}
export function get_node(id) {
return nodes[id];
this.get_node = function(id) {
return this.nodes[id];
}
export function initialize(root, handler) {
listeners.handler = handler;
nodes = [root];
stack = [root];
listeners.root = root;
this.initialize = function(root, handler) {
this.listeners.handler = handler;
this.nodes = [root];
this.stack = [root];
this.listeners.root = root;
}
function AppendChildren(id, many){
root = nodes[id];
els = stack.splice(stack.length-many);
for (k = 0; k < many; k++) {
this.AppendChildren = function (id, many){
let root = this.nodes[id];
let els = this.stack.splice(this.stack.length-many);
for (let k = 0; k < many; k++) {
root.appendChild(els[k]);
}
}
"#;
extern "C" {
#[wasm_bindgen]
pub fn save_template(nodes: Vec<Node>, tmpl_id: u16);
#[wasm_bindgen]
pub fn hydrate(ids: Vec<u32>);
#[wasm_bindgen]
pub fn get_node(id: u32) -> Node;
#[wasm_bindgen]
pub fn initialize(root: Node, handler: &Function);
}
fn mount_to_root() {
"{AppendChildren(root, stack.length-1);}"
"{this.AppendChildren(this.listeners.root, this.stack.length-1);}"
}
fn push_root(root: u32) {
"{stack.push(nodes[$root$]);}"
"{this.stack.push(this.nodes[$root$]);}"
}
fn append_children(id: u32, many: u16) {
"{AppendChildren($id$, $many$);}"
"{this.AppendChildren($id$, $many$);}"
}
fn pop_root() {
"{stack.pop();}"
"{this.stack.pop();}"
}
fn replace_with(id: u32, n: u16) {
"{root = nodes[$id$]; els = stack.splice(stack.length-$n$); if (root.listening) { listeners.removeAllNonBubbling(root); } root.replaceWith(...els);}"
"{const root = this.nodes[$id$]; let els = this.stack.splice(this.stack.length-$n$); if (root.listening) { this.listeners.removeAllNonBubbling(root); } root.replaceWith(...els);}"
}
fn insert_after(id: u32, n: u16) {
"{nodes[$id$].after(...stack.splice(stack.length-$n$));}"
"{this.nodes[$id$].after(...this.stack.splice(this.stack.length-$n$));}"
}
fn insert_before(id: u32, n: u16) {
"{nodes[$id$].before(...stack.splice(stack.length-$n$));}"
"{this.nodes[$id$].before(...this.stack.splice(this.stack.length-$n$));}"
}
fn remove(id: u32) {
"{node = nodes[$id$]; if (node !== undefined) { if (node.listening) { listeners.removeAllNonBubbling(node); } node.remove(); }}"
"{let node = this.nodes[$id$]; if (node !== undefined) { if (node.listening) { this.listeners.removeAllNonBubbling(node); } node.remove(); }}"
}
fn create_raw_text(text: &str) {
"{stack.push(document.createTextNode($text$));}"
"{this.stack.push(document.createTextNode($text$));}"
}
fn create_text_node(text: &str, id: u32) {
"{node = document.createTextNode($text$); nodes[$id$] = node; stack.push(node);}"
"{let node = document.createTextNode($text$); this.nodes[$id$] = node; this.stack.push(node);}"
}
fn create_placeholder(id: u32) {
"{node = document.createElement('pre'); node.hidden = true; stack.push(node); nodes[$id$] = node;}"
"{let node = document.createElement('pre'); node.hidden = true; this.stack.push(node); this.nodes[$id$] = node;}"
}
fn new_event_listener(event_name: &str<u8, evt>, id: u32, bubbles: u8) {
r#"node = nodes[id]; if(node.listening){node.listening += 1;}else{node.listening = 1;} node.setAttribute('data-dioxus-id', `\${id}`); listeners.create($event_name$, node, $bubbles$);"#
r#"let node = this.nodes[id]; if(node.listening){node.listening += 1;}else{node.listening = 1;} node.setAttribute('data-dioxus-id', `\${id}`); this.listeners.create($event_name$, node, $bubbles$);"#
}
fn remove_event_listener(event_name: &str<u8, evt>, id: u32, bubbles: u8) {
"{node = nodes[$id$]; node.listening -= 1; node.removeAttribute('data-dioxus-id'); listeners.remove(node, $event_name$, $bubbles$);}"
"{let node = this.nodes[$id$]; node.listening -= 1; node.removeAttribute('data-dioxus-id'); this.listeners.remove(node, $event_name$, $bubbles$);}"
}
fn set_text(id: u32, text: &str) {
"{nodes[$id$].textContent = $text$;}"
"{this.nodes[$id$].textContent = $text$;}"
}
fn set_attribute(id: u32, field: &str<u8, attr>, value: &str, ns: &str<u8, ns_cache>) {
"{node = nodes[$id$]; setAttributeInner(node, $field$, $value$, $ns$);}"
"{let node = this.nodes[$id$]; this.setAttributeInner(node, $field$, $value$, $ns$);}"
}
fn remove_attribute(id: u32, field: &str<u8, attr>, ns: &str<u8, ns_cache>) {
r#"{
node = nodes[$id$];
let node = this.nodes[$id$];
if (!ns) {
switch (field) {
case "value":
@ -226,29 +210,54 @@ mod js {
}"#
}
fn assign_id(ptr: u32, len: u8, id: u32) {
"{nodes[$id$] = LoadChild($ptr$, $len$);}"
"{this.nodes[$id$] = this.LoadChild($ptr$, $len$);}"
}
fn hydrate_text(ptr: u32, len: u8, value: &str, id: u32) {
r#"{
node = LoadChild($ptr$, $len$);
if (node.nodeType == Node.TEXT_NODE) {
let node = this.LoadChild($ptr$, $len$);
if (node.nodeType == node.TEXT_NODE) {
node.textContent = value;
} else {
let text = document.createTextNode(value);
node.replaceWith(text);
node = text;
}
nodes[$id$] = node;
this.nodes[$id$] = node;
}"#
}
fn replace_placeholder(ptr: u32, len: u8, n: u16) {
"{els = stack.splice(stack.length - $n$); node = LoadChild($ptr$, $len$); node.replaceWith(...els);}"
"{els = this.stack.splice(this.stack.length - $n$); let node = this.LoadChild($ptr$, $len$); node.replaceWith(...els);}"
}
fn load_template(tmpl_id: u16, index: u16, id: u32) {
"{node = templates[$tmpl_id$][$index$].cloneNode(true); nodes[$id$] = node; stack.push(node);}"
"{let node = this.templates[$tmpl_id$][$index$].cloneNode(true); this.nodes[$id$] = node; this.stack.push(node);}"
}
}
#[cfg(feature = "webonly")]
#[wasm_bindgen::prelude::wasm_bindgen(inline_js = r#"
export function save_template(channel, nodes, tmpl_id) {
channel.save_template(nodes, tmpl_id);
}
export function hydrate(channel, ids) {
channel.hydrate(ids);
}
export function get_node(channel, id) {
return channel.get_node(id);
}
export function initialize(channel, root, handler) {
channel.initialize(root, handler);
}
"#)]
extern "C" {
pub fn save_template(channel: &JSChannel, nodes: Vec<Node>, tmpl_id: u16);
pub fn hydrate(channel: &JSChannel, ids: Vec<u32>);
pub fn get_node(channel: &JSChannel, id: u32) -> Node;
pub fn initialize(channel: &JSChannel, root: Node, handler: &Function);
}
#[cfg(feature = "binary-protocol")]
pub mod binary_protocol {
use sledgehammer_bindgen::bindgen;
@ -259,61 +268,58 @@ pub mod binary_protocol {
const JS_FILE: &str = "./src/interpreter.js";
const JS_FILE: &str = "./src/common.js";
fn mount_to_root() {
"{AppendChildren(root, stack.length-1);}"
}
fn push_root(root: u32) {
"{stack.push(nodes[$root$]);}"
"{this.stack.push(this.nodes[$root$]);}"
}
fn append_children(id: u32, many: u16) {
"{AppendChildren($id$, $many$);}"
"{this.AppendChildren($id$, $many$);}"
}
fn append_children_to_top(many: u16) {
"{
root = stack[stack.length-many-1];
els = stack.splice(stack.length-many);
for (k = 0; k < many; k++) {
let root = this.stack[this.stack.length-many-1];
let els = this.stack.splice(this.stack.length-many);
for (let k = 0; k < many; k++) {
root.appendChild(els[k]);
}
}"
}
fn pop_root() {
"{stack.pop();}"
"{this.stack.pop();}"
}
fn replace_with(id: u32, n: u16) {
"{root = nodes[$id$]; els = stack.splice(stack.length-$n$); if (root.listening) { listeners.removeAllNonBubbling(root); } root.replaceWith(...els);}"
"{let root = this.nodes[$id$]; let els = this.stack.splice(this.stack.length-$n$); if (root.listening) { this.listeners.removeAllNonBubbling(root); } root.replaceWith(...els);}"
}
fn insert_after(id: u32, n: u16) {
"{nodes[$id$].after(...stack.splice(stack.length-$n$));}"
"{this.nodes[$id$].after(...this.stack.splice(this.stack.length-$n$));}"
}
fn insert_before(id: u32, n: u16) {
"{nodes[$id$].before(...stack.splice(stack.length-$n$));}"
"{this.nodes[$id$].before(...this.stack.splice(this.stack.length-$n$));}"
}
fn remove(id: u32) {
"{node = nodes[$id$]; if (node !== undefined) { if (node.listening) { listeners.removeAllNonBubbling(node); } node.remove(); }}"
"{let node = this.nodes[$id$]; if (node !== undefined) { if (node.listening) { this.listeners.removeAllNonBubbling(node); } node.remove(); }}"
}
fn create_raw_text(text: &str) {
"{stack.push(document.createTextNode($text$));}"
"{this.stack.push(document.createTextNode($text$));}"
}
fn create_text_node(text: &str, id: u32) {
"{node = document.createTextNode($text$); nodes[$id$] = node; stack.push(node);}"
"{let node = document.createTextNode($text$); this.nodes[$id$] = node; this.stack.push(node);}"
}
fn create_element(element: &'static str<u8, el>) {
"{stack.push(document.createElement($element$))}"
"{this.stack.push(document.createElement($element$))}"
}
fn create_element_ns(element: &'static str<u8, el>, ns: &'static str<u8, namespace>) {
"{stack.push(document.createElementNS($ns$, $element$))}"
"{this.stack.push(document.createElementNS($ns$, $element$))}"
}
fn create_placeholder(id: u32) {
"{node = document.createElement('pre'); node.hidden = true; stack.push(node); nodes[$id$] = node;}"
"{let node = document.createElement('pre'); node.hidden = true; this.stack.push(node); this.nodes[$id$] = node;}"
}
fn add_placeholder() {
"{node = document.createElement('pre'); node.hidden = true; stack.push(node);}"
"{let node = document.createElement('pre'); node.hidden = true; this.stack.push(node);}"
}
fn new_event_listener(event: &str<u8, evt>, id: u32, bubbles: u8) {
r#"
bubbles = bubbles == 1;
node = nodes[id];
let node = this.nodes[id];
if(node.listening){
node.listening += 1;
} else {
@ -325,7 +331,7 @@ pub mod binary_protocol {
// if this is a mounted listener, we send the event immediately
if (event_name === "mounted") {
window.ipc.postMessage(
window.interpreter.serializeIpcMessage("user_event", {
this.serializeIpcMessage("user_event", {
name: event_name,
element: id,
data: null,
@ -333,26 +339,26 @@ pub mod binary_protocol {
})
);
} else {
listeners.create(event_name, node, bubbles, (event) => {
handler(event, event_name, bubbles, config);
this.listeners.create(event_name, node, bubbles, (event) => {
this.handler(event, event_name, bubbles);
});
}"#
}
fn remove_event_listener(event_name: &str<u8, evt>, id: u32, bubbles: u8) {
"{node = nodes[$id$]; node.listening -= 1; node.removeAttribute('data-dioxus-id'); listeners.remove(node, $event_name$, $bubbles$);}"
"{let node = this.nodes[$id$]; node.listening -= 1; node.removeAttribute('data-dioxus-id'); this.listeners.remove(node, $event_name$, $bubbles$);}"
}
fn set_text(id: u32, text: &str) {
"{nodes[$id$].textContent = $text$;}"
"{this.nodes[$id$].textContent = $text$;}"
}
fn set_attribute(id: u32, field: &str<u8, attr>, value: &str, ns: &str<u8, ns_cache>) {
"{node = nodes[$id$]; setAttributeInner(node, $field$, $value$, $ns$);}"
"{let node = this.nodes[$id$]; this.setAttributeInner(node, $field$, $value$, $ns$);}"
}
fn set_top_attribute(field: &str<u8, attr>, value: &str, ns: &str<u8, ns_cache>) {
"{setAttributeInner(stack[stack.length-1], $field$, $value$, $ns$);}"
"{this.setAttributeInner(this.stack[this.stack.length-1], $field$, $value$, $ns$);}"
}
fn remove_attribute(id: u32, field: &str<u8, attr>, ns: &str<u8, ns_cache>) {
r#"{
node = nodes[$id$];
let node = this.nodes[$id$];
if (!ns) {
switch (field) {
case "value":
@ -379,29 +385,29 @@ pub mod binary_protocol {
}"#
}
fn assign_id(array: &[u8], id: u32) {
"{nodes[$id$] = LoadChild($array$);}"
"{this.nodes[$id$] = this.LoadChild($array$);}"
}
fn hydrate_text(array: &[u8], value: &str, id: u32) {
r#"{
node = LoadChild($array$);
if (node.nodeType == Node.TEXT_NODE) {
let node = this.LoadChild($array$);
if (node.nodeType == node.TEXT_NODE) {
node.textContent = value;
} else {
let text = document.createTextNode(value);
node.replaceWith(text);
node = text;
}
nodes[$id$] = node;
this.nodes[$id$] = node;
}"#
}
fn replace_placeholder(array: &[u8], n: u16) {
"{els = stack.splice(stack.length - $n$); node = LoadChild($array$); node.replaceWith(...els);}"
"{let els = this.stack.splice(this.stack.length - $n$); let node = this.LoadChild($array$); node.replaceWith(...els);}"
}
fn load_template(tmpl_id: u16, index: u16, id: u32) {
"{node = templates[$tmpl_id$][$index$].cloneNode(true); nodes[$id$] = node; stack.push(node);}"
"{let node = this.templates[$tmpl_id$][$index$].cloneNode(true); this.nodes[$id$] = node; this.stack.push(node);}"
}
fn add_templates(tmpl_id: u16, len: u16) {
"{templates[$tmpl_id$] = stack.splice(stack.length-$len$);}"
"{this.templates[$tmpl_id$] = this.stack.splice(this.stack.length-$len$);}"
}
}
}


@ -1,4 +1,4 @@
const config = new InterpreterConfig(false);
const intercept_link_redirects = false;
function main() {
let root = window.document.getElementById("main");
@ -9,6 +9,7 @@ function main() {
class IPC {
constructor(root) {
window.interpreter = new JSChannel();
window.interpreter.initialize(root);
const ws = new WebSocket(WS_ADDR);
ws.binaryType = "arraybuffer";
@ -34,7 +35,7 @@ class IPC {
// The first byte tells the shim if this is a binary of text frame
if (binaryFrame) {
// binary frame
run_from_bytes(messageData);
window.interpreter.run_from_bytes(messageData);
}
else {
// text frame


@ -559,6 +559,7 @@ impl RouteEnum {
#(#type_defs)*
#[allow(non_camel_case_types)]
#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Debug, PartialEq)]
pub enum #match_error_name {
#(#error_variants),*


@ -309,6 +309,7 @@ pub(crate) fn create_error_type(
quote! {
#[allow(non_camel_case_types)]
#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Debug, PartialEq)]
pub enum #error_name {
ExtraSegments(String),


@ -1,7 +1,7 @@
[package]
name = "dioxus-signals"
authors = ["Jonathan Kelley"]
version = "0.4.3"
authors = ["Jonathan Kelley", "Evan Almloff"]
version = { workspace = true }
edition = "2021"
description = "Signals for Dioxus"
license = "MIT OR Apache-2.0"

View file

@ -39,7 +39,9 @@ pub trait Readable {
MappedSignal::new(try_read, peek)
}
/// Get the current value of the state. If this is a signal, this will subscribe the current scope to the signal. If the value has been dropped, this will panic.
/// Get the current value of the state. If this is a signal, this will subscribe the current scope to the signal.
/// If the value has been dropped, this will panic. Calling this on a Signal is the same as
/// using the signal() syntax to read and subscribe to its value
#[track_caller]
fn read(&self) -> ReadableRef<Self> {
self.try_read().unwrap()

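The added doc line above says that reading through `.read()` and calling the signal like a function are equivalent: both read the current value and subscribe the caller. A small sketch of the two forms, assuming the 0.5 `use_signal` API (call syntax clones the value out, `.read()` borrows it):

use dioxus::prelude::*;

fn app() -> Element {
    let names = use_signal(|| vec!["dioxus".to_string()]);

    // Both of these subscribe this component to `names`:
    let joined = names().join(", "); // call syntax clones the Vec out of the signal
    let len = names.read().len();    // `.read()` borrows the Vec without cloning

    rsx! {
        p { "names: {joined}" }
        p { "count: {len}" }
    }
}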

@ -20,7 +20,7 @@ tracing = { workspace = true }
http = { workspace = true }
async-trait = { workspace = true }
serde_json = { workspace = true }
chrono = { verison = "0.4.34", optional = true }
chrono = { version = "0.4.34", optional = true }
[target.'cfg(target_arch = "wasm32")'.dependencies]
tokio = { version = "1.28", features = ["io-util"], optional = true }


@ -110,6 +110,7 @@ impl WebsysDom {
}));
dioxus_interpreter_js::initialize(
interpreter.js_channel(),
root.clone().unchecked_into(),
handler.as_ref().unchecked_ref(),
);


@ -62,7 +62,7 @@ impl WebsysDom {
// Now that we've flushed the edits and the dom nodes exist, we can send the mounted events.
{
for id in self.queued_mounted_events.drain(..) {
let node = get_node(id.0 as u32);
let node = get_node(self.interpreter.js_channel(), id.0 as u32);
if let Some(element) = node.dyn_ref::<web_sys::Element>() {
let _ = self.event_channel.unbounded_send(UiEvent {
name: "mounted".to_string(),
@ -91,7 +91,7 @@ impl WriteMutations for WebsysDom {
self.templates
.insert(template.name.to_owned(), self.max_template_id);
save_template(roots, self.max_template_id);
save_template(self.interpreter.js_channel(), roots, self.max_template_id);
self.max_template_id += 1
}


@ -23,7 +23,7 @@ impl WebsysDom {
// Recursively rehydrate the dom from the VirtualDom
self.rehydrate_scope(root_scope, dom, &mut ids, &mut to_mount)?;
dioxus_interpreter_js::hydrate(ids);
dioxus_interpreter_js::hydrate(self.interpreter.js_channel(), ids);
#[cfg(feature = "mounted")]
for id in to_mount {
@ -168,7 +168,11 @@ impl WriteMutations for OnlyWriteTemplates<'_> {
self.0
.templates
.insert(template.name.to_owned(), self.0.max_template_id);
save_template(roots, self.0.max_template_id);
save_template(
self.0.interpreter.js_channel(),
roots,
self.0.max_template_id,
);
self.0.max_template_id += 1
}