Merge branch 'master' into optimize-remove_attribute

This commit is contained in:
Evan Almloff 2023-07-20 10:56:38 -07:00
commit 09f1899539
43 changed files with 669 additions and 493 deletions

View file

@ -81,6 +81,7 @@ futures-util = { version = "0.3", default-features = false }
rustc-hash = "1.1.0"
wasm-bindgen = "0.2.87"
html_parser = "0.7.0"
thiserror = "1.0.40"
# This is a "virtual package"
# It is not meant to be published, but is used so "cargo run --example XYZ" works properly
@ -117,6 +118,6 @@ rand = { version = "0.8.4", features = ["small_rng"] }
tokio = { version = "1.16.1", features = ["full"] }
reqwest = { version = "0.11.9", features = ["json"] }
fern = { version = "0.6.0", features = ["colored"] }
thiserror = "1.0.30"
env_logger = "0.10.0"
simple_logger = "4.0.0"
thiserror = { workspace = true }

View file

@ -3,7 +3,7 @@ name = "dioxus-guide"
version = "0.0.1"
edition = "2021"
description = "Dioxus guide, including testable examples"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
publish = false
[dev-dependencies]

View file

@ -3,7 +3,7 @@ name = "dioxus-router-guide"
version = "0.0.1"
edition = "2021"
description = "Dioxus router guide, including testable examples"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
publish = false
[dev-dependencies]

View file

@ -4,7 +4,7 @@ version = "0.3.0"
edition = "2021"
authors = ["Jonathan Kelley"]
description = "Autofomatter for Dioxus RSX"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com"
keywords = ["dom", "ui", "gui", "react"]
@ -14,9 +14,9 @@ keywords = ["dom", "ui", "gui", "react"]
dioxus-rsx = { workspace = true }
proc-macro2 = { version = "1.0.6", features = ["span-locations"] }
quote = "1.0"
syn = { version = "1.0.11", features = ["full", "extra-traits", "visit"] }
syn = { version = "2.0", features = ["full", "extra-traits", "visit"] }
serde = { version = "1.0.136", features = ["derive"] }
prettyplease = { package = "prettier-please", version = "0.1.16", features = [
prettyplease = { package = "prettier-please", version = "0.2", features = [
"verbatim",
] }

View file

@ -86,7 +86,8 @@ pub fn fmt_file(contents: &str) -> Vec<FormattedBlock> {
MacroDelimiter::Paren(b) => b.span,
MacroDelimiter::Brace(b) => b.span,
MacroDelimiter::Bracket(b) => b.span,
};
}
.join();
let mut formatted = String::new();
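
For context on the hunk above: in syn 2.0 the parenthesis/brace/bracket tokens carry a proc-macro2 `DelimSpan` instead of a plain `Span`, which is why the formatter now appends `.join()` after the match. A minimal sketch of that pattern, assuming syn 2.x and proc-macro2 1.x (the helper name is illustrative):

```rust
use proc_macro2::Span;
use syn::MacroDelimiter;

// syn 2.0: delimiter tokens expose a `DelimSpan`; `join()` collapses it back
// into a single `Span` covering both the opening and closing delimiter.
fn delimiter_span(delim: &MacroDelimiter) -> Span {
    match delim {
        MacroDelimiter::Paren(p) => p.span.join(),
        MacroDelimiter::Brace(b) => b.span.join(),
        MacroDelimiter::Bracket(b) => b.span.join(),
    }
}
```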

View file

@ -5,12 +5,12 @@ authors = ["Jonathan Kelley"]
edition = "2021"
description = "CLI tool for developing, testing, and publishing Dioxus apps"
repository = "https://github.com/DioxusLabs/dioxus/"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
[dependencies]
# cli core
clap = { version = "4.2", features = ["derive"] }
thiserror = "1.0.30"
thiserror = { workspace = true }
wasm-bindgen-cli-support = "0.2"
colored = "2.0.0"
@ -57,7 +57,7 @@ flate2 = "1.0.22"
tar = "0.4.38"
zip = "0.6.2"
tower = "0.4.12"
syn = { version = "1.0", features = ["full", "extra-traits"] }
syn = { version = "2.0", features = ["full", "extra-traits"] }
proc-macro2 = { version = "1.0", features = ["span-locations"] }
lazy_static = "1.4.0"

View file

@ -4,7 +4,7 @@ version = "0.3.0"
authors = ["Jonathan Kelley"]
edition = "2021"
description = "Core macro for Dioxus Virtual DOM"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com"
keywords = ["dom", "ui", "gui", "react"]
@ -15,7 +15,7 @@ proc-macro = true
[dependencies]
proc-macro2 = { version = "1.0" }
quote = "1.0"
syn = { version = "1.0", features = ["full", "extra-traits"] }
syn = { version = "2.0", features = ["full", "extra-traits"] }
dioxus-rsx = { workspace = true }
# testing

View file

@ -135,7 +135,7 @@ impl ToTokens for InlinePropsBody {
quote! { <#struct_generics> },
)
} else {
let lifetime: LifetimeDef = parse_quote! { 'a };
let lifetime: LifetimeParam = parse_quote! { 'a };
let mut fn_generics = generics.clone();
fn_generics

View file

@ -166,8 +166,9 @@ mod field_info {
use crate::props::type_from_inside_option;
use proc_macro2::TokenStream;
use quote::quote;
use syn::parse::Error;
use syn::spanned::Spanned;
use syn::Expr;
use syn::{parse::Error, punctuated::Punctuated};
use super::util::{
expr_to_single_string, ident_to_type, path_to_single_string, strip_raw_ident_prefix,
@ -260,31 +261,32 @@ mod field_info {
pub fn with(mut self, attrs: &[syn::Attribute]) -> Result<Self, Error> {
let mut skip_tokens = None;
for attr in attrs {
if path_to_single_string(&attr.path).as_deref() != Some("props") {
if path_to_single_string(attr.path()).as_deref() != Some("props") {
continue;
}
if attr.tokens.is_empty() {
continue;
}
let as_expr: syn::Expr = syn::parse2(attr.tokens.clone())?;
match as_expr {
syn::Expr::Paren(body) => {
self.apply_meta(*body.expr)?;
}
syn::Expr::Tuple(body) => {
for expr in body.elems.into_iter() {
self.apply_meta(expr)?;
match &attr.meta {
syn::Meta::List(list) => {
if list.tokens.is_empty() {
continue;
}
}
_ => {
return Err(Error::new_spanned(attr.tokens.clone(), "Expected (<...>)"));
continue;
}
}
let as_expr = attr.parse_args_with(
Punctuated::<Expr, syn::Token![,]>::parse_separated_nonempty,
)?;
for expr in as_expr.into_iter() {
self.apply_meta(expr)?;
}
// Stash its span for later (we don't yet know if it'll be an error)
if self.skip && skip_tokens.is_none() {
skip_tokens = Some(attr.tokens.clone());
skip_tokens = Some(attr.meta.clone());
}
}
@ -461,6 +463,8 @@ mod struct_info {
use proc_macro2::TokenStream;
use quote::quote;
use syn::parse::Error;
use syn::punctuated::Punctuated;
use syn::Expr;
use super::field_info::{FieldBuilderAttr, FieldInfo};
use super::util::{
@ -1082,28 +1086,28 @@ Finally, call `.build()` to create the instance of `{name}`.
pub fn new(attrs: &[syn::Attribute]) -> Result<TypeBuilderAttr, Error> {
let mut result = TypeBuilderAttr::default();
for attr in attrs {
if path_to_single_string(&attr.path).as_deref() != Some("builder") {
if path_to_single_string(attr.path()).as_deref() != Some("builder") {
continue;
}
if attr.tokens.is_empty() {
continue;
}
let as_expr: syn::Expr = syn::parse2(attr.tokens.clone())?;
match as_expr {
syn::Expr::Paren(body) => {
result.apply_meta(*body.expr)?;
}
syn::Expr::Tuple(body) => {
for expr in body.elems.into_iter() {
result.apply_meta(expr)?;
match &attr.meta {
syn::Meta::List(list) => {
if list.tokens.is_empty() {
continue;
}
}
_ => {
return Err(Error::new_spanned(attr.tokens.clone(), "Expected (<...>)"));
continue;
}
}
let as_expr = attr.parse_args_with(
Punctuated::<Expr, syn::Token![,]>::parse_separated_nonempty,
)?;
for expr in as_expr.into_iter() {
result.apply_meta(expr)?;
}
}
Ok(result)
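
The two hunks above migrate `#[props(...)]`/`#[builder(...)]` attribute handling from syn 1.0 (`attr.path`, re-parsing `attr.tokens` as a parenthesized or tuple expression) to syn 2.0 (`attr.path()`, inspecting `attr.meta`, then `parse_args_with`). A minimal, self-contained sketch of the syn 2.0 pattern, assuming syn 2.x with the `full` feature; the function name is illustrative:

```rust
use syn::{punctuated::Punctuated, Attribute, Expr, Meta, Token};

// Collect the comma-separated expressions inside every `#[props(...)]`
// attribute, skipping empty `#[props]` / `#[props()]` forms.
fn props_args(attrs: &[Attribute]) -> syn::Result<Vec<Expr>> {
    let mut exprs = Vec::new();
    for attr in attrs {
        // syn 2.0: `path()` is an accessor method rather than a public field.
        if !attr.path().is_ident("props") {
            continue;
        }
        // syn 2.0: the parsed meta is available directly on the attribute.
        match &attr.meta {
            Meta::List(list) if !list.tokens.is_empty() => {}
            _ => continue,
        }
        // Parse `(a, b, c)`-style arguments as a non-empty expression list.
        let args =
            attr.parse_args_with(Punctuated::<Expr, Token![,]>::parse_separated_nonempty)?;
        exprs.extend(args);
    }
    Ok(exprs)
}
```

Using `parse_separated_nonempty` keeps the existing `#[props(a, b)]` call sites working, while the `Meta::List` check quietly skips attributes with no arguments.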

View file

@ -4,7 +4,7 @@ version = "0.3.3"
authors = ["Jonathan Kelley"]
edition = "2018"
description = "Core functionality for Dioxus - a concurrent renderer-agnostic Virtual DOM for interactive user experiences"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com"
keywords = ["dom", "ui", "gui", "react"]

View file

@ -4,7 +4,7 @@ version = "0.3.0"
authors = ["Jonathan Kelley"]
edition = "2018"
description = "WebView renderer for Dioxus"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com/docs/0.3/guide/en/getting_started/desktop.html"
keywords = ["dom", "ui", "gui", "react"]
@ -17,7 +17,7 @@ dioxus-hot-reload = { workspace = true, optional = true }
serde = "1.0.136"
serde_json = "1.0.79"
thiserror = "1.0.30"
thiserror = { workspace = true }
log = { workspace = true }
wry = { version = "0.28.0" }
futures-channel = { workspace = true }

View file

@ -7,7 +7,7 @@ description = "TUI-based renderer for Dioxus"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com/docs/0.3/guide/en/getting_started/tui.html"
keywords = ["dom", "ui", "gui", "react", "terminal"]
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
[dependencies]
dioxus = { workspace = true }

View file

@ -33,7 +33,7 @@ futures-util = { workspace = true }
log = { workspace = true }
rand = { version = "0.8.4", features = ["small_rng"] }
criterion = "0.3.5"
thiserror = "1.0.30"
thiserror = { workspace = true }
env_logger = "0.10.0"
tokio = { workspace = true, features = ["full"] }
# dioxus-edit-stream = { workspace = true }

View file

@ -4,7 +4,7 @@ version = "0.3.0"
authors = ["Jonathan Kelley"]
edition = "2018"
description = "Global state management for Dioxus"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com"
keywords = ["dom", "ui", "gui", "react", "state-management"]

View file

@ -3,7 +3,7 @@ name = "dioxus-fullstack"
version = "0.1.0"
edition = "2021"
description = "Fullstack Dioxus Utilities"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com"
keywords = ["dom", "ui", "gui", "react", "ssr", "fullstack"]
@ -39,7 +39,7 @@ dioxus-router = { workspace = true, optional = true }
log = { workspace = true }
once_cell = "1.17.1"
thiserror = "1.0.40"
thiserror = { workspace = true }
tokio = { workspace = true, features = ["full"], optional = true }
object-pool = "0.5.4"
anymap = "0.12.1"

View file

@ -4,17 +4,21 @@ version = "0.3.1"
authors = ["Jonathan Kelley"]
edition = "2018"
description = "Basic useful hooks for Dioxus."
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com"
keywords = ["dom", "ui", "gui", "react"]
[features]
default = []
nightly-features = []
[dependencies]
dioxus-core = { workspace = true }
futures-channel = { workspace = true }
log = { workspace = true }
thiserror = { workspace = true }
debug-cell = { git = "https://github.com/Niedzwiedzw/debug-cell", rev = "3352a1f8aff19f56f5e3b2018200a3338fd43d2e" } # waiting for the merge / official DioxusLabs fork
[dev-dependencies]
futures-util = { workspace = true, default-features = false }

View file

@ -1,3 +1,5 @@
#![cfg_attr(feature = "nightly-features", feature(debug_refcell))]
#[macro_export]
/// A helper macro for using hooks and properties in async environments.
///

View file

@ -1,11 +1,79 @@
use self::error::{UseSharedStateError, UseSharedStateResult};
use dioxus_core::{ScopeId, ScopeState};
use std::{
cell::{Ref, RefCell, RefMut},
collections::HashSet,
rc::Rc,
sync::Arc,
use std::{collections::HashSet, rc::Rc, sync::Arc};
#[cfg(debug_assertions)]
pub use debug_cell::{
error::{BorrowError, BorrowMutError},
Ref, RefCell, RefMut,
};
#[cfg(not(debug_assertions))]
pub use std::cell::{BorrowError, BorrowMutError, Ref, RefCell, RefMut};
#[macro_export]
macro_rules! debug_location {
() => {{
#[cfg(debug_assertions)]
{
std::panic::Location::caller()
}
#[cfg(not(debug_assertions))]
{
()
}
}};
}
pub mod error {
fn locations_display(locations: &[&'static std::panic::Location<'static>]) -> String {
locations
.iter()
.map(|location| format!(" - {location}"))
.collect::<Vec<_>>()
.join("\n")
}
#[derive(thiserror::Error, Debug)]
pub enum UseSharedStateError {
#[cfg_attr(
debug_assertions,
error(
"[{0}] {1} is already borrowed at, so it cannot be borrowed mutably. Previous borrows:\n[{2}]\n\n",
.source.attempted_at,
.type_name,
locations_display(&.source.already_borrowed_at)
)
)]
#[cfg_attr(
not(debug_assertions),
error("{type_name} is already borrowed, so it cannot be borrowed mutably. (More detail available in debug mode)")
)]
AlreadyBorrowed {
source: super::BorrowMutError,
type_name: &'static str,
},
#[cfg_attr(
debug_assertions,
error(
"[{0}] {1} is already borrowed mutably at [{2}], so it cannot be borrowed anymore.",
.source.attempted_at,
.type_name,
locations_display(&.source.already_borrowed_at)
)
)]
#[cfg_attr(
not(debug_assertions),
error("{type_name} is already borrowed mutably, so it cannot be borrowed anymore. (More detail available in debug mode)")
)]
AlreadyBorrowedMutably {
source: super::BorrowError,
type_name: &'static str,
},
}
pub type UseSharedStateResult<T> = Result<T, UseSharedStateError>;
}
type ProvidedState<T> = Rc<RefCell<ProvidedStateInner<T>>>;
// Tracks all the subscribers to a shared State
@ -95,7 +163,7 @@ pub fn use_shared_state<T: 'static>(cx: &ScopeState) -> Option<&UseSharedState<T
root.borrow_mut().consumers.insert(scope_id);
let state = UseSharedState { inner: root };
let state = UseSharedState::new(root);
let owner = UseSharedStateOwner { state, scope_id };
Some(owner)
});
@ -122,29 +190,97 @@ pub struct UseSharedState<T> {
}
impl<T> UseSharedState<T> {
fn new(inner: Rc<RefCell<ProvidedStateInner<T>>>) -> Self {
Self { inner }
}
/// Notify all consumers of the state that it has changed. (This is called automatically when you call "write")
pub fn notify_consumers(&self) {
self.inner.borrow_mut().notify_consumers();
}
/// Try reading the shared state
#[cfg_attr(debug_assertions, track_caller)]
#[cfg_attr(debug_assertions, inline(never))]
pub fn try_read(&self) -> UseSharedStateResult<Ref<'_, T>> {
match self.inner.try_borrow() {
Ok(value) => Ok(Ref::map(value, |inner| &inner.value)),
Err(source) => Err(UseSharedStateError::AlreadyBorrowedMutably {
source,
type_name: std::any::type_name::<Self>(),
}),
}
}
/// Read the shared value
#[cfg_attr(debug_assertions, track_caller)]
#[cfg_attr(debug_assertions, inline(never))]
pub fn read(&self) -> Ref<'_, T> {
Ref::map(self.inner.borrow(), |inner| &inner.value)
match self.try_read() {
Ok(value) => value,
Err(message) => panic!(
"Reading the shared state failed: {}\n({:?})",
message, message
),
}
}
/// Try writing the shared state
#[cfg_attr(debug_assertions, track_caller)]
#[cfg_attr(debug_assertions, inline(never))]
pub fn try_write(&self) -> UseSharedStateResult<RefMut<'_, T>> {
match self.inner.try_borrow_mut() {
Ok(mut value) => {
value.notify_consumers();
Ok(RefMut::map(value, |inner| &mut inner.value))
}
Err(source) => Err(UseSharedStateError::AlreadyBorrowed {
source,
type_name: std::any::type_name::<Self>(),
}),
}
}
/// Calling "write" will force the component to re-render
///
///
// TODO: We prevent unnecessary notifications only in the hook, but we should figure out some more global lock
#[cfg_attr(debug_assertions, track_caller)]
#[cfg_attr(debug_assertions, inline(never))]
pub fn write(&self) -> RefMut<'_, T> {
let mut value = self.inner.borrow_mut();
value.notify_consumers();
RefMut::map(value, |inner| &mut inner.value)
match self.try_write() {
Ok(value) => value,
Err(message) => panic!(
"Writing to shared state failed: {}\n({:?})",
message, message
),
}
}
/// Allows the ability to write the value without forcing a re-render
/// Tries writing the value without forcing a re-render
#[cfg_attr(debug_assertions, track_caller)]
#[cfg_attr(debug_assertions, inline(never))]
pub fn try_write_silent(&self) -> UseSharedStateResult<RefMut<'_, T>> {
match self.inner.try_borrow_mut() {
Ok(value) => Ok(RefMut::map(value, |inner| &mut inner.value)),
Err(source) => Err(UseSharedStateError::AlreadyBorrowed {
source,
type_name: std::any::type_name::<Self>(),
}),
}
}
/// Writes the value without forcing a re-render
#[cfg_attr(debug_assertions, track_caller)]
#[cfg_attr(debug_assertions, inline(never))]
pub fn write_silent(&self) -> RefMut<'_, T> {
RefMut::map(self.inner.borrow_mut(), |inner| &mut inner.value)
match self.try_write_silent() {
Ok(value) => value,
Err(message) => panic!(
"Writing to shared state silently failed: {}\n({:?})",
message, message
),
}
}
}
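
A hypothetical caller-side sketch of the fallible accessors added above, assuming the dioxus 0.3-era `Scope`/`use_shared_state` API; the `Counter` type and the components are illustrative, not part of this change:

```rust
#![allow(non_snake_case)]
use dioxus::prelude::*;

struct Counter(i32);

fn App(cx: Scope) -> Element {
    // Provide the shared value once, near the root.
    use_shared_state_provider(cx, || Counter(0));
    cx.render(rsx! { Child {} })
}

fn Child(cx: Scope) -> Element {
    let counter = use_shared_state::<Counter>(cx)?;

    // `try_write` now reports overlapping borrows as a typed
    // `UseSharedStateError` instead of panicking inside the `RefCell`.
    if let Err(err) = counter.try_write().map(|mut c| c.0 += 1) {
        eprintln!("shared state is busy: {err}");
    }

    let count = counter.read().0;
    cx.render(rsx! { div { "count: {count}" } })
}
```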

View file

@ -2,22 +2,26 @@
name = "dioxus-hot-reload"
version = "0.1.1"
edition = "2021"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com/docs/0.3/guide/en/getting_started/hot_reload.html"
description = "Hot reloading utilites for Dioxus"
description = "Hot reloading utilities for Dioxus"
keywords = ["dom", "ui", "gui", "react", "hot-reloading"]
[dependencies]
dioxus-rsx = { workspace = true }
dioxus-core = { workspace = true, features = ["serialize"] }
dioxus-html = { workspace = true, features = ["hot-reload-context"] }
dioxus-html = { workspace = true }
interprocess-docfix = { version = "1.2.2" }
notify = "5.0.0"
chrono = { version = "0.4.24", default-features = false, features = ["clock"] }
notify = { version = "5.0.0", optional = true }
chrono = { version = "0.4.24", default-features = false, features = ["clock"], optional = true }
serde_json = "1.0.91"
serde = { version = "1", features = ["derive"] }
execute = "0.2.11"
once_cell = "1.17.0"
ignore = "0.4.19"
execute = { version = "0.2.11", optional = true }
once_cell = { version = "1.17.0", optional = true }
ignore = { version = "0.4.19", optional = true }
[features]
default = []
file_watcher = ["ignore", "chrono", "notify", "execute", "once_cell", "ignore", "dioxus-html/hot-reload-context"]

View file

@ -0,0 +1,352 @@
use std::{
io::{BufRead, BufReader, Write},
path::PathBuf,
str::FromStr,
sync::{Arc, Mutex},
};
use dioxus_core::Template;
use dioxus_rsx::{
hot_reload::{FileMap, FileMapBuildResult, UpdateResult},
HotReloadingContext,
};
use interprocess_docfix::local_socket::{LocalSocketListener, LocalSocketStream};
use notify::{RecommendedWatcher, RecursiveMode, Watcher};
pub use dioxus_html::HtmlCtx;
use serde::{Deserialize, Serialize};
pub struct Config<Ctx: HotReloadingContext = HtmlCtx> {
root_path: &'static str,
listening_paths: &'static [&'static str],
excluded_paths: &'static [&'static str],
log: bool,
rebuild_with: Option<Box<dyn FnMut() -> bool + Send + 'static>>,
phantom: std::marker::PhantomData<Ctx>,
}
impl<Ctx: HotReloadingContext> Default for Config<Ctx> {
fn default() -> Self {
Self {
root_path: "",
listening_paths: &[""],
excluded_paths: &["./target"],
log: true,
rebuild_with: None,
phantom: std::marker::PhantomData,
}
}
}
impl Config<HtmlCtx> {
pub const fn new() -> Self {
Self {
root_path: "",
listening_paths: &[""],
excluded_paths: &["./target"],
log: true,
rebuild_with: None,
phantom: std::marker::PhantomData,
}
}
}
impl<Ctx: HotReloadingContext> Config<Ctx> {
/// Set the root path of the project (where the Cargo.toml file is). This is automatically set by the [`hot_reload_init`] macro.
pub fn root(self, path: &'static str) -> Self {
Self {
root_path: path,
..self
}
}
/// Set whether to enable logs
pub fn with_logging(self, log: bool) -> Self {
Self { log, ..self }
}
/// Set the command to run to rebuild the project
///
/// For example to restart the application after a change is made, you could use `cargo run`
pub fn with_rebuild_command(self, rebuild_command: &'static str) -> Self {
self.with_rebuild_callback(move || {
execute::shell(rebuild_command)
.spawn()
.expect("Failed to spawn the rebuild command");
true
})
}
/// Set a callback to run when the project needs to be rebuilt; it returns whether the server should shut down
///
/// For example a CLI application could rebuild the application when a change is made
pub fn with_rebuild_callback(
self,
rebuild_callback: impl FnMut() -> bool + Send + 'static,
) -> Self {
Self {
rebuild_with: Some(Box::new(rebuild_callback)),
..self
}
}
/// Set the paths to listen to for changes that trigger hot reloading. If a path is a directory, changes to all files in that directory are watched recursively.
pub fn with_paths(self, paths: &'static [&'static str]) -> Self {
Self {
listening_paths: paths,
..self
}
}
/// Sets paths to ignore changes on. This will override any paths set in the [`Config::with_paths`] method in the case of conflicts.
pub fn excluded_paths(self, paths: &'static [&'static str]) -> Self {
Self {
excluded_paths: paths,
..self
}
}
}
/// Initialize the hot reloading listener
pub fn init<Ctx: HotReloadingContext + Send + 'static>(cfg: Config<Ctx>) {
let Config {
root_path,
listening_paths,
log,
mut rebuild_with,
excluded_paths,
phantom: _,
} = cfg;
if let Ok(crate_dir) = PathBuf::from_str(root_path) {
// try to find the .gitignore file
let gitignore_file_path = crate_dir.join(".gitignore");
let (gitignore, _) = ignore::gitignore::Gitignore::new(gitignore_file_path);
// convert the excluded paths to absolute paths
let excluded_paths = excluded_paths
.iter()
.map(|path| crate_dir.join(PathBuf::from(path)))
.collect::<Vec<_>>();
let channels = Arc::new(Mutex::new(Vec::new()));
let FileMapBuildResult {
map: file_map,
errors,
} = FileMap::<Ctx>::create_with_filter(crate_dir.clone(), |path| {
// skip excluded paths
excluded_paths.iter().any(|p| path.starts_with(p)) ||
// respect .gitignore
gitignore
.matched_path_or_any_parents(path, path.is_dir())
.is_ignore()
})
.unwrap();
for err in errors {
if log {
println!("hot reloading failed to initialize:\n{err:?}");
}
}
let file_map = Arc::new(Mutex::new(file_map));
#[cfg(target_os = "macos")]
{
// On unix, if you force quit the application, it can leave the file socket open
// This will cause the local socket listener to fail to open
// We check if the file socket is already open from an old session and then delete it
let paths = ["./dioxusin", "./@dioxusin"];
for path in paths {
let path = PathBuf::from(path);
if path.exists() {
let _ = std::fs::remove_file(path);
}
}
}
match LocalSocketListener::bind("@dioxusin") {
Ok(local_socket_stream) => {
let aborted = Arc::new(Mutex::new(false));
// listen for connections
std::thread::spawn({
let file_map = file_map.clone();
let channels = channels.clone();
let aborted = aborted.clone();
let _ = local_socket_stream.set_nonblocking(true);
move || {
loop {
if let Ok(mut connection) = local_socket_stream.accept() {
// send any templates that have changed before the socket connected
let templates: Vec<_> = {
file_map
.lock()
.unwrap()
.map
.values()
.filter_map(|(_, template_slot)| *template_slot)
.collect()
};
for template in templates {
if !send_msg(
HotReloadMsg::UpdateTemplate(template),
&mut connection,
) {
continue;
}
}
channels.lock().unwrap().push(connection);
if log {
println!("Connected to hot reloading 🚀");
}
}
if *aborted.lock().unwrap() {
break;
}
}
}
});
// watch for changes
std::thread::spawn(move || {
let mut last_update_time = chrono::Local::now().timestamp();
let (tx, rx) = std::sync::mpsc::channel();
let mut watcher =
RecommendedWatcher::new(tx, notify::Config::default()).unwrap();
for path in listening_paths {
let full_path = crate_dir.join(path);
if let Err(err) = watcher.watch(&full_path, RecursiveMode::Recursive) {
if log {
println!(
"hot reloading failed to start watching {full_path:?}:\n{err:?}",
);
}
}
}
let mut rebuild = {
let aborted = aborted.clone();
let channels = channels.clone();
move || {
if let Some(rebuild_callback) = &mut rebuild_with {
if log {
println!("Rebuilding the application...");
}
let shutdown = rebuild_callback();
if shutdown {
*aborted.lock().unwrap() = true;
}
for channel in &mut *channels.lock().unwrap() {
send_msg(HotReloadMsg::Shutdown, channel);
}
return shutdown;
} else if log {
println!(
"Rebuild needed... shutting down hot reloading.\nManually rebuild the application to view futher changes."
);
}
true
}
};
for evt in rx {
if chrono::Local::now().timestamp_millis() >= last_update_time {
if let Ok(evt) = evt {
let real_paths = evt
.paths
.iter()
.filter(|path| {
// skip non rust files
matches!(
path.extension().and_then(|p| p.to_str()),
Some("rs" | "toml" | "css" | "html" | "js")
) &&
// skip excluded paths
!excluded_paths.iter().any(|p| path.starts_with(p)) &&
// respect .gitignore
!gitignore
.matched_path_or_any_parents(path, false)
.is_ignore()
})
.collect::<Vec<_>>();
// Give time for the change to take effect before reading the file
if !real_paths.is_empty() {
std::thread::sleep(std::time::Duration::from_millis(10));
}
let mut channels = channels.lock().unwrap();
for path in real_paths {
// if this file type cannot be hot reloaded, rebuild the application
if path.extension().and_then(|p| p.to_str()) != Some("rs")
&& rebuild()
{
return;
}
// find changes to the rsx in the file
match file_map
.lock()
.unwrap()
.update_rsx(path, crate_dir.as_path())
{
Ok(UpdateResult::UpdatedRsx(msgs)) => {
for msg in msgs {
let mut i = 0;
while i < channels.len() {
let channel = &mut channels[i];
if send_msg(
HotReloadMsg::UpdateTemplate(msg),
channel,
) {
i += 1;
} else {
channels.remove(i);
}
}
}
}
Ok(UpdateResult::NeedsRebuild) => {
drop(channels);
if rebuild() {
return;
}
break;
}
Err(err) => {
if log {
println!(
"hot reloading failed to update rsx:\n{err:?}"
);
}
}
}
}
}
last_update_time = chrono::Local::now().timestamp_millis();
}
}
});
}
Err(error) => println!("failed to connect to hot reloading\n{error}"),
}
}
}
fn send_msg(msg: HotReloadMsg, channel: &mut impl Write) -> bool {
if let Ok(msg) = serde_json::to_string(&msg) {
if channel.write_all(msg.as_bytes()).is_err() {
return false;
}
if channel.write_all(&[b'\n']).is_err() {
return false;
}
true
} else {
false
}
}
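
A hypothetical consumer-side sketch for the `file_watcher`-gated server above, assuming the `hot_reload_init!` macro referenced in its doc comments and the new `file_watcher` cargo feature; the watched paths and rebuild command are illustrative:

```rust
use dioxus_hot_reload::{hot_reload_init, Config};

fn main() {
    // Start the hot-reload listener: watch the listed paths, log activity,
    // and restart the app when a non-hot-reloadable file changes.
    hot_reload_init!(
        Config::new()
            .with_paths(&["src", "examples"])
            .with_rebuild_command("cargo run")
            .with_logging(true)
    );

    // ...launch the application as usual...
}
```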

View file

@ -1,21 +1,16 @@
use std::{
io::{BufRead, BufReader, Write},
path::PathBuf,
str::FromStr,
sync::{Arc, Mutex},
};
use std::io::{BufRead, BufReader};
use dioxus_core::Template;
use dioxus_rsx::{
hot_reload::{FileMap, FileMapBuildResult, UpdateResult},
HotReloadingContext,
};
use interprocess_docfix::local_socket::{LocalSocketListener, LocalSocketStream};
use notify::{RecommendedWatcher, RecursiveMode, Watcher};
#[cfg(file_watcher)]
pub use dioxus_html::HtmlCtx;
use interprocess_docfix::local_socket::LocalSocketStream;
use serde::{Deserialize, Serialize};
#[cfg(file_watcher)]
mod file_watcher;
#[cfg(file_watcher)]
use file_watcher::*;
/// A message the hot reloading server sends to the client
#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
pub enum HotReloadMsg {
@ -26,341 +21,6 @@ pub enum HotReloadMsg {
Shutdown,
}
pub struct Config<Ctx: HotReloadingContext = HtmlCtx> {
root_path: &'static str,
listening_paths: &'static [&'static str],
excluded_paths: &'static [&'static str],
log: bool,
rebuild_with: Option<Box<dyn FnMut() -> bool + Send + 'static>>,
phantom: std::marker::PhantomData<Ctx>,
}
impl<Ctx: HotReloadingContext> Default for Config<Ctx> {
fn default() -> Self {
Self {
root_path: "",
listening_paths: &[""],
excluded_paths: &["./target"],
log: true,
rebuild_with: None,
phantom: std::marker::PhantomData,
}
}
}
impl Config<HtmlCtx> {
pub const fn new() -> Self {
Self {
root_path: "",
listening_paths: &[""],
excluded_paths: &["./target"],
log: true,
rebuild_with: None,
phantom: std::marker::PhantomData,
}
}
}
impl<Ctx: HotReloadingContext> Config<Ctx> {
/// Set the root path of the project (where the Cargo.toml file is). This is automatically set by the [`hot_reload_init`] macro.
pub fn root(self, path: &'static str) -> Self {
Self {
root_path: path,
..self
}
}
/// Set whether to enable logs
pub fn with_logging(self, log: bool) -> Self {
Self { log, ..self }
}
/// Set the command to run to rebuild the project
///
/// For example to restart the application after a change is made, you could use `cargo run`
pub fn with_rebuild_command(self, rebuild_command: &'static str) -> Self {
self.with_rebuild_callback(move || {
execute::shell(rebuild_command)
.spawn()
.expect("Failed to spawn the rebuild command");
true
})
}
/// Set a callback to run to when the project needs to be rebuilt and returns if the server should shut down
///
/// For example a CLI application could rebuild the application when a change is made
pub fn with_rebuild_callback(
self,
rebuild_callback: impl FnMut() -> bool + Send + 'static,
) -> Self {
Self {
rebuild_with: Some(Box::new(rebuild_callback)),
..self
}
}
/// Set the paths to listen for changes in to trigger hot reloading. If this is a directory it will listen for changes in all files in that directory recursively.
pub fn with_paths(self, paths: &'static [&'static str]) -> Self {
Self {
listening_paths: paths,
..self
}
}
/// Sets paths to ignore changes on. This will override any paths set in the [`Config::with_paths`] method in the case of conflicts.
pub fn excluded_paths(self, paths: &'static [&'static str]) -> Self {
Self {
excluded_paths: paths,
..self
}
}
}
/// Initialize the hot reloading listener
pub fn init<Ctx: HotReloadingContext + Send + 'static>(cfg: Config<Ctx>) {
let Config {
root_path,
listening_paths,
log,
mut rebuild_with,
excluded_paths,
phantom: _,
} = cfg;
if let Ok(crate_dir) = PathBuf::from_str(root_path) {
// try to find the gitingore file
let gitignore_file_path = crate_dir.join(".gitignore");
let (gitignore, _) = ignore::gitignore::Gitignore::new(gitignore_file_path);
// convert the excluded paths to absolute paths
let excluded_paths = excluded_paths
.iter()
.map(|path| crate_dir.join(PathBuf::from(path)))
.collect::<Vec<_>>();
let channels = Arc::new(Mutex::new(Vec::new()));
let FileMapBuildResult {
map: file_map,
errors,
} = FileMap::<Ctx>::create_with_filter(crate_dir.clone(), |path| {
// skip excluded paths
excluded_paths.iter().any(|p| path.starts_with(p)) ||
// respect .gitignore
gitignore
.matched_path_or_any_parents(path, path.is_dir())
.is_ignore()
})
.unwrap();
for err in errors {
if log {
println!("hot reloading failed to initialize:\n{err:?}");
}
}
let file_map = Arc::new(Mutex::new(file_map));
#[cfg(target_os = "macos")]
{
// On unix, if you force quit the application, it can leave the file socket open
// This will cause the local socket listener to fail to open
// We check if the file socket is already open from an old session and then delete it
let paths = ["./dioxusin", "./@dioxusin"];
for path in paths {
let path = PathBuf::from(path);
if path.exists() {
let _ = std::fs::remove_file(path);
}
}
}
match LocalSocketListener::bind("@dioxusin") {
Ok(local_socket_stream) => {
let aborted = Arc::new(Mutex::new(false));
// listen for connections
std::thread::spawn({
let file_map = file_map.clone();
let channels = channels.clone();
let aborted = aborted.clone();
let _ = local_socket_stream.set_nonblocking(true);
move || {
loop {
if let Ok(mut connection) = local_socket_stream.accept() {
// send any templates than have changed before the socket connected
let templates: Vec<_> = {
file_map
.lock()
.unwrap()
.map
.values()
.filter_map(|(_, template_slot)| *template_slot)
.collect()
};
for template in templates {
if !send_msg(
HotReloadMsg::UpdateTemplate(template),
&mut connection,
) {
continue;
}
}
channels.lock().unwrap().push(connection);
if log {
println!("Connected to hot reloading 🚀");
}
}
if *aborted.lock().unwrap() {
break;
}
}
}
});
// watch for changes
std::thread::spawn(move || {
let mut last_update_time = chrono::Local::now().timestamp();
let (tx, rx) = std::sync::mpsc::channel();
let mut watcher =
RecommendedWatcher::new(tx, notify::Config::default()).unwrap();
for path in listening_paths {
let full_path = crate_dir.join(path);
if let Err(err) = watcher.watch(&full_path, RecursiveMode::Recursive) {
if log {
println!(
"hot reloading failed to start watching {full_path:?}:\n{err:?}",
);
}
}
}
let mut rebuild = {
let aborted = aborted.clone();
let channels = channels.clone();
move || {
if let Some(rebuild_callback) = &mut rebuild_with {
if log {
println!("Rebuilding the application...");
}
let shutdown = rebuild_callback();
if shutdown {
*aborted.lock().unwrap() = true;
}
for channel in &mut *channels.lock().unwrap() {
send_msg(HotReloadMsg::Shutdown, channel);
}
return shutdown;
} else if log {
println!(
"Rebuild needed... shutting down hot reloading.\nManually rebuild the application to view futher changes."
);
}
true
}
};
for evt in rx {
if chrono::Local::now().timestamp_millis() >= last_update_time {
if let Ok(evt) = evt {
let real_paths = evt
.paths
.iter()
.filter(|path| {
// skip non rust files
matches!(
path.extension().and_then(|p| p.to_str()),
Some("rs" | "toml" | "css" | "html" | "js")
) &&
// skip excluded paths
!excluded_paths.iter().any(|p| path.starts_with(p)) &&
// respect .gitignore
!gitignore
.matched_path_or_any_parents(path, false)
.is_ignore()
})
.collect::<Vec<_>>();
// Give time for the change to take effect before reading the file
if !real_paths.is_empty() {
std::thread::sleep(std::time::Duration::from_millis(10));
}
let mut channels = channels.lock().unwrap();
for path in real_paths {
// if this file type cannot be hot reloaded, rebuild the application
if path.extension().and_then(|p| p.to_str()) != Some("rs")
&& rebuild()
{
return;
}
// find changes to the rsx in the file
match file_map
.lock()
.unwrap()
.update_rsx(path, crate_dir.as_path())
{
Ok(UpdateResult::UpdatedRsx(msgs)) => {
for msg in msgs {
let mut i = 0;
while i < channels.len() {
let channel = &mut channels[i];
if send_msg(
HotReloadMsg::UpdateTemplate(msg),
channel,
) {
i += 1;
} else {
channels.remove(i);
}
}
}
}
Ok(UpdateResult::NeedsRebuild) => {
drop(channels);
if rebuild() {
return;
}
break;
}
Err(err) => {
if log {
println!(
"hot reloading failed to update rsx:\n{err:?}"
);
}
}
}
}
}
last_update_time = chrono::Local::now().timestamp_millis();
}
}
});
}
Err(error) => println!("failed to connect to hot reloading\n{error}"),
}
}
}
fn send_msg(msg: HotReloadMsg, channel: &mut impl Write) -> bool {
if let Ok(msg) = serde_json::to_string(&msg) {
if channel.write_all(msg.as_bytes()).is_err() {
return false;
}
if channel.write_all(&[b'\n']).is_err() {
return false;
}
true
} else {
false
}
}
/// Connect to the hot reloading listener. The callback provided will be called every time a template change is detected
pub fn connect(mut f: impl FnMut(HotReloadMsg) + Send + 'static) {
std::thread::spawn(move || {

View file

@ -4,14 +4,14 @@ version = "0.3.1"
authors = ["Jonathan Kelley"]
edition = "2018"
description = "HTML Element pack for Dioxus - a concurrent renderer-agnostic Virtual DOM for interactive user experiences"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com"
keywords = ["dom", "ui", "gui", "react"]
[dependencies]
dioxus-core = { workspace = true }
dioxus-rsx = { workspace = true, optional = true }
dioxus-rsx = { workspace = true, features = ["hot_reload"], optional = true }
serde = { version = "1", features = ["derive"], optional = true }
serde_repr = { version = "0.1", optional = true }
wasm-bindgen = { workspace = true, optional = true }

View file

@ -15,7 +15,7 @@ wasm-bindgen = { workspace = true, optional = true }
js-sys = { version = "0.3.56", optional = true }
web-sys = { version = "0.3.56", optional = true, features = ["Element", "Node"] }
sledgehammer_bindgen = { version = "0.2.1", optional = true }
sledgehammer_utils = { version = "0.1.1", optional = true }
sledgehammer_utils = { version = "0.2", optional = true }
serde = { version = "1.0", features = ["derive"], optional = true }
[features]

View file

@ -6,10 +6,10 @@ repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com/docs/0.3/guide/en/getting_started/liveview.html"
keywords = ["dom", "ui", "gui", "react", "liveview"]
description = "Build server-side apps with Dioxus"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
[dependencies]
thiserror = "1.0.38"
thiserror = { workspace = true }
log = { workspace = true }
slab = { workspace = true }
futures-util = { workspace = true, default-features = false, features = [

View file

@ -7,7 +7,7 @@ description = "Mobile-compatible renderer for Dioxus"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com/docs/0.3/guide/en/getting_started/mobile.html"
keywords = ["dom", "ui", "gui", "react"]
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
[dependencies]
dioxus-desktop = { workspace = true }

View file

@ -3,7 +3,7 @@ name = "dioxus-native-core-macro"
version = "0.3.0"
edition = "2021"
description = "Build natively rendered apps with Dioxus"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com"
keywords = ["dom", "ui", "gui", "react"]
@ -12,7 +12,7 @@ keywords = ["dom", "ui", "gui", "react"]
proc-macro = true
[dependencies]
syn = { version = "1.0.11", features = ["extra-traits", "full"] }
syn = { version = "2.0", features = ["extra-traits", "full"] }
quote = "1.0"
[dev-dependencies]

View file

@ -14,7 +14,7 @@ pub fn partial_derive_state(_: TokenStream, input: TokenStream) -> TokenStream {
let has_create_fn = impl_block
.items
.iter()
.any(|item| matches!(item, syn::ImplItem::Method(method) if method.sig.ident == "create"));
.any(|item| matches!(item, syn::ImplItem::Fn(method) if method.sig.ident == "create"));
let parent_dependencies = impl_block
.items
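
The one-line change above tracks syn 2.0's rename of `ImplItem::Method` to `ImplItem::Fn`. A minimal sketch of the same kind of check in isolation, assuming syn 2.x with the `full` feature (the `State` impl is illustrative):

```rust
use syn::{parse_quote, ImplItem, ItemImpl};

// syn 2.0: associated functions in an `impl` block are `ImplItem::Fn`.
fn has_create_fn(impl_block: &ItemImpl) -> bool {
    impl_block
        .items
        .iter()
        .any(|item| matches!(item, ImplItem::Fn(f) if f.sig.ident == "create"))
}

fn main() {
    let impl_block: ItemImpl = parse_quote! {
        impl State {
            fn create(&self) {}
        }
    };
    assert!(has_create_fn(&impl_block));
}
```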

View file

@ -2,7 +2,7 @@
name = "dioxus-native-core"
version = "0.2.0"
edition = "2021"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com"
description = "Build natively rendered apps with Dioxus"

View file

@ -7,7 +7,7 @@ description = "TUI-based renderer for Dioxus"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com"
keywords = ["dom", "ui", "gui", "react", "terminal"]
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
[dependencies]
dioxus-html = { workspace = true }

View file

@ -4,7 +4,7 @@ version = "0.3.0"
authors = ["Evan Almloff"]
edition = "2021"
description = "Macro for Dioxus Router"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com"
documentation = "https://dioxuslabs.com"
@ -15,7 +15,7 @@ keywords = ["dom", "ui", "gui", "react", "router"]
proc-macro = true
[dependencies]
syn = { version = "1.0.11", features = ["extra-traits", "full"] }
syn = { version = "2.0", features = ["extra-traits", "full"] }
quote = "1.0"
proc-macro2 = "1.0.56"
slab = "0.4"

View file

@ -286,7 +286,7 @@ impl RouteEnum {
let mut excluded = Vec::new();
// Apply the any nesting attributes in order
for attr in &variant.attrs {
if attr.path.is_ident("nest") {
if attr.path().is_ident("nest") {
let mut children_routes = Vec::new();
{
// add all of the variants of the enum to the children_routes until we hit an end_nest
@ -294,9 +294,9 @@ impl RouteEnum {
'o: for variant in &data.variants {
children_routes.push(variant.fields.clone());
for attr in &variant.attrs {
if attr.path.is_ident("nest") {
if attr.path().is_ident("nest") {
level += 1;
} else if attr.path.is_ident("end_nest") {
} else if attr.path().is_ident("end_nest") {
level -= 1;
if level < 0 {
break 'o;
@ -341,7 +341,7 @@ impl RouteEnum {
nests.push(nest);
nest_stack.push(NestId(nest_index));
} else if attr.path.is_ident("end_nest") {
} else if attr.path().is_ident("end_nest") {
nest_stack.pop();
// pop the current nest segment off the stack and add it to the parent or the site map
if let Some(segment) = site_map_stack.pop() {
@ -360,7 +360,7 @@ impl RouteEnum {
children.push(current);
}
} else if attr.path.is_ident("layout") {
} else if attr.path().is_ident("layout") {
let parser = |input: ParseStream| {
let bang: Option<Token![!]> = input.parse().ok();
let exclude = bang.is_some();
@ -382,9 +382,9 @@ impl RouteEnum {
layouts.push(layout);
layout_stack.push(LayoutId(layout_index));
}
} else if attr.path.is_ident("end_layout") {
} else if attr.path().is_ident("end_layout") {
layout_stack.pop();
} else if attr.path.is_ident("redirect") {
} else if attr.path().is_ident("redirect") {
let parser = |input: ParseStream| {
Redirect::parse(input, nest_stack.clone(), redirects.len())
};

View file

@ -75,7 +75,7 @@ impl Route {
let route_attr = variant
.attrs
.iter()
.find(|attr| attr.path.is_ident("route"));
.find(|attr| attr.path().is_ident("route"));
let route;
let ty;
let route_name = variant.ident.clone();
@ -106,7 +106,7 @@ impl Route {
if let Some(route_attr) = variant
.attrs
.iter()
.find(|attr| attr.path.is_ident("child"))
.find(|attr| attr.path().is_ident("child"))
{
let args = route_attr.parse_args::<ChildArgs>()?;
route = args.route.value();
@ -116,7 +116,7 @@ impl Route {
let child_field = fields.named.iter().find(|f| {
f.attrs
.iter()
.any(|attr| attr.path.is_ident("child"))
.any(|attr| attr.path().is_ident("child"))
|| *f.ident.as_ref().unwrap() == "child"
});
match child_field{

View file

@ -3,7 +3,7 @@ name = "dioxus-router"
version = "0.3.0"
edition = "2018"
description = "Cross-platform router for Dioxus apps"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com"
keywords = ["dom", "ui", "gui", "react", "wasm"]
@ -14,11 +14,15 @@ dioxus = { workspace = true }
dioxus-router-macro = { workspace = true }
gloo = { version = "0.8.0", optional = true }
log = { workspace = true }
thiserror = { workspace = true }
futures-util = { workspace = true }
serde_urlencoded = { version = "0.7.1", optional = true }
serde = { version = "1", features = ["derive"], optional = true }
thiserror = "1.0.37"
url = "2.3.1"
wasm-bindgen = { workspace = true, optional = true }
web-sys = { version = "0.3.60", optional = true, features = ["ScrollRestoration"] }
web-sys = { version = "0.3.60", optional = true, features = [
"ScrollRestoration",
] }
js-sys = { version = "0.3.63", optional = true }
gloo-utils = { version = "0.1.6", optional = true }
dioxus-ssr = { workspace = true, optional = true }

View file

@ -4,7 +4,7 @@ version = "0.3.0"
edition = "2021"
authors = ["Jonathan Kelley"]
description = "Autofomatter for Dioxus RSX"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com"
documentation = "https://dioxuslabs.com"
@ -18,7 +18,7 @@ dioxus-rsx = { workspace = true }
html_parser.workspace = true
proc-macro2 = "1.0.49"
quote = "1.0.23"
syn = { version = "1.0.107", features = ["full"] }
syn = { version = "2.0", features = ["full"] }
convert_case = "0.5.0"
# [features]

View file

@ -2,7 +2,7 @@
name = "dioxus-rsx"
version = "0.0.3"
edition = "2018"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
description = "Core functionality for Dioxus - a concurrent renderer-agnostic Virtual DOM for interactive user experiences"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com"
@ -14,8 +14,12 @@ keywords = ["dom", "ui", "gui", "react"]
[dependencies]
proc-macro2 = { version = "1.0", features = ["span-locations"] }
dioxus-core = { workspace = true}
syn = { version = "1.0", features = ["full", "extra-traits"] }
syn = { version = "2.0", features = ["full", "extra-traits"] }
quote = { version = "1.0" }
serde = { version = "1.0", features = ["derive"] }
internment = "0.7.0"
krates = "0.12.6"
serde = { version = "1.0", features = ["derive"], optional = true }
internment = { version = "0.7.0", optional = true }
krates = { version = "0.12.6", optional = true }
[features]
hot_reload = ["krates", "internment"]
serde = ["dep:serde"]

View file

@ -1,15 +1,18 @@
use std::fmt::Display;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
/// An error produced when interpreting the rsx
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum Error {
ParseError(ParseError),
RecompileRequiredError(RecompileReason),
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum RecompileReason {
Variable(String),
Expression(String),
@ -18,14 +21,16 @@ pub enum RecompileReason {
Attribute(String),
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct CodeLocation {
pub line: u32,
pub column: u32,
pub file_path: &'static str,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct ParseError {
pub message: String,
pub location: CodeLocation,

View file

@ -85,7 +85,7 @@ fn find_rsx_item(
(syn::ImplItem::Const(new_item), syn::ImplItem::Const(old_item)) => {
find_rsx_expr(&new_item.expr, &old_item.expr, rsx_calls)
}
(syn::ImplItem::Method(new_item), syn::ImplItem::Method(old_item)) => {
(syn::ImplItem::Fn(new_item), syn::ImplItem::Fn(old_item)) => {
find_rsx_block(&new_item.block, &old_item.block, rsx_calls)
}
(syn::ImplItem::Type(new_item), syn::ImplItem::Type(old_item)) => {
@ -114,7 +114,6 @@ fn find_rsx_item(
|| new_item.semi_token != old_item.semi_token
|| new_item.ident != old_item.ident
}
(syn::Item::Macro2(new_item), syn::Item::Macro2(old_item)) => old_item != new_item,
(syn::Item::Mod(new_item), syn::Item::Mod(old_item)) => {
match (&new_item.content, &old_item.content) {
(Some((_, new_items)), Some((_, old_items))) => {
@ -186,7 +185,7 @@ fn find_rsx_trait(
true
}
}
(syn::TraitItem::Method(new_item), syn::TraitItem::Method(old_item)) => {
(syn::TraitItem::Fn(new_item), syn::TraitItem::Fn(old_item)) => {
if let (Some(new_block), Some(old_block)) = (&new_item.default, &old_item.default) {
find_rsx_block(new_block, old_block, rsx_calls)
} else {
@ -239,8 +238,9 @@ fn find_rsx_stmt(
match (new_stmt, old_stmt) {
(syn::Stmt::Local(new_local), syn::Stmt::Local(old_local)) => {
(match (&new_local.init, &old_local.init) {
(Some((new_eq, new_expr)), Some((old_eq, old_expr))) => {
find_rsx_expr(new_expr, old_expr, rsx_calls) || new_eq != old_eq
(Some(new_local), Some(old_local)) => {
find_rsx_expr(&new_local.expr, &old_local.expr, rsx_calls)
|| new_local != old_local
}
(None, None) => false,
_ => true,
@ -252,11 +252,13 @@ fn find_rsx_stmt(
(syn::Stmt::Item(new_item), syn::Stmt::Item(old_item)) => {
find_rsx_item(new_item, old_item, rsx_calls)
}
(syn::Stmt::Expr(new_expr), syn::Stmt::Expr(old_expr)) => {
(syn::Stmt::Expr(new_expr, _), syn::Stmt::Expr(old_expr, _)) => {
find_rsx_expr(new_expr, old_expr, rsx_calls)
}
(syn::Stmt::Semi(new_expr, new_semi), syn::Stmt::Semi(old_expr, old_semi)) => {
find_rsx_expr(new_expr, old_expr, rsx_calls) || new_semi != old_semi
(syn::Stmt::Macro(new_macro), syn::Stmt::Macro(old_macro)) => {
find_rsx_macro(&new_macro.mac, &old_macro.mac, rsx_calls)
|| new_macro.attrs != old_macro.attrs
|| new_macro.semi_token != old_macro.semi_token
}
_ => true,
}
@ -285,12 +287,6 @@ fn find_rsx_expr(
|| new_expr.attrs != old_expr.attrs
|| new_expr.eq_token != old_expr.eq_token
}
(syn::Expr::AssignOp(new_expr), syn::Expr::AssignOp(old_expr)) => {
find_rsx_expr(&new_expr.left, &old_expr.left, rsx_calls)
|| find_rsx_expr(&new_expr.right, &old_expr.right, rsx_calls)
|| new_expr.attrs != old_expr.attrs
|| new_expr.op != old_expr.op
}
(syn::Expr::Async(new_expr), syn::Expr::Async(old_expr)) => {
find_rsx_block(&new_expr.block, &old_expr.block, rsx_calls)
|| new_expr.attrs != old_expr.attrs
@ -314,11 +310,6 @@ fn find_rsx_expr(
|| new_expr.attrs != old_expr.attrs
|| new_expr.label != old_expr.label
}
(syn::Expr::Box(new_expr), syn::Expr::Box(old_expr)) => {
find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
|| new_expr.attrs != old_expr.attrs
|| new_expr.box_token != old_expr.box_token
}
(syn::Expr::Break(new_expr), syn::Expr::Break(old_expr)) => {
match (&new_expr.expr, &old_expr.expr) {
(Some(new_inner), Some(old_inner)) => {
@ -478,7 +469,7 @@ fn find_rsx_expr(
}
(syn::Expr::Path(new_expr), syn::Expr::Path(old_expr)) => old_expr != new_expr,
(syn::Expr::Range(new_expr), syn::Expr::Range(old_expr)) => {
match (&new_expr.from, &old_expr.from) {
match (&new_expr.start, &old_expr.start) {
(Some(new_expr), Some(old_expr)) => {
if find_rsx_expr(new_expr, old_expr, rsx_calls) {
return true;
@ -487,7 +478,7 @@ fn find_rsx_expr(
(None, None) => (),
_ => return true,
}
match (&new_expr.to, &old_expr.to) {
match (&new_expr.end, &old_expr.end) {
(Some(new_inner), Some(old_inner)) => {
find_rsx_expr(new_inner, old_inner, rsx_calls)
|| new_expr.attrs != old_expr.attrs
@ -568,12 +559,6 @@ fn find_rsx_expr(
}
new_expr.attrs != old_expr.attrs || new_expr.paren_token != old_expr.paren_token
}
(syn::Expr::Type(new_expr), syn::Expr::Type(old_expr)) => {
find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
|| new_expr.attrs != old_expr.attrs
|| new_expr.colon_token != old_expr.colon_token
|| new_expr.ty != old_expr.ty
}
(syn::Expr::Unary(new_expr), syn::Expr::Unary(old_expr)) => {
find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
|| new_expr.attrs != old_expr.attrs
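
Several hunks above follow syn 2.0's reshaped syntax tree: `Local::init` is now an `Option<LocalInit>`, `Stmt::Semi` is folded into `Stmt::Expr(expr, Option<semicolon>)`, macro statements get a dedicated `Stmt::Macro` variant, `ExprRange` fields are renamed `start`/`end`, and variants such as `Expr::Box`, `Expr::AssignOp`, `Expr::Type`, and `Item::Macro2` are gone. A minimal sketch of matching the new `Stmt` shape, assuming syn 2.x with the `full` feature (helper names illustrative):

```rust
use syn::Stmt;

fn statement_kind(stmt: &Stmt) -> &'static str {
    match stmt {
        Stmt::Local(local) if local.init.is_some() => "let binding with initializer",
        Stmt::Local(_) => "let binding without initializer",
        Stmt::Item(_) => "nested item",
        // syn 1.0's `Stmt::Semi` is now a `Stmt::Expr` carrying its semicolon.
        Stmt::Expr(_, Some(_)) => "expression statement ending in `;`",
        Stmt::Expr(_, None) => "trailing expression",
        Stmt::Macro(_) => "macro invocation statement",
    }
}

fn main() {
    let block: syn::Block =
        syn::parse_str("{ let x = 1; println!(\"{x}\"); x + 1 }").unwrap();
    for stmt in &block.stmts {
        println!("{}", statement_kind(stmt));
    }
}
```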

View file

@ -15,18 +15,22 @@
mod errors;
mod component;
mod element;
#[cfg(feature = "hot_reload")]
pub mod hot_reload;
mod ifmt;
mod node;
use std::{collections::HashMap, fmt::Debug, hash::Hash};
use std::{fmt::Debug, hash::Hash};
// Re-export the namespaces into each other
pub use component::*;
#[cfg(feature = "hot_reload")]
use dioxus_core::{Template, TemplateAttribute, TemplateNode};
pub use element::*;
#[cfg(feature = "hot_reload")]
pub use hot_reload::HotReloadingContext;
pub use ifmt::*;
#[cfg(feature = "hot_reload")]
use internment::Intern;
pub use node::*;
@ -38,6 +42,7 @@ use syn::{
Result, Token,
};
#[cfg(feature = "hot_reload")]
// interns an object into a static object, reusing the value if it already exists
fn intern<T: Eq + Hash + Send + Sync + ?Sized + 'static>(s: impl Into<Intern<T>>) -> &'static T {
s.into().as_ref()
@ -50,6 +55,7 @@ pub struct CallBody {
}
impl CallBody {
#[cfg(feature = "hot_reload")]
/// This will try to create a new template from the current body and the previous body. This will return None if the rsx has some dynamic part that has changed.
/// This function intentionally leaks memory to create a static template.
/// Keeping the template static allows us to simplify the core of dioxus and leaking memory in dev mode is less of an issue.
@ -140,6 +146,7 @@ pub struct TemplateRenderer<'a> {
}
impl<'a> TemplateRenderer<'a> {
#[cfg(feature = "hot_reload")]
fn update_template<Ctx: HotReloadingContext>(
&mut self,
previous_call: Option<CallBody>,
@ -248,14 +255,16 @@ impl<'a> ToTokens for TemplateRenderer<'a> {
}
}
#[cfg(feature = "hot_reload")]
#[derive(Default, Debug)]
struct DynamicMapping {
attribute_to_idx: HashMap<ElementAttr, Vec<usize>>,
attribute_to_idx: std::collections::HashMap<ElementAttr, Vec<usize>>,
last_attribute_idx: usize,
node_to_idx: HashMap<BodyNode, Vec<usize>>,
node_to_idx: std::collections::HashMap<BodyNode, Vec<usize>>,
last_element_idx: usize,
}
#[cfg(feature = "hot_reload")]
impl DynamicMapping {
fn from(nodes: Vec<BodyNode>) -> Self {
let mut new = Self::default();
@ -349,6 +358,7 @@ pub struct DynamicContext<'a> {
}
impl<'a> DynamicContext<'a> {
#[cfg(feature = "hot_reload")]
fn update_node<Ctx: HotReloadingContext>(
&mut self,
root: &'a BodyNode,
@ -558,6 +568,7 @@ impl<'a> DynamicContext<'a> {
}
}
#[cfg(feature = "hot_reload")]
#[test]
fn create_template() {
let input = quote! {
@ -643,6 +654,7 @@ fn create_template() {
)
}
#[cfg(feature = "hot_reload")]
#[test]
fn diff_template() {
use dioxus_core::Scope;

View file

@ -222,7 +222,7 @@ impl Parse for ForLoop {
fn parse(input: ParseStream) -> Result<Self> {
let for_token: Token![for] = input.parse()?;
let pat = input.parse()?;
let pat = Pat::parse_single(input)?;
let in_token: Token![in] = input.parse()?;
let expr: Expr = input.call(Expr::parse_without_eager_brace)?;
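
In syn 2.0, `Pat` no longer implements `Parse` directly, which is why the hunk above switches to `Pat::parse_single`. A minimal sketch of a parser using it, assuming syn 2.x with the `full` feature; `ForLoopHead` is an illustrative stand-in, not the crate's actual `ForLoop` type:

```rust
use syn::{
    parse::{Parse, ParseStream},
    Expr, Pat, Result, Token,
};

// Illustrative stand-in for an rsx-style `for pat in expr` header.
#[allow(dead_code)]
struct ForLoopHead {
    pat: Pat,
    expr: Expr,
}

impl Parse for ForLoopHead {
    fn parse(input: ParseStream) -> Result<Self> {
        let _for_token: Token![for] = input.parse()?;
        // syn 2.0: patterns are parsed with an explicit associated function.
        let pat = Pat::parse_single(input)?;
        let _in_token: Token![in] = input.parse()?;
        let expr: Expr = input.call(Expr::parse_without_eager_brace)?;
        Ok(ForLoopHead { pat, expr })
    }
}
```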

View file

@ -4,7 +4,7 @@ version = "0.3.0"
authors = ["Jonathan Kelley"]
edition = "2018"
description = "Dioxus render-to-string"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com/docs/0.3/guide/en/getting_started/ssr.html"
keywords = ["dom", "ui", "gui", "react", "ssr"]
@ -21,6 +21,8 @@ tokio = { version = "1.28", features = ["full"] }
[dev-dependencies]
dioxus = { workspace = true }
thiserror = { workspace = true }
log = { workspace = true }
fern = { version = "0.6.0", features = ["colored"] }
anyhow = "1.0"
argh = "0.1.4"

View file

@ -4,7 +4,7 @@ version = "0.3.2"
authors = ["Jonathan Kelley"]
edition = "2018"
description = "Web renderer for Dioxus using websys"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com/docs/0.3/guide/en/getting_started/web.html"
keywords = ["dom", "ui", "gui", "react", "wasm"]

View file

@ -3,7 +3,7 @@ name = "dioxus-playwright-liveview-test"
version = "0.0.1"
edition = "2021"
description = "Playwright test for Dioxus Liveview"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
publish = false
[dependencies]

View file

@ -3,7 +3,7 @@ name = "dioxus-playwright-web-test"
version = "0.0.1"
edition = "2021"
description = "Playwright test for Dioxus Web"
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
publish = false
[dependencies]