half-build-times

This commit is contained in:
Evan Almloff 2023-07-19 10:19:23 -07:00
parent f69b84bf34
commit 3d8d8a1b6f
24 changed files with 489 additions and 456 deletions

View file

@ -14,9 +14,9 @@ keywords = ["dom", "ui", "gui", "react"]
dioxus-rsx = { workspace = true }
proc-macro2 = { version = "1.0.6", features = ["span-locations"] }
quote = "1.0"
syn = { version = "1.0.11", features = ["full", "extra-traits"] }
syn = { version = "2.0", features = ["full", "extra-traits"] }
serde = { version = "1.0.136", features = ["derive"] }
prettyplease = { package = "prettier-please", version = "0.1.16", features = [
prettyplease = { package = "prettier-please", version = "0.2", features = [
"verbatim",
] }

View file

@ -5,6 +5,8 @@
use proc_macro2::LineColumn;
use syn::{Block, Expr, File, Item, Macro, Stmt};
use crate::expr;
type CollectedMacro<'a> = &'a Macro;
pub fn collect_from_file<'a>(file: &'a File, macros: &mut Vec<CollectedMacro<'a>>) {
@ -42,7 +44,7 @@ pub fn collect_from_item<'a>(item: &'a Item, macros: &mut Vec<CollectedMacro<'a>
}
// None of these we can really do anything with at the item level
Item::Macro2(_)
Item::Macro(_)
| Item::Enum(_)
| Item::ExternCrate(_)
| Item::ForeignMod(_)
@ -61,11 +63,18 @@ pub fn collect_from_block<'a>(block: &'a Block, macros: &mut Vec<CollectedMacro<
match stmt {
Stmt::Item(item) => collect_from_item(item, macros),
Stmt::Local(local) => {
if let Some((_eq, init)) = &local.init {
collect_from_expr(init, macros);
if let Some(init) = &local.init {
collect_from_expr(&init.expr, macros);
}
}
Stmt::Expr(expr, _) => collect_from_expr(expr, macros),
Stmt::Macro(mac) => {
if mac.mac.path.segments[0].ident == "rsx"
|| mac.mac.path.segments[0].ident == "render"
{
macros.push(&mac.mac);
}
}
Stmt::Expr(exp) | Stmt::Semi(exp, _) => collect_from_expr(exp, macros),
}
}
}
@ -147,10 +156,9 @@ pub fn collect_from_expr<'a>(expr: &'a Expr, macros: &mut Vec<CollectedMacro<'a>
// don't bother formatting these for now
Expr::Array(_)
| Expr::AssignOp(_)
| Expr::Assign(_)
| Expr::Await(_)
| Expr::Binary(_)
| Expr::Box(_)
| Expr::Break(_)
| Expr::Cast(_)
| Expr::Continue(_)
@ -167,7 +175,6 @@ pub fn collect_from_expr<'a>(expr: &'a Expr, macros: &mut Vec<CollectedMacro<'a>
| Expr::Try(_)
| Expr::TryBlock(_)
| Expr::Tuple(_)
| Expr::Type(_)
| Expr::Unary(_)
| Expr::Verbatim(_) => {}

View file

@ -176,7 +176,7 @@ impl Writer<'_> {
)?;
}
ContentField::OnHandlerRaw(exp) => {
let out = prettyplease::unparse_expr(exp);
let out = prettyplease::unparse(exp);
let mut lines = out.split('\n').peekable();
let first = lines.next().unwrap();
write!(self.out, "{name}: {first}")?;

View file

@ -88,9 +88,9 @@ pub fn fmt_file(contents: &str) -> Vec<FormattedBlock> {
}
let span = match item.delimiter {
MacroDelimiter::Paren(b) => b.span,
MacroDelimiter::Brace(b) => b.span,
MacroDelimiter::Bracket(b) => b.span,
MacroDelimiter::Paren(b) => b.span.open(),
MacroDelimiter::Brace(b) => b.span.open(),
MacroDelimiter::Bracket(b) => b.span.open(),
};
let mut formatted = String::new();

View file

@ -197,7 +197,7 @@ impl<'a> Writer<'a> {
self.out,
"for {} in {} {{",
forloop.pat.clone().into_token_stream(),
prettyplease::unparse_expr(&forloop.expr)
prettyplease::unparse_expr(&*forloop.expr)
)?;
if forloop.body.is_empty() {

View file

@ -57,7 +57,7 @@ flate2 = "1.0.22"
tar = "0.4.38"
zip = "0.6.2"
tower = "0.4.12"
syn = { version = "1.0", features = ["full", "extra-traits"] }
syn = { version = "2.0", features = ["full", "extra-traits"] }
proc-macro2 = { version = "1.0", features = ["span-locations"] }
lazy_static = "1.4.0"

View file

@ -15,7 +15,7 @@ proc-macro = true
[dependencies]
proc-macro2 = { version = "1.0" }
quote = "1.0"
syn = { version = "1.0", features = ["full", "extra-traits"] }
syn = { version = "2.0", features = ["full", "extra-traits"] }
dioxus-rsx = { workspace = true }
# testing

View file

@ -135,7 +135,7 @@ impl ToTokens for InlinePropsBody {
quote! { <#struct_generics> },
)
} else {
let lifetime: LifetimeDef = parse_quote! { 'a };
let lifetime: LifetimeParam = parse_quote! { 'a };
let mut fn_generics = generics.clone();
fn_generics

View file

@ -166,8 +166,9 @@ mod field_info {
use crate::props::type_from_inside_option;
use proc_macro2::TokenStream;
use quote::quote;
use syn::parse::Error;
use syn::spanned::Spanned;
use syn::Expr;
use syn::{parse::Error, punctuated::Punctuated};
use super::util::{
expr_to_single_string, ident_to_type, path_to_single_string, strip_raw_ident_prefix,
@ -260,31 +261,32 @@ mod field_info {
pub fn with(mut self, attrs: &[syn::Attribute]) -> Result<Self, Error> {
let mut skip_tokens = None;
for attr in attrs {
if path_to_single_string(&attr.path).as_deref() != Some("props") {
if path_to_single_string(attr.path()).as_deref() != Some("props") {
continue;
}
if attr.tokens.is_empty() {
match &attr.meta {
syn::Meta::List(list) => {
if list.tokens.is_empty() {
continue;
}
let as_expr: syn::Expr = syn::parse2(attr.tokens.clone())?;
match as_expr {
syn::Expr::Paren(body) => {
self.apply_meta(*body.expr)?;
}
syn::Expr::Tuple(body) => {
for expr in body.elems.into_iter() {
self.apply_meta(expr)?;
}
}
_ => {
return Err(Error::new_spanned(attr.tokens.clone(), "Expected (<...>)"));
continue;
}
}
let as_expr = attr.parse_args_with(
Punctuated::<Expr, syn::Token![,]>::parse_separated_nonempty,
)?;
for expr in as_expr.into_iter() {
self.apply_meta(expr)?;
}
// Stash its span for later (we dont yet know if itll be an error)
if self.skip && skip_tokens.is_none() {
skip_tokens = Some(attr.tokens.clone());
skip_tokens = Some(attr.meta.clone());
}
}
@ -461,6 +463,8 @@ mod struct_info {
use proc_macro2::TokenStream;
use quote::quote;
use syn::parse::Error;
use syn::punctuated::Punctuated;
use syn::Expr;
use super::field_info::{FieldBuilderAttr, FieldInfo};
use super::util::{
@ -1082,28 +1086,28 @@ Finally, call `.build()` to create the instance of `{name}`.
pub fn new(attrs: &[syn::Attribute]) -> Result<TypeBuilderAttr, Error> {
let mut result = TypeBuilderAttr::default();
for attr in attrs {
if path_to_single_string(&attr.path).as_deref() != Some("builder") {
if path_to_single_string(&attr.path()).as_deref() != Some("builder") {
continue;
}
if attr.tokens.is_empty() {
match &attr.meta {
syn::Meta::List(list) => {
if list.tokens.is_empty() {
continue;
}
let as_expr: syn::Expr = syn::parse2(attr.tokens.clone())?;
match as_expr {
syn::Expr::Paren(body) => {
result.apply_meta(*body.expr)?;
}
syn::Expr::Tuple(body) => {
for expr in body.elems.into_iter() {
result.apply_meta(expr)?;
}
}
_ => {
return Err(Error::new_spanned(attr.tokens.clone(), "Expected (<...>)"));
continue;
}
}
let as_expr = attr.parse_args_with(
Punctuated::<Expr, syn::Token![,]>::parse_separated_nonempty,
)?;
for expr in as_expr.into_iter() {
result.apply_meta(expr)?;
}
}
Ok(result)

View file

@ -5,19 +5,23 @@ edition = "2021"
license = "MIT/Apache-2.0"
repository = "https://github.com/DioxusLabs/dioxus/"
homepage = "https://dioxuslabs.com/docs/0.3/guide/en/getting_started/hot_reload.html"
description = "Hot reloading utilites for Dioxus"
description = "Hot reloading utilities for Dioxus"
keywords = ["dom", "ui", "gui", "react", "hot-reloading"]
[dependencies]
dioxus-rsx = { workspace = true }
dioxus-core = { workspace = true, features = ["serialize"] }
dioxus-html = { workspace = true, features = ["hot-reload-context"] }
dioxus-html = { workspace = true }
interprocess-docfix = { version = "1.2.2" }
notify = "5.0.0"
chrono = { version = "0.4.24", default-features = false, features = ["clock"] }
notify = { version = "5.0.0", optional = true }
chrono = { version = "0.4.24", default-features = false, features = ["clock"], optional = true }
serde_json = "1.0.91"
serde = { version = "1", features = ["derive"] }
execute = "0.2.11"
once_cell = "1.17.0"
ignore = "0.4.19"
execute = { version = "0.2.11", optional = true }
once_cell = { version = "1.17.0", optional = true }
ignore = { version = "0.4.19", optional = true }
[features]
default = []
file_watcher = ["ignore", "chrono", "notify", "execute", "once_cell", "ignore", "dioxus-html/hot-reload-context"]

View file

@ -0,0 +1,352 @@
use std::{
io::{BufRead, BufReader, Write},
path::PathBuf,
str::FromStr,
sync::{Arc, Mutex},
};
use dioxus_core::Template;
use dioxus_rsx::{
hot_reload::{FileMap, FileMapBuildResult, UpdateResult},
HotReloadingContext,
};
use interprocess_docfix::local_socket::{LocalSocketListener, LocalSocketStream};
use notify::{RecommendedWatcher, RecursiveMode, Watcher};
pub use dioxus_html::HtmlCtx;
use serde::{Deserialize, Serialize};
/// Configuration for the hot reloading server.
///
/// `Ctx` supplies the rsx hot-reloading context used to parse templates
/// (defaults to [`HtmlCtx`]).
pub struct Config<Ctx: HotReloadingContext = HtmlCtx> {
    // Project root (where the Cargo.toml file is); set via `root()`,
    // normally by the `hot_reload_init` macro.
    root_path: &'static str,
    // Paths (joined onto the root) watched for changes.
    listening_paths: &'static [&'static str],
    // Paths to ignore; overrides `listening_paths` on conflict.
    excluded_paths: &'static [&'static str],
    // Whether to print status messages to stdout.
    log: bool,
    // Optional rebuild callback; returning `true` shuts the server down.
    rebuild_with: Option<Box<dyn FnMut() -> bool + Send + 'static>>,
    // Carries the `Ctx` type parameter without storing a value of it.
    phantom: std::marker::PhantomData<Ctx>,
}
impl<Ctx: HotReloadingContext> Default for Config<Ctx> {
fn default() -> Self {
Self {
root_path: "",
listening_paths: &[""],
excluded_paths: &["./target"],
log: true,
rebuild_with: None,
phantom: std::marker::PhantomData,
}
}
}
impl Config<HtmlCtx> {
pub const fn new() -> Self {
Self {
root_path: "",
listening_paths: &[""],
excluded_paths: &["./target"],
log: true,
rebuild_with: None,
phantom: std::marker::PhantomData,
}
}
}
impl<Ctx: HotReloadingContext> Config<Ctx> {
    /// Set the root path of the project (where the Cargo.toml file is). This is automatically set by the [`hot_reload_init`] macro.
    pub fn root(mut self, path: &'static str) -> Self {
        self.root_path = path;
        self
    }

    /// Set whether to enable logs
    pub fn with_logging(mut self, log: bool) -> Self {
        self.log = log;
        self
    }

    /// Set the command to run to rebuild the project
    ///
    /// For example to restart the application after a change is made, you could use `cargo run`
    pub fn with_rebuild_command(self, rebuild_command: &'static str) -> Self {
        // Delegate to the callback form: spawn the command in a shell and
        // report shutdown (`true`) once it has been launched.
        self.with_rebuild_callback(move || {
            execute::shell(rebuild_command)
                .spawn()
                .expect("Failed to spawn the rebuild command");
            true
        })
    }

    /// Set a callback to run to when the project needs to be rebuilt and returns if the server should shut down
    ///
    /// For example a CLI application could rebuild the application when a change is made
    pub fn with_rebuild_callback(
        mut self,
        rebuild_callback: impl FnMut() -> bool + Send + 'static,
    ) -> Self {
        self.rebuild_with = Some(Box::new(rebuild_callback));
        self
    }

    /// Set the paths to listen for changes in to trigger hot reloading. If this is a directory it will listen for changes in all files in that directory recursively.
    pub fn with_paths(mut self, paths: &'static [&'static str]) -> Self {
        self.listening_paths = paths;
        self
    }

    /// Sets paths to ignore changes on. This will override any paths set in the [`Config::with_paths`] method in the case of conflicts.
    pub fn excluded_paths(mut self, paths: &'static [&'static str]) -> Self {
        self.excluded_paths = paths;
        self
    }
}
/// Initialize the hot reloading listener
///
/// Spawns two background threads:
/// 1. an accept loop on the `@dioxusin` local socket that pushes any
///    already-changed templates to newly connected clients, and
/// 2. a file watcher over `listening_paths` that sends template updates to
///    connected clients, or triggers the rebuild callback when a change
///    cannot be hot reloaded.
pub fn init<Ctx: HotReloadingContext + Send + 'static>(cfg: Config<Ctx>) {
    let Config {
        root_path,
        listening_paths,
        log,
        mut rebuild_with,
        excluded_paths,
        phantom: _,
    } = cfg;

    // NOTE(review): PathBuf::from_str is infallible, so this `if let` always
    // takes the Ok branch — confirm whether a real existence check was intended.
    if let Ok(crate_dir) = PathBuf::from_str(root_path) {
        // try to find the .gitignore file
        let gitignore_file_path = crate_dir.join(".gitignore");
        let (gitignore, _) = ignore::gitignore::Gitignore::new(gitignore_file_path);

        // convert the excluded paths to absolute paths
        let excluded_paths = excluded_paths
            .iter()
            .map(|path| crate_dir.join(PathBuf::from(path)))
            .collect::<Vec<_>>();

        // Connected client sockets, shared between the accept and watch threads.
        let channels = Arc::new(Mutex::new(Vec::new()));
        let FileMapBuildResult {
            map: file_map,
            errors,
        } = FileMap::<Ctx>::create_with_filter(crate_dir.clone(), |path| {
            // skip excluded paths
            excluded_paths.iter().any(|p| path.starts_with(p)) ||
            // respect .gitignore
            gitignore
                .matched_path_or_any_parents(path, path.is_dir())
                .is_ignore()
        })
        .unwrap();
        for err in errors {
            if log {
                println!("hot reloading failed to initialize:\n{err:?}");
            }
        }
        let file_map = Arc::new(Mutex::new(file_map));

        #[cfg(target_os = "macos")]
        {
            // On unix, if you force quit the application, it can leave the file socket open
            // This will cause the local socket listener to fail to open
            // We check if the file socket is already open from an old session and then delete it
            let paths = ["./dioxusin", "./@dioxusin"];
            for path in paths {
                let path = PathBuf::from(path);
                if path.exists() {
                    let _ = std::fs::remove_file(path);
                }
            }
        }

        match LocalSocketListener::bind("@dioxusin") {
            Ok(local_socket_stream) => {
                // Set to true to stop both background threads.
                let aborted = Arc::new(Mutex::new(false));

                // listen for connections
                std::thread::spawn({
                    let file_map = file_map.clone();
                    let channels = channels.clone();
                    let aborted = aborted.clone();
                    // Non-blocking so the loop can poll `aborted` between accepts.
                    let _ = local_socket_stream.set_nonblocking(true);
                    move || {
                        loop {
                            if let Ok(mut connection) = local_socket_stream.accept() {
                                // send any templates that have changed before the socket connected
                                let templates: Vec<_> = {
                                    file_map
                                        .lock()
                                        .unwrap()
                                        .map
                                        .values()
                                        .filter_map(|(_, template_slot)| *template_slot)
                                        .collect()
                                };
                                for template in templates {
                                    if !send_msg(
                                        HotReloadMsg::UpdateTemplate(template),
                                        &mut connection,
                                    ) {
                                        // NOTE(review): `continue` here advances to the next
                                        // template, same as falling through — verify intent
                                        // (dropping the connection may have been meant).
                                        continue;
                                    }
                                }
                                channels.lock().unwrap().push(connection);
                                if log {
                                    println!("Connected to hot reloading 🚀");
                                }
                            }
                            if *aborted.lock().unwrap() {
                                break;
                            }
                        }
                    }
                });

                // watch for changes
                std::thread::spawn(move || {
                    // NOTE(review): initialized from `timestamp()` (seconds) but compared
                    // against `timestamp_millis()` below — confirm the first comparison
                    // behaves as intended.
                    let mut last_update_time = chrono::Local::now().timestamp();
                    let (tx, rx) = std::sync::mpsc::channel();
                    let mut watcher =
                        RecommendedWatcher::new(tx, notify::Config::default()).unwrap();
                    for path in listening_paths {
                        let full_path = crate_dir.join(path);
                        if let Err(err) = watcher.watch(&full_path, RecursiveMode::Recursive) {
                            if log {
                                println!(
                                    "hot reloading failed to start watching {full_path:?}:\n{err:?}",
                                );
                            }
                        }
                    }
                    // Runs the rebuild callback (if any) and returns whether the
                    // watcher thread should exit.
                    let mut rebuild = {
                        let aborted = aborted.clone();
                        let channels = channels.clone();
                        move || {
                            if let Some(rebuild_callback) = &mut rebuild_with {
                                if log {
                                    println!("Rebuilding the application...");
                                }
                                let shutdown = rebuild_callback();
                                if shutdown {
                                    // Also stops the accept-loop thread above.
                                    *aborted.lock().unwrap() = true;
                                }
                                for channel in &mut *channels.lock().unwrap() {
                                    send_msg(HotReloadMsg::Shutdown, channel);
                                }
                                return shutdown;
                            } else if log {
                                // NOTE(review): "futher" typo in this user-facing string —
                                // fix belongs in a code change, not a comment.
                                println!(
                                    "Rebuild needed... shutting down hot reloading.\nManually rebuild the application to view futher changes."
                                );
                            }
                            true
                        }
                    };
                    for evt in rx {
                        // Debounce: ignore events older than the last processed update.
                        if chrono::Local::now().timestamp_millis() >= last_update_time {
                            if let Ok(evt) = evt {
                                let real_paths = evt
                                    .paths
                                    .iter()
                                    .filter(|path| {
                                        // skip non rust files
                                        matches!(
                                            path.extension().and_then(|p| p.to_str()),
                                            Some("rs" | "toml" | "css" | "html" | "js")
                                        ) &&
                                        // skip excluded paths
                                        !excluded_paths.iter().any(|p| path.starts_with(p)) &&
                                        // respect .gitignore
                                        !gitignore
                                            .matched_path_or_any_parents(path, false)
                                            .is_ignore()
                                    })
                                    .collect::<Vec<_>>();

                                // Give time for the change to take effect before reading the file
                                if !real_paths.is_empty() {
                                    std::thread::sleep(std::time::Duration::from_millis(10));
                                }

                                let mut channels = channels.lock().unwrap();
                                for path in real_paths {
                                    // if this file type cannot be hot reloaded, rebuild the application
                                    if path.extension().and_then(|p| p.to_str()) != Some("rs")
                                        && rebuild()
                                    {
                                        return;
                                    }
                                    // find changes to the rsx in the file
                                    match file_map
                                        .lock()
                                        .unwrap()
                                        .update_rsx(path, crate_dir.as_path())
                                    {
                                        Ok(UpdateResult::UpdatedRsx(msgs)) => {
                                            for msg in msgs {
                                                // Send to every client, dropping any whose
                                                // socket write fails.
                                                let mut i = 0;
                                                while i < channels.len() {
                                                    let channel = &mut channels[i];
                                                    if send_msg(
                                                        HotReloadMsg::UpdateTemplate(msg),
                                                        channel,
                                                    ) {
                                                        i += 1;
                                                    } else {
                                                        channels.remove(i);
                                                    }
                                                }
                                            }
                                        }
                                        Ok(UpdateResult::NeedsRebuild) => {
                                            // Release the lock before rebuilding, since the
                                            // rebuild closure locks `channels` itself.
                                            drop(channels);
                                            if rebuild() {
                                                return;
                                            }
                                            break;
                                        }
                                        Err(err) => {
                                            if log {
                                                println!(
                                                    "hot reloading failed to update rsx:\n{err:?}"
                                                );
                                            }
                                        }
                                    }
                                }
                            }
                            last_update_time = chrono::Local::now().timestamp_millis();
                        }
                    }
                });
            }
            Err(error) => println!("failed to connect to hot reloading\n{error}"),
        }
    }
}
/// Serialize `msg` as JSON and write it, newline-terminated, to `channel`.
///
/// Returns `false` if serialization or either write fails; `true` otherwise.
fn send_msg(msg: HotReloadMsg, channel: &mut impl Write) -> bool {
    match serde_json::to_string(&msg) {
        Ok(payload) => {
            channel.write_all(payload.as_bytes()).is_ok() && channel.write_all(b"\n").is_ok()
        }
        Err(_) => false,
    }
}

View file

@ -1,21 +1,16 @@
use std::{
io::{BufRead, BufReader, Write},
path::PathBuf,
str::FromStr,
sync::{Arc, Mutex},
};
use std::io::{BufRead, BufReader};
use dioxus_core::Template;
use dioxus_rsx::{
hot_reload::{FileMap, FileMapBuildResult, UpdateResult},
HotReloadingContext,
};
use interprocess_docfix::local_socket::{LocalSocketListener, LocalSocketStream};
use notify::{RecommendedWatcher, RecursiveMode, Watcher};
#[cfg(file_watcher)]
pub use dioxus_html::HtmlCtx;
use interprocess_docfix::local_socket::LocalSocketStream;
use serde::{Deserialize, Serialize};
#[cfg(file_watcher)]
mod file_watcher;
#[cfg(file_watcher)]
use file_watcher::*;
/// A message the hot reloading server sends to the client
#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
pub enum HotReloadMsg {
@ -26,341 +21,6 @@ pub enum HotReloadMsg {
Shutdown,
}
pub struct Config<Ctx: HotReloadingContext = HtmlCtx> {
root_path: &'static str,
listening_paths: &'static [&'static str],
excluded_paths: &'static [&'static str],
log: bool,
rebuild_with: Option<Box<dyn FnMut() -> bool + Send + 'static>>,
phantom: std::marker::PhantomData<Ctx>,
}
impl<Ctx: HotReloadingContext> Default for Config<Ctx> {
fn default() -> Self {
Self {
root_path: "",
listening_paths: &[""],
excluded_paths: &["./target"],
log: true,
rebuild_with: None,
phantom: std::marker::PhantomData,
}
}
}
impl Config<HtmlCtx> {
pub const fn new() -> Self {
Self {
root_path: "",
listening_paths: &[""],
excluded_paths: &["./target"],
log: true,
rebuild_with: None,
phantom: std::marker::PhantomData,
}
}
}
impl<Ctx: HotReloadingContext> Config<Ctx> {
/// Set the root path of the project (where the Cargo.toml file is). This is automatically set by the [`hot_reload_init`] macro.
pub fn root(self, path: &'static str) -> Self {
Self {
root_path: path,
..self
}
}
/// Set whether to enable logs
pub fn with_logging(self, log: bool) -> Self {
Self { log, ..self }
}
/// Set the command to run to rebuild the project
///
/// For example to restart the application after a change is made, you could use `cargo run`
pub fn with_rebuild_command(self, rebuild_command: &'static str) -> Self {
self.with_rebuild_callback(move || {
execute::shell(rebuild_command)
.spawn()
.expect("Failed to spawn the rebuild command");
true
})
}
/// Set a callback to run to when the project needs to be rebuilt and returns if the server should shut down
///
/// For example a CLI application could rebuild the application when a change is made
pub fn with_rebuild_callback(
self,
rebuild_callback: impl FnMut() -> bool + Send + 'static,
) -> Self {
Self {
rebuild_with: Some(Box::new(rebuild_callback)),
..self
}
}
/// Set the paths to listen for changes in to trigger hot reloading. If this is a directory it will listen for changes in all files in that directory recursively.
pub fn with_paths(self, paths: &'static [&'static str]) -> Self {
Self {
listening_paths: paths,
..self
}
}
/// Sets paths to ignore changes on. This will override any paths set in the [`Config::with_paths`] method in the case of conflicts.
pub fn excluded_paths(self, paths: &'static [&'static str]) -> Self {
Self {
excluded_paths: paths,
..self
}
}
}
/// Initialize the hot reloading listener
pub fn init<Ctx: HotReloadingContext + Send + 'static>(cfg: Config<Ctx>) {
let Config {
root_path,
listening_paths,
log,
mut rebuild_with,
excluded_paths,
phantom: _,
} = cfg;
if let Ok(crate_dir) = PathBuf::from_str(root_path) {
// try to find the gitingore file
let gitignore_file_path = crate_dir.join(".gitignore");
let (gitignore, _) = ignore::gitignore::Gitignore::new(gitignore_file_path);
// convert the excluded paths to absolute paths
let excluded_paths = excluded_paths
.iter()
.map(|path| crate_dir.join(PathBuf::from(path)))
.collect::<Vec<_>>();
let channels = Arc::new(Mutex::new(Vec::new()));
let FileMapBuildResult {
map: file_map,
errors,
} = FileMap::<Ctx>::create_with_filter(crate_dir.clone(), |path| {
// skip excluded paths
excluded_paths.iter().any(|p| path.starts_with(p)) ||
// respect .gitignore
gitignore
.matched_path_or_any_parents(path, path.is_dir())
.is_ignore()
})
.unwrap();
for err in errors {
if log {
println!("hot reloading failed to initialize:\n{err:?}");
}
}
let file_map = Arc::new(Mutex::new(file_map));
#[cfg(target_os = "macos")]
{
// On unix, if you force quit the application, it can leave the file socket open
// This will cause the local socket listener to fail to open
// We check if the file socket is already open from an old session and then delete it
let paths = ["./dioxusin", "./@dioxusin"];
for path in paths {
let path = PathBuf::from(path);
if path.exists() {
let _ = std::fs::remove_file(path);
}
}
}
match LocalSocketListener::bind("@dioxusin") {
Ok(local_socket_stream) => {
let aborted = Arc::new(Mutex::new(false));
// listen for connections
std::thread::spawn({
let file_map = file_map.clone();
let channels = channels.clone();
let aborted = aborted.clone();
let _ = local_socket_stream.set_nonblocking(true);
move || {
loop {
if let Ok(mut connection) = local_socket_stream.accept() {
// send any templates than have changed before the socket connected
let templates: Vec<_> = {
file_map
.lock()
.unwrap()
.map
.values()
.filter_map(|(_, template_slot)| *template_slot)
.collect()
};
for template in templates {
if !send_msg(
HotReloadMsg::UpdateTemplate(template),
&mut connection,
) {
continue;
}
}
channels.lock().unwrap().push(connection);
if log {
println!("Connected to hot reloading 🚀");
}
}
if *aborted.lock().unwrap() {
break;
}
}
}
});
// watch for changes
std::thread::spawn(move || {
let mut last_update_time = chrono::Local::now().timestamp();
let (tx, rx) = std::sync::mpsc::channel();
let mut watcher =
RecommendedWatcher::new(tx, notify::Config::default()).unwrap();
for path in listening_paths {
let full_path = crate_dir.join(path);
if let Err(err) = watcher.watch(&full_path, RecursiveMode::Recursive) {
if log {
println!(
"hot reloading failed to start watching {full_path:?}:\n{err:?}",
);
}
}
}
let mut rebuild = {
let aborted = aborted.clone();
let channels = channels.clone();
move || {
if let Some(rebuild_callback) = &mut rebuild_with {
if log {
println!("Rebuilding the application...");
}
let shutdown = rebuild_callback();
if shutdown {
*aborted.lock().unwrap() = true;
}
for channel in &mut *channels.lock().unwrap() {
send_msg(HotReloadMsg::Shutdown, channel);
}
return shutdown;
} else if log {
println!(
"Rebuild needed... shutting down hot reloading.\nManually rebuild the application to view futher changes."
);
}
true
}
};
for evt in rx {
if chrono::Local::now().timestamp_millis() >= last_update_time {
if let Ok(evt) = evt {
let real_paths = evt
.paths
.iter()
.filter(|path| {
// skip non rust files
matches!(
path.extension().and_then(|p| p.to_str()),
Some("rs" | "toml" | "css" | "html" | "js")
) &&
// skip excluded paths
!excluded_paths.iter().any(|p| path.starts_with(p)) &&
// respect .gitignore
!gitignore
.matched_path_or_any_parents(path, false)
.is_ignore()
})
.collect::<Vec<_>>();
// Give time for the change to take effect before reading the file
if !real_paths.is_empty() {
std::thread::sleep(std::time::Duration::from_millis(10));
}
let mut channels = channels.lock().unwrap();
for path in real_paths {
// if this file type cannot be hot reloaded, rebuild the application
if path.extension().and_then(|p| p.to_str()) != Some("rs")
&& rebuild()
{
return;
}
// find changes to the rsx in the file
match file_map
.lock()
.unwrap()
.update_rsx(path, crate_dir.as_path())
{
Ok(UpdateResult::UpdatedRsx(msgs)) => {
for msg in msgs {
let mut i = 0;
while i < channels.len() {
let channel = &mut channels[i];
if send_msg(
HotReloadMsg::UpdateTemplate(msg),
channel,
) {
i += 1;
} else {
channels.remove(i);
}
}
}
}
Ok(UpdateResult::NeedsRebuild) => {
drop(channels);
if rebuild() {
return;
}
break;
}
Err(err) => {
if log {
println!(
"hot reloading failed to update rsx:\n{err:?}"
);
}
}
}
}
}
last_update_time = chrono::Local::now().timestamp_millis();
}
}
});
}
Err(error) => println!("failed to connect to hot reloading\n{error}"),
}
}
}
fn send_msg(msg: HotReloadMsg, channel: &mut impl Write) -> bool {
if let Ok(msg) = serde_json::to_string(&msg) {
if channel.write_all(msg.as_bytes()).is_err() {
return false;
}
if channel.write_all(&[b'\n']).is_err() {
return false;
}
true
} else {
false
}
}
/// Connect to the hot reloading listener. The callback provided will be called every time a template change is detected
pub fn connect(mut f: impl FnMut(HotReloadMsg) + Send + 'static) {
std::thread::spawn(move || {

View file

@ -11,7 +11,7 @@ keywords = ["dom", "ui", "gui", "react"]
[dependencies]
dioxus-core = { workspace = true }
dioxus-rsx = { workspace = true, optional = true }
dioxus-rsx = { workspace = true, features = ["hot_reload"], optional = true }
serde = { version = "1", features = ["derive"], optional = true }
serde_repr = { version = "0.1", optional = true }
wasm-bindgen = { workspace = true, optional = true }

View file

@ -12,7 +12,7 @@ keywords = ["dom", "ui", "gui", "react"]
proc-macro = true
[dependencies]
syn = { version = "1.0.11", features = ["extra-traits", "full"] }
syn = { version = "2.0", features = ["extra-traits", "full"] }
quote = "1.0"
[dev-dependencies]

View file

@ -14,7 +14,7 @@ pub fn partial_derive_state(_: TokenStream, input: TokenStream) -> TokenStream {
let has_create_fn = impl_block
.items
.iter()
.any(|item| matches!(item, syn::ImplItem::Method(method) if method.sig.ident == "create"));
.any(|item| matches!(item, syn::ImplItem::Fn(method) if method.sig.ident == "create"));
let parent_dependencies = impl_block
.items

View file

@ -15,7 +15,7 @@ keywords = ["dom", "ui", "gui", "react", "router"]
proc-macro = true
[dependencies]
syn = { version = "1.0.11", features = ["extra-traits", "full"] }
syn = { version = "2.0", features = ["extra-traits", "full"] }
quote = "1.0"
proc-macro2 = "1.0.56"
slab = "0.4"

View file

@ -286,7 +286,7 @@ impl RouteEnum {
let mut excluded = Vec::new();
// Apply the any nesting attributes in order
for attr in &variant.attrs {
if attr.path.is_ident("nest") {
if attr.path().is_ident("nest") {
let mut children_routes = Vec::new();
{
// add all of the variants of the enum to the children_routes until we hit an end_nest
@ -294,9 +294,9 @@ impl RouteEnum {
'o: for variant in &data.variants {
children_routes.push(variant.fields.clone());
for attr in &variant.attrs {
if attr.path.is_ident("nest") {
if attr.path().is_ident("nest") {
level += 1;
} else if attr.path.is_ident("end_nest") {
} else if attr.path().is_ident("end_nest") {
level -= 1;
if level < 0 {
break 'o;
@ -341,7 +341,7 @@ impl RouteEnum {
nests.push(nest);
nest_stack.push(NestId(nest_index));
} else if attr.path.is_ident("end_nest") {
} else if attr.path().is_ident("end_nest") {
nest_stack.pop();
// pop the current nest segment off the stack and add it to the parent or the site map
if let Some(segment) = site_map_stack.pop() {
@ -360,7 +360,7 @@ impl RouteEnum {
children.push(current);
}
} else if attr.path.is_ident("layout") {
} else if attr.path().is_ident("layout") {
let parser = |input: ParseStream| {
let bang: Option<Token![!]> = input.parse().ok();
let exclude = bang.is_some();
@ -382,9 +382,9 @@ impl RouteEnum {
layouts.push(layout);
layout_stack.push(LayoutId(layout_index));
}
} else if attr.path.is_ident("end_layout") {
} else if attr.path().is_ident("end_layout") {
layout_stack.pop();
} else if attr.path.is_ident("redirect") {
} else if attr.path().is_ident("redirect") {
let parser = |input: ParseStream| {
Redirect::parse(input, nest_stack.clone(), redirects.len())
};

View file

@ -75,7 +75,7 @@ impl Route {
let route_attr = variant
.attrs
.iter()
.find(|attr| attr.path.is_ident("route"));
.find(|attr| attr.path().is_ident("route"));
let route;
let ty;
let route_name = variant.ident.clone();
@ -106,7 +106,7 @@ impl Route {
if let Some(route_attr) = variant
.attrs
.iter()
.find(|attr| attr.path.is_ident("child"))
.find(|attr| attr.path().is_ident("child"))
{
let args = route_attr.parse_args::<ChildArgs>()?;
route = args.route.value();
@ -116,7 +116,7 @@ impl Route {
let child_field = fields.named.iter().find(|f| {
f.attrs
.iter()
.any(|attr| attr.path.is_ident("child"))
.any(|attr| attr.path().is_ident("child"))
|| *f.ident.as_ref().unwrap() == "child"
});
match child_field{

View file

@ -18,7 +18,7 @@ dioxus-rsx = { workspace = true }
html_parser.workspace = true
proc-macro2 = "1.0.49"
quote = "1.0.23"
syn = { version = "1.0.107", features = ["full"] }
syn = { version = "2.0", features = ["full"] }
convert_case = "0.5.0"
# [features]

View file

@ -14,8 +14,12 @@ keywords = ["dom", "ui", "gui", "react"]
[dependencies]
proc-macro2 = { version = "1.0", features = ["span-locations"] }
dioxus-core = { workspace = true}
syn = { version = "1.0", features = ["full", "extra-traits"] }
syn = { version = "2.0", features = ["full", "extra-traits"] }
quote = { version = "1.0" }
serde = { version = "1.0", features = ["derive"] }
internment = "0.7.0"
krates = "0.12.6"
serde = { version = "1.0", features = ["derive"], optional = true }
internment = { version = "0.7.0", optional = true }
krates = { version = "0.12.6", optional = true }
[features]
hot_reload = ["krates", "internment"]
serde = ["dep:serde"]

View file

@ -1,15 +1,18 @@
use std::fmt::Display;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
/// An error produced when interperting the rsx
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum Error {
ParseError(ParseError),
RecompileRequiredError(RecompileReason),
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum RecompileReason {
Variable(String),
Expression(String),
@ -18,14 +21,16 @@ pub enum RecompileReason {
Attribute(String),
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct CodeLocation {
pub line: u32,
pub column: u32,
pub file_path: &'static str,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct ParseError {
pub message: String,
pub location: CodeLocation,

View file

@ -85,7 +85,7 @@ fn find_rsx_item(
(syn::ImplItem::Const(new_item), syn::ImplItem::Const(old_item)) => {
find_rsx_expr(&new_item.expr, &old_item.expr, rsx_calls)
}
(syn::ImplItem::Method(new_item), syn::ImplItem::Method(old_item)) => {
(syn::ImplItem::Fn(new_item), syn::ImplItem::Fn(old_item)) => {
find_rsx_block(&new_item.block, &old_item.block, rsx_calls)
}
(syn::ImplItem::Type(new_item), syn::ImplItem::Type(old_item)) => {
@ -114,7 +114,6 @@ fn find_rsx_item(
|| new_item.semi_token != old_item.semi_token
|| new_item.ident != old_item.ident
}
(syn::Item::Macro2(new_item), syn::Item::Macro2(old_item)) => old_item != new_item,
(syn::Item::Mod(new_item), syn::Item::Mod(old_item)) => {
match (&new_item.content, &old_item.content) {
(Some((_, new_items)), Some((_, old_items))) => {
@ -186,7 +185,7 @@ fn find_rsx_trait(
true
}
}
(syn::TraitItem::Method(new_item), syn::TraitItem::Method(old_item)) => {
(syn::TraitItem::Fn(new_item), syn::TraitItem::Fn(old_item)) => {
if let (Some(new_block), Some(old_block)) = (&new_item.default, &old_item.default) {
find_rsx_block(new_block, old_block, rsx_calls)
} else {
@ -239,8 +238,9 @@ fn find_rsx_stmt(
match (new_stmt, old_stmt) {
(syn::Stmt::Local(new_local), syn::Stmt::Local(old_local)) => {
(match (&new_local.init, &old_local.init) {
(Some((new_eq, new_expr)), Some((old_eq, old_expr))) => {
find_rsx_expr(new_expr, old_expr, rsx_calls) || new_eq != old_eq
(Some(new_local), Some(old_local)) => {
find_rsx_expr(&new_local.expr, &old_local.expr, rsx_calls)
|| new_local != old_local
}
(None, None) => false,
_ => true,
@ -252,11 +252,13 @@ fn find_rsx_stmt(
(syn::Stmt::Item(new_item), syn::Stmt::Item(old_item)) => {
find_rsx_item(new_item, old_item, rsx_calls)
}
(syn::Stmt::Expr(new_expr), syn::Stmt::Expr(old_expr)) => {
(syn::Stmt::Expr(new_expr, _), syn::Stmt::Expr(old_expr, _)) => {
find_rsx_expr(new_expr, old_expr, rsx_calls)
}
(syn::Stmt::Semi(new_expr, new_semi), syn::Stmt::Semi(old_expr, old_semi)) => {
find_rsx_expr(new_expr, old_expr, rsx_calls) || new_semi != old_semi
(syn::Stmt::Macro(new_macro), syn::Stmt::Macro(old_macro)) => {
find_rsx_macro(&new_macro.mac, &old_macro.mac, rsx_calls)
|| new_macro.attrs != old_macro.attrs
|| new_macro.semi_token != old_macro.semi_token
}
_ => true,
}
@ -285,12 +287,6 @@ fn find_rsx_expr(
|| new_expr.attrs != old_expr.attrs
|| new_expr.eq_token != old_expr.eq_token
}
(syn::Expr::AssignOp(new_expr), syn::Expr::AssignOp(old_expr)) => {
find_rsx_expr(&new_expr.left, &old_expr.left, rsx_calls)
|| find_rsx_expr(&new_expr.right, &old_expr.right, rsx_calls)
|| new_expr.attrs != old_expr.attrs
|| new_expr.op != old_expr.op
}
(syn::Expr::Async(new_expr), syn::Expr::Async(old_expr)) => {
find_rsx_block(&new_expr.block, &old_expr.block, rsx_calls)
|| new_expr.attrs != old_expr.attrs
@ -314,11 +310,6 @@ fn find_rsx_expr(
|| new_expr.attrs != old_expr.attrs
|| new_expr.label != old_expr.label
}
(syn::Expr::Box(new_expr), syn::Expr::Box(old_expr)) => {
find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
|| new_expr.attrs != old_expr.attrs
|| new_expr.box_token != old_expr.box_token
}
(syn::Expr::Break(new_expr), syn::Expr::Break(old_expr)) => {
match (&new_expr.expr, &old_expr.expr) {
(Some(new_inner), Some(old_inner)) => {
@ -478,7 +469,7 @@ fn find_rsx_expr(
}
(syn::Expr::Path(new_expr), syn::Expr::Path(old_expr)) => old_expr != new_expr,
(syn::Expr::Range(new_expr), syn::Expr::Range(old_expr)) => {
match (&new_expr.from, &old_expr.from) {
match (&new_expr.start, &old_expr.start) {
(Some(new_expr), Some(old_expr)) => {
if find_rsx_expr(new_expr, old_expr, rsx_calls) {
return true;
@ -487,7 +478,7 @@ fn find_rsx_expr(
(None, None) => (),
_ => return true,
}
match (&new_expr.to, &old_expr.to) {
match (&new_expr.end, &old_expr.end) {
(Some(new_inner), Some(old_inner)) => {
find_rsx_expr(new_inner, old_inner, rsx_calls)
|| new_expr.attrs != old_expr.attrs
@ -568,12 +559,6 @@ fn find_rsx_expr(
}
new_expr.attrs != old_expr.attrs || new_expr.paren_token != old_expr.paren_token
}
(syn::Expr::Type(new_expr), syn::Expr::Type(old_expr)) => {
find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
|| new_expr.attrs != old_expr.attrs
|| new_expr.colon_token != old_expr.colon_token
|| new_expr.ty != old_expr.ty
}
(syn::Expr::Unary(new_expr), syn::Expr::Unary(old_expr)) => {
find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
|| new_expr.attrs != old_expr.attrs

View file

@ -15,18 +15,22 @@
mod errors;
mod component;
mod element;
#[cfg(feature = "hot_reload")]
pub mod hot_reload;
mod ifmt;
mod node;
use std::{collections::HashMap, fmt::Debug, hash::Hash};
use std::{fmt::Debug, hash::Hash};
// Re-export the namespaces into each other
pub use component::*;
#[cfg(feature = "hot_reload")]
use dioxus_core::{Template, TemplateAttribute, TemplateNode};
pub use element::*;
#[cfg(feature = "hot_reload")]
pub use hot_reload::HotReloadingContext;
pub use ifmt::*;
#[cfg(feature = "hot_reload")]
use internment::Intern;
pub use node::*;
@ -38,6 +42,7 @@ use syn::{
Result, Token,
};
#[cfg(feature = "hot_reload")]
// interns a object into a static object, resusing the value if it already exists
fn intern<T: Eq + Hash + Send + Sync + ?Sized + 'static>(s: impl Into<Intern<T>>) -> &'static T {
s.into().as_ref()
@ -50,6 +55,7 @@ pub struct CallBody {
}
impl CallBody {
#[cfg(feature = "hot_reload")]
/// This will try to create a new template from the current body and the previous body. This will return None if the rsx has some dynamic part that has changed.
/// This function intentionally leaks memory to create a static template.
/// Keeping the template static allows us to simplify the core of dioxus and leaking memory in dev mode is less of an issue.
@ -140,6 +146,7 @@ pub struct TemplateRenderer<'a> {
}
impl<'a> TemplateRenderer<'a> {
#[cfg(feature = "hot_reload")]
fn update_template<Ctx: HotReloadingContext>(
&mut self,
previous_call: Option<CallBody>,
@ -248,14 +255,16 @@ impl<'a> ToTokens for TemplateRenderer<'a> {
}
}
#[cfg(feature = "hot_reload")]
#[derive(Default, Debug)]
struct DynamicMapping {
attribute_to_idx: HashMap<ElementAttr, Vec<usize>>,
attribute_to_idx: std::collections::HashMap<ElementAttr, Vec<usize>>,
last_attribute_idx: usize,
node_to_idx: HashMap<BodyNode, Vec<usize>>,
node_to_idx: std::collections::HashMap<BodyNode, Vec<usize>>,
last_element_idx: usize,
}
#[cfg(feature = "hot_reload")]
impl DynamicMapping {
fn from(nodes: Vec<BodyNode>) -> Self {
let mut new = Self::default();
@ -349,6 +358,7 @@ pub struct DynamicContext<'a> {
}
impl<'a> DynamicContext<'a> {
#[cfg(feature = "hot_reload")]
fn update_node<Ctx: HotReloadingContext>(
&mut self,
root: &'a BodyNode,
@ -558,6 +568,7 @@ impl<'a> DynamicContext<'a> {
}
}
#[cfg(feature = "hot_reload")]
#[test]
fn create_template() {
let input = quote! {
@ -643,6 +654,7 @@ fn create_template() {
)
}
#[cfg(feature = "hot_reload")]
#[test]
fn diff_template() {
use dioxus_core::Scope;

View file

@ -222,7 +222,7 @@ impl Parse for ForLoop {
fn parse(input: ParseStream) -> Result<Self> {
let for_token: Token![for] = input.parse()?;
let pat = input.parse()?;
let pat = Pat::parse_single(input)?;
let in_token: Token![in] = input.parse()?;
let expr: Expr = input.call(Expr::parse_without_eager_brace)?;