Hotreloading of for/if/body, formatted strings, literals, component props, nested rsx, light CLI rewrite, cli TUI (#2258)

Hotreload the contents of for loops, if chains, component bodies, props, attributes, and any literals discovered in rsx!
Add a TUI renderer to the CLI.
Improve the CLI build system to be async and parallel.
Refactor RSX to allow partial expansion of expressions.
Merge autofmt implementations for consistency.
Merge the representation of elements and components under the hood.
Add a diagnostics system for rsx for improved error messages.
Drop interprocess and move to websockets for communication between the CLI and the server.
Assign IDs to nodes and attributes in a stable way to be used in non compiler contexts.
Add hotreloading to any body of component/for loop/if chain/etc.

---------

Co-authored-by: Evan Almloff <evanalmloff@gmail.com>
Co-authored-by: Liam Mitchell <liamkarlmitchell@gmail.com>
This commit is contained in:
Jonathan Kelley 2024-07-17 19:11:18 -07:00 committed by GitHub
parent 176e67e5b7
commit 88af3e7eff
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
181 changed files with 12777 additions and 10828 deletions

View file

@ -228,13 +228,21 @@ jobs:
args: "--all --tests", args: "--all --tests",
} }
- { - {
target: x86_64-apple-darwin, target: aarch64-apple-darwin,
os: macos-latest, os: macos-latest,
toolchain: "1.79.0", toolchain: "1.79.0",
cross: false, cross: false,
command: "test", command: "test",
args: "--all --tests", args: "--all --tests",
} }
- {
target: x86_64-apple-darwin,
os: macos-13,
toolchain: "1.79.0",
cross: false,
command: "test",
args: "--all --tests",
}
- { - {
target: aarch64-apple-ios, target: aarch64-apple-ios,
os: macos-latest, os: macos-latest,

1370
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -105,20 +105,22 @@ wasm-bindgen-futures = "0.4.42"
html_parser = "0.7.0" html_parser = "0.7.0"
thiserror = "1.0.40" thiserror = "1.0.40"
prettyplease = { version = "0.2.16", features = ["verbatim"] } prettyplease = { version = "0.2.16", features = ["verbatim"] }
manganis-cli-support = { git = "https://github.com/DogeDark/dioxus-collect-assets/", features = ["html"] } manganis-cli-support = { git = "https://github.com/dioxuslabs/collect-assets/", features = ["html"] }
manganis = { version = "0.2.1" } manganis = { version = "0.2.1" }
interprocess = { version = "1.2.2", package = "interprocess-docfix" }
const_format = "0.2.32" const_format = "0.2.32"
cargo_toml = { version = "0.20.3" }
tauri-utils = { version = "=1.5.*" }
tauri-bundler = { version = "=1.4.*" }
lru = "0.12.2" lru = "0.12.2"
async-trait = "0.1.77" async-trait = "0.1.77"
axum = "0.7.0" axum = "0.7.0"
axum-server = { version = "0.6.0", default-features = false } axum-server = { version = "0.6.0", default-features = false }
tower = "0.4.13" tower = "0.4.13"
http = "1.0.0" http = "1.0.0"
notify = { version = "6.1.1" }
tower-http = "0.5.2" tower-http = "0.5.2"
hyper = "1.0.0" hyper = "1.0.0"
hyper-rustls = "0.26.0" hyper-rustls = "0.27.2"
serde_json = "1.0.61" serde_json = "1.0.61"
serde = "1.0.61" serde = "1.0.61"
syn = "2.0" syn = "2.0"
@ -127,19 +129,18 @@ proc-macro2 = "1.0"
axum_session = "0.12.1" axum_session = "0.12.1"
axum_session_auth = "0.12.1" axum_session_auth = "0.12.1"
axum-extra = "0.9.2" axum-extra = "0.9.2"
reqwest = "0.11.24" reqwest = "0.12.5"
owo-colors = "4.0.0" owo-colors = "4.0.0"
ciborium = "0.2.1" ciborium = "0.2.1"
base64 = "0.21.0" base64 = "0.22.1"
once_cell = "1.17.1" once_cell = "1.17.1"
uuid = "1.9.1"
convert_case = "0.6.0"
tokio-tungstenite = { version = "0.23.1" }
# cli, cli-config # cli, cli-config
dirs = "5.0.1" dirs = "5.0.1"
# speed up some macros by optimizing them
[profile.dev.package.insta]
opt-level = 3
[profile.dev.package.dioxus-core-macro] [profile.dev.package.dioxus-core-macro]
opt-level = 3 opt-level = 3
@ -175,7 +176,7 @@ publish = false
[dependencies] [dependencies]
manganis = { workspace = true, optional = true } manganis = { workspace = true, optional = true }
reqwest = { version = "0.11.9", features = ["json"], optional = true } reqwest = { workspace = true, features = ["json"], optional = true }
http-range = { version = "0.1.5", optional = true } http-range = { version = "0.1.5", optional = true }
ciborium = { version = "0.2.1", optional = true } ciborium = { version = "0.2.1", optional = true }
base64 = { version = "0.21.0", optional = true } base64 = { version = "0.21.0", optional = true }

View file

@ -94,7 +94,7 @@ fn app() -> Element {
} }
// Expressions can be used in element position too: // Expressions can be used in element position too:
{rsx!(p { "More templating!" })}, {rsx!(p { "More templating!" })}
// Iterators // Iterators
{(0..10).map(|i| rsx!(li { "{i}" }))} {(0..10).map(|i| rsx!(li { "{i}" }))}

View file

@ -8,7 +8,7 @@ use std::fmt::{Result, Write};
use dioxus_rsx::IfmtInput; use dioxus_rsx::IfmtInput;
use crate::{indent::IndentOptions, write_ifmt}; use crate::indent::IndentOptions;
/// The output buffer that tracks indent and string /// The output buffer that tracks indent and string
#[derive(Debug, Default)] #[derive(Debug, Default)]
@ -51,7 +51,7 @@ impl Buffer {
} }
pub fn write_text(&mut self, text: &IfmtInput) -> Result { pub fn write_text(&mut self, text: &IfmtInput) -> Result {
write_ifmt(text, &mut self.buf) write!(self.buf, "{}", text.to_string_with_quotes())
} }
} }

View file

@ -7,8 +7,15 @@ use syn::{visit::Visit, File, Macro};
type CollectedMacro<'a> = &'a Macro; type CollectedMacro<'a> = &'a Macro;
pub fn collect_from_file<'a>(file: &'a File, macros: &mut Vec<CollectedMacro<'a>>) { pub fn collect_from_file(file: &File) -> Vec<CollectedMacro<'_>> {
MacroCollector::visit_file(&mut MacroCollector { macros }, file); let mut macros = vec![];
MacroCollector::visit_file(
&mut MacroCollector {
macros: &mut macros,
},
file,
);
macros
} }
struct MacroCollector<'a, 'b> { struct MacroCollector<'a, 'b> {
@ -46,7 +53,6 @@ pub fn byte_offset(input: &str, location: LineColumn) -> usize {
fn parses_file_and_collects_rsx_macros() { fn parses_file_and_collects_rsx_macros() {
let contents = include_str!("../tests/samples/long.rsx"); let contents = include_str!("../tests/samples/long.rsx");
let parsed = syn::parse_file(contents).unwrap(); let parsed = syn::parse_file(contents).unwrap();
let mut macros = vec![]; let macros = collect_from_file(&parsed);
collect_from_file(&parsed, &mut macros);
assert_eq!(macros.len(), 3); assert_eq!(macros.len(), 3);
} }

View file

@ -1,291 +0,0 @@
use crate::{ifmt_to_string, prettier_please::unparse_expr, writer::Location, Writer};
use dioxus_rsx::*;
use quote::ToTokens;
use std::fmt::{Result, Write};
use syn::{spanned::Spanned, AngleBracketedGenericArguments};
#[derive(Debug)]
enum ShortOptimization {
// Special because we want to print the closing bracket immediately
Empty,
// Special optimization to put everything on the same line
Oneliner,
// Optimization where children flow but props remain fixed on top
PropsOnTop,
// The noisiest optimization where everything flows
NoOpt,
}
impl Writer<'_> {
pub fn write_component(
&mut self,
Component {
name,
fields,
children,
manual_props,
prop_gen_args,
key,
..
}: &Component,
) -> Result {
self.write_component_name(name, prop_gen_args)?;
// decide if we have any special optimizations
// Default with none, opt the cases in one-by-one
let mut opt_level = ShortOptimization::NoOpt;
// check if we have a lot of attributes
let attr_len = self.field_len(fields, manual_props) + self.key_len(key.as_ref());
let is_short_attr_list = attr_len < 80;
let is_small_children = self.is_short_children(children).is_some();
// if we have few attributes and a lot of children, place the attrs on top
if is_short_attr_list && !is_small_children {
opt_level = ShortOptimization::PropsOnTop;
}
// even if the attr is long, it should be put on one line
if !is_short_attr_list && (fields.len() <= 1 && manual_props.is_none()) {
if children.is_empty() {
opt_level = ShortOptimization::Oneliner;
} else {
opt_level = ShortOptimization::PropsOnTop;
}
}
// if we have few children and few attributes, make it a one-liner
if is_short_attr_list && is_small_children {
opt_level = ShortOptimization::Oneliner;
}
// If there's nothing at all, empty optimization
if fields.is_empty() && children.is_empty() && manual_props.is_none() && key.is_none() {
opt_level = ShortOptimization::Empty;
}
// multiline handlers bump everything down
if attr_len > 1000 || self.out.indent.split_line_attributes() {
opt_level = ShortOptimization::NoOpt;
}
// Useful for debugging
// dbg!(
// name.to_token_stream().to_string(),
// &opt_level,
// attr_len,
// is_short_attr_list,
// is_small_children
// );
match opt_level {
ShortOptimization::Empty => {}
ShortOptimization::Oneliner => {
write!(self.out, " ")?;
self.write_component_fields(fields, key.as_ref(), manual_props, true)?;
if !children.is_empty() && !fields.is_empty() {
write!(self.out, ", ")?;
}
for (id, child) in children.iter().enumerate() {
self.write_ident(child)?;
if id != children.len() - 1 && children.len() > 1 {
write!(self.out, ", ")?;
}
}
write!(self.out, " ")?;
}
ShortOptimization::PropsOnTop => {
write!(self.out, " ")?;
self.write_component_fields(fields, key.as_ref(), manual_props, true)?;
if !children.is_empty() && !fields.is_empty() {
write!(self.out, ",")?;
}
self.write_body_indented(children)?;
self.out.tabbed_line()?;
}
ShortOptimization::NoOpt => {
self.write_component_fields(fields, key.as_ref(), manual_props, false)?;
if !children.is_empty() && !fields.is_empty() {
write!(self.out, ",")?;
}
self.write_body_indented(children)?;
self.out.tabbed_line()?;
}
}
write!(self.out, "}}")?;
Ok(())
}
fn write_component_name(
&mut self,
name: &syn::Path,
generics: &Option<AngleBracketedGenericArguments>,
) -> Result {
let mut name = name.to_token_stream().to_string();
name.retain(|c| !c.is_whitespace());
write!(self.out, "{name}")?;
if let Some(generics) = generics {
let mut written = generics.to_token_stream().to_string();
written.retain(|c| !c.is_whitespace());
write!(self.out, "{written}")?;
}
write!(self.out, " {{")?;
Ok(())
}
fn write_component_fields(
&mut self,
fields: &[ComponentField],
key: Option<&IfmtInput>,
manual_props: &Option<syn::Expr>,
sameline: bool,
) -> Result {
let mut field_iter = fields.iter().peekable();
// write the key
if let Some(key) = key {
write!(self.out, "key: {}", ifmt_to_string(key))?;
if !fields.is_empty() {
write!(self.out, ",")?;
if sameline {
write!(self.out, " ")?;
}
}
}
while let Some(field) = field_iter.next() {
if !sameline {
self.out.indented_tabbed_line().unwrap();
}
let name = &field.name;
match &field.content {
ContentField::ManExpr(_exp) if field.can_be_shorthand() => {
write!(self.out, "{name}")?;
}
ContentField::ManExpr(exp) => {
let out = self.unparse_expr(exp);
let mut lines = out.split('\n').peekable();
let first = lines.next().unwrap();
write!(self.out, "{name}: {first}")?;
for line in lines {
self.out.new_line()?;
self.out.indented_tab()?;
write!(self.out, "{line}")?;
}
}
ContentField::Formatted(s) => {
write!(
self.out,
"{}: {}",
name,
s.source.as_ref().unwrap().to_token_stream()
)?;
}
ContentField::Shorthand(e) => {
write!(self.out, "{}", e.to_token_stream())?;
}
}
if field_iter.peek().is_some() || manual_props.is_some() {
write!(self.out, ",")?;
if sameline {
write!(self.out, " ")?;
}
}
}
if let Some(exp) = manual_props {
if !sameline {
self.out.indented_tabbed_line().unwrap();
}
self.write_manual_props(exp)?;
}
Ok(())
}
pub fn field_len(
&mut self,
fields: &[ComponentField],
manual_props: &Option<syn::Expr>,
) -> usize {
let attr_len = fields
.iter()
.map(|field| match &field.content {
ContentField::Formatted(s) => ifmt_to_string(s).len() ,
ContentField::Shorthand(e) => e.to_token_stream().to_string().len(),
ContentField::ManExpr(exp) => {
let formatted = unparse_expr(exp);
let len = if formatted.contains('\n') {
10000
} else {
formatted.len()
};
self.cached_formats.insert(Location::new(exp.span().start()) , formatted);
len
},
} + 10)
.sum::<usize>();
match manual_props {
Some(p) => {
let content = unparse_expr(p);
if content.len() + attr_len > 80 {
return 100000;
}
let mut lines = content.lines();
lines.next().unwrap();
if lines.next().is_none() {
attr_len + content.len()
} else {
100000
}
}
None => attr_len,
}
}
fn write_manual_props(&mut self, exp: &syn::Expr) -> Result {
/*
We want to normalize the expr to the appropriate indent level.
*/
let formatted = self.unparse_expr(exp);
let mut lines = formatted.lines();
let first_line = lines.next().unwrap();
write!(self.out, "..{first_line}")?;
for line in lines {
self.out.indented_tabbed_line()?;
write!(self.out, "{line}")?;
}
Ok(())
}
}

View file

@ -1,58 +0,0 @@
//! pretty printer for rsx!
use std::fmt::{Result, Write};
use proc_macro2::Span;
use crate::{collect_macros::byte_offset, Writer};
impl Writer<'_> {
pub fn write_raw_expr(&mut self, placement: Span) -> Result {
/*
We want to normalize the expr to the appropriate indent level.
*/
let start = placement.start();
let end = placement.end();
// if the expr is on one line, just write it directly
if start.line == end.line {
// split counting utf8 chars
let start = byte_offset(self.raw_src, start);
let end = byte_offset(self.raw_src, end);
let row = self.raw_src[start..end].trim();
write!(self.out, "{row}")?;
return Ok(());
}
// If the expr is multiline, we want to collect all of its lines together and write them out properly
// This involves unshifting the first line if it's aligned
let first_line = &self.src[start.line - 1];
write!(self.out, "{}", &first_line[start.column..].trim_start())?;
let prev_block_indent_level = self.out.indent.count_indents(first_line);
for (id, line) in self.src[start.line..end.line].iter().enumerate() {
writeln!(self.out)?;
// check if this is the last line
let line = {
if id == (end.line - start.line) - 1 {
&line[..end.column]
} else {
line
}
};
// trim the leading whitespace
let previous_indent = self.out.indent.count_indents(line);
let offset = previous_indent.saturating_sub(prev_block_indent_level);
let required_indent = self.out.indent_level + offset;
self.out.write_tabs(required_indent)?;
let line = line.trim_start();
write!(self.out, "{line}")?;
}
Ok(())
}
}

View file

@ -2,22 +2,16 @@
#![doc(html_logo_url = "https://avatars.githubusercontent.com/u/79236386")] #![doc(html_logo_url = "https://avatars.githubusercontent.com/u/79236386")]
#![doc(html_favicon_url = "https://avatars.githubusercontent.com/u/79236386")] #![doc(html_favicon_url = "https://avatars.githubusercontent.com/u/79236386")]
use std::fmt::{Display, Write};
use crate::writer::*; use crate::writer::*;
use collect_macros::byte_offset; use dioxus_rsx::{BodyNode, CallBody};
use dioxus_rsx::{BodyNode, CallBody, IfmtInput};
use proc_macro2::LineColumn; use proc_macro2::LineColumn;
use quote::ToTokens; use syn::{parse::Parser, ExprMacro};
use syn::{parse::Parser, ExprMacro, MacroDelimiter};
mod buffer; mod buffer;
mod collect_macros; mod collect_macros;
mod component;
mod element;
mod expr;
mod indent; mod indent;
mod prettier_please; mod prettier_please;
mod rsx_block;
mod writer; mod writer;
pub use indent::{IndentOptions, IndentType}; pub use indent::{IndentOptions, IndentType};
@ -55,9 +49,7 @@ pub fn fmt_file(contents: &str, indent: IndentOptions) -> Vec<FormattedBlock> {
let mut formatted_blocks = Vec::new(); let mut formatted_blocks = Vec::new();
let parsed = syn::parse_file(contents).unwrap(); let parsed = syn::parse_file(contents).unwrap();
let macros = collect_macros::collect_from_file(&parsed);
let mut macros = vec![];
collect_macros::collect_from_file(&parsed, &mut macros);
// No macros, no work to do // No macros, no work to do
if macros.is_empty() { if macros.is_empty() {
@ -86,7 +78,11 @@ pub fn fmt_file(contents: &str, indent: IndentOptions) -> Vec<FormattedBlock> {
.indent .indent
.count_indents(writer.src[rsx_start.line - 1]); .count_indents(writer.src[rsx_start.line - 1]);
write_body(&mut writer, &body); // TESTME
// If we fail to parse this macro then we have no choice to give up and return what we've got
if writer.write_rsx_call(&body.body).is_err() {
return formatted_blocks;
}
// writing idents leaves the final line ended at the end of the last ident // writing idents leaves the final line ended at the end of the last ident
if writer.out.buf.contains('\n') { if writer.out.buf.contains('\n') {
@ -94,23 +90,15 @@ pub fn fmt_file(contents: &str, indent: IndentOptions) -> Vec<FormattedBlock> {
writer.out.tab().unwrap(); writer.out.tab().unwrap();
} }
let span = match item.delimiter { let span = item.delimiter.span().join();
MacroDelimiter::Paren(b) => b.span, let mut formatted = writer.out.buf.split_off(0);
MacroDelimiter::Brace(b) => b.span,
MacroDelimiter::Bracket(b) => b.span,
}
.join();
let mut formatted = String::new(); let start = collect_macros::byte_offset(contents, span.start()) + 1;
let end = collect_macros::byte_offset(contents, span.end()) - 1;
std::mem::swap(&mut formatted, &mut writer.out.buf);
let start = byte_offset(contents, span.start()) + 1;
let end = byte_offset(contents, span.end()) - 1;
// Rustfmt will remove the space between the macro and the opening paren if the macro is a single expression // Rustfmt will remove the space between the macro and the opening paren if the macro is a single expression
let body_is_solo_expr = body.roots.len() == 1 let body_is_solo_expr = body.body.roots.len() == 1
&& matches!(body.roots[0], BodyNode::RawExpr(_) | BodyNode::Text(_)); && matches!(body.body.roots[0], BodyNode::RawExpr(_) | BodyNode::Text(_));
if formatted.len() <= 80 && !formatted.contains('\n') && !body_is_solo_expr { if formatted.len() <= 80 && !formatted.contains('\n') && !body_is_solo_expr {
formatted = format!(" {formatted} "); formatted = format!(" {formatted} ");
@ -132,33 +120,20 @@ pub fn fmt_file(contents: &str, indent: IndentOptions) -> Vec<FormattedBlock> {
formatted_blocks formatted_blocks
} }
/// Write a Callbody (the rsx block) to a string
///
/// If the tokens can't be formatted, this returns None. This is usually due to an incomplete expression
/// that passed partial expansion but failed to parse.
pub fn write_block_out(body: &CallBody) -> Option<String> { pub fn write_block_out(body: &CallBody) -> Option<String> {
let mut buf = Writer::new(""); let mut buf = Writer::new("");
buf.write_rsx_call(&body.body).ok()?;
write_body(&mut buf, body);
buf.consume() buf.consume()
} }
fn write_body(buf: &mut Writer, body: &CallBody) {
match body.roots.len() {
0 => {}
1 if matches!(body.roots[0], BodyNode::Text(_)) => {
write!(buf.out, " ").unwrap();
buf.write_ident(&body.roots[0]).unwrap();
write!(buf.out, " ").unwrap();
}
_ => buf.write_body_indented(&body.roots).unwrap(),
}
}
pub fn fmt_block_from_expr(raw: &str, expr: ExprMacro) -> Option<String> { pub fn fmt_block_from_expr(raw: &str, expr: ExprMacro) -> Option<String> {
let body = CallBody::parse_strict.parse2(expr.mac.tokens).unwrap(); let body = CallBody::parse_strict.parse2(expr.mac.tokens).unwrap();
let mut buf = Writer::new(raw); let mut buf = Writer::new(raw);
buf.write_rsx_call(&body.body).ok()?;
write_body(&mut buf, &body);
buf.consume() buf.consume()
} }
@ -166,11 +141,9 @@ pub fn fmt_block(block: &str, indent_level: usize, indent: IndentOptions) -> Opt
let body = CallBody::parse_strict.parse_str(block).unwrap(); let body = CallBody::parse_strict.parse_str(block).unwrap();
let mut buf = Writer::new(block); let mut buf = Writer::new(block);
buf.out.indent = indent; buf.out.indent = indent;
buf.out.indent_level = indent_level; buf.out.indent_level = indent_level;
buf.write_rsx_call(&body.body).ok()?;
write_body(&mut buf, &body);
// writing idents leaves the final line ended at the end of the last ident // writing idents leaves the final line ended at the end of the last ident
if buf.out.buf.contains('\n') { if buf.out.buf.contains('\n') {
@ -180,16 +153,6 @@ pub fn fmt_block(block: &str, indent_level: usize, indent: IndentOptions) -> Opt
buf.consume() buf.consume()
} }
pub fn apply_format(input: &str, block: FormattedBlock) -> String {
let start = block.start;
let end = block.end;
let (left, _) = input.split_at(start);
let (_, right) = input.split_at(end);
format!("{}{}{}", left, block.formatted, right)
}
// Apply all the blocks // Apply all the blocks
pub fn apply_formats(input: &str, blocks: Vec<FormattedBlock>) -> String { pub fn apply_formats(input: &str, blocks: Vec<FormattedBlock>) -> String {
let mut out = String::new(); let mut out = String::new();
@ -213,24 +176,3 @@ pub fn apply_formats(input: &str, blocks: Vec<FormattedBlock>) -> String {
out out
} }
struct DisplayIfmt<'a>(&'a IfmtInput);
impl Display for DisplayIfmt<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let inner_tokens = self.0.source.as_ref().unwrap().to_token_stream();
inner_tokens.fmt(f)
}
}
pub(crate) fn ifmt_to_string(input: &IfmtInput) -> String {
let mut buf = String::new();
let display = DisplayIfmt(input);
write!(&mut buf, "{}", display).unwrap();
buf
}
pub(crate) fn write_ifmt(input: &IfmtInput, writable: &mut impl Write) -> std::fmt::Result {
let display = DisplayIfmt(input);
write!(writable, "{}", display)
}

View file

@ -1,4 +1,4 @@
use crate::{ifmt_to_string, prettier_please::unparse_expr, Writer}; use crate::{prettier_please::unparse_expr, Writer};
use dioxus_rsx::*; use dioxus_rsx::*;
use proc_macro2::Span; use proc_macro2::Span;
use quote::ToTokens; use quote::ToTokens;
@ -30,47 +30,24 @@ enum ShortOptimization {
NoOpt, NoOpt,
} }
/*
// whitespace
div {
// some whitespace
class: "asdasd"
// whjiot
asdasd // whitespace
}
*/
impl Writer<'_> { impl Writer<'_> {
pub fn write_element(&mut self, el: &Element) -> Result { /// Basically elements and components are the same thing
let Element { ///
name, /// This writes the contents out for both in one function, centralizing the annoying logic like
key, /// key handling, breaks, closures, etc
attributes, pub fn write_rsx_block(
children, &mut self,
brace, attributes: &[Attribute],
.. spreads: &[Spread],
} = el; children: &[BodyNode],
brace: &Brace,
let brace = brace ) -> Result {
.as_ref()
.expect("braces should always be present in strict mode");
/*
1. Write the tag
2. Write the key
3. Write the attributes
4. Write the children
*/
write!(self.out, "{name} {{")?;
// decide if we have any special optimizations // decide if we have any special optimizations
// Default with none, opt the cases in one-by-one // Default with none, opt the cases in one-by-one
let mut opt_level = ShortOptimization::NoOpt; let mut opt_level = ShortOptimization::NoOpt;
// check if we have a lot of attributes // check if we have a lot of attributes
let attr_len = self.is_short_attrs(attributes); let attr_len = self.is_short_attrs(attributes, spreads);
let is_short_attr_list = (attr_len + self.out.indent_level * 4) < 80; let is_short_attr_list = (attr_len + self.out.indent_level * 4) < 80;
let children_len = self.is_short_children(children); let children_len = self.is_short_children(children);
let is_small_children = children_len.is_some(); let is_small_children = children_len.is_some();
@ -82,7 +59,7 @@ impl Writer<'_> {
// even if the attr is long, it should be put on one line // even if the attr is long, it should be put on one line
// However if we have childrne we need to just spread them out for readability // However if we have childrne we need to just spread them out for readability
if !is_short_attr_list && attributes.len() <= 1 { if !is_short_attr_list && attributes.len() <= 1 && spreads.is_empty() {
if children.is_empty() { if children.is_empty() {
opt_level = ShortOptimization::Oneliner; opt_level = ShortOptimization::Oneliner;
} else { } else {
@ -100,7 +77,7 @@ impl Writer<'_> {
} }
// If there's nothing at all, empty optimization // If there's nothing at all, empty optimization
if attributes.is_empty() && children.is_empty() && key.is_none() { if attributes.is_empty() && children.is_empty() && spreads.is_empty() {
opt_level = ShortOptimization::Empty; opt_level = ShortOptimization::Empty;
// Write comments if they exist // Write comments if they exist
@ -112,49 +89,42 @@ impl Writer<'_> {
opt_level = ShortOptimization::NoOpt; opt_level = ShortOptimization::NoOpt;
} }
let has_children = !children.is_empty();
match opt_level { match opt_level {
ShortOptimization::Empty => {} ShortOptimization::Empty => {}
ShortOptimization::Oneliner => { ShortOptimization::Oneliner => {
write!(self.out, " ")?; write!(self.out, " ")?;
self.write_attributes(brace, attributes, key, true)?; self.write_attributes(attributes, spreads, true, brace, has_children)?;
if !children.is_empty() && (!attributes.is_empty() || key.is_some()) { if !children.is_empty() && !attributes.is_empty() {
write!(self.out, ", ")?; write!(self.out, " ")?;
} }
for (id, child) in children.iter().enumerate() { for child in children.iter() {
self.write_ident(child)?; self.write_ident(child)?;
if id != children.len() - 1 && children.len() > 1 {
write!(self.out, ", ")?;
}
} }
write!(self.out, " ")?; write!(self.out, " ")?;
} }
ShortOptimization::PropsOnTop => { ShortOptimization::PropsOnTop => {
if !attributes.is_empty() || key.is_some() { if !attributes.is_empty() {
write!(self.out, " ")?; write!(self.out, " ")?;
} }
self.write_attributes(brace, attributes, key, true)?;
if !children.is_empty() && (!attributes.is_empty() || key.is_some()) { self.write_attributes(attributes, spreads, true, brace, has_children)?;
write!(self.out, ",")?;
}
if !children.is_empty() { if !children.is_empty() {
self.write_body_indented(children)?; self.write_body_indented(children)?;
} }
self.out.tabbed_line()?; self.out.tabbed_line()?;
} }
ShortOptimization::NoOpt => { ShortOptimization::NoOpt => {
self.write_attributes(brace, attributes, key, false)?; self.write_attributes(attributes, spreads, false, brace, has_children)?;
if !children.is_empty() && (!attributes.is_empty() || key.is_some()) {
write!(self.out, ",")?;
}
if !children.is_empty() { if !children.is_empty() {
self.write_body_indented(children)?; self.write_body_indented(children)?;
@ -164,76 +134,99 @@ impl Writer<'_> {
} }
} }
write!(self.out, "}}")?;
Ok(()) Ok(())
} }
fn write_attributes( fn write_attributes(
&mut self, &mut self,
attributes: &[Attribute],
spreads: &[Spread],
props_same_line: bool,
brace: &Brace, brace: &Brace,
attributes: &[AttributeType], has_children: bool,
key: &Option<IfmtInput>,
sameline: bool,
) -> Result { ) -> Result {
let mut attr_iter = attributes.iter().peekable(); enum AttrType<'a> {
Attr(&'a Attribute),
if let Some(key) = key { Spread(&'a Spread),
if !sameline {
self.out.indented_tabbed_line()?;
}
write!(self.out, "key: {}", ifmt_to_string(key))?;
if !attributes.is_empty() {
write!(self.out, ",")?;
if sameline {
write!(self.out, " ")?;
}
}
} }
let mut attr_iter = attributes
.iter()
.map(AttrType::Attr)
.chain(spreads.iter().map(AttrType::Spread))
.peekable();
while let Some(attr) = attr_iter.next() { while let Some(attr) = attr_iter.next() {
self.out.indent_level += 1; self.out.indent_level += 1;
if !sameline { if !props_same_line {
self.write_attr_comments(brace, attr.start())?; self.write_attr_comments(
brace,
match attr {
AttrType::Attr(attr) => attr.span(),
AttrType::Spread(attr) => attr.expr.span(),
},
)?;
} }
self.out.indent_level -= 1; self.out.indent_level -= 1;
if !sameline { if !props_same_line {
self.out.indented_tabbed_line()?; self.out.indented_tabbed_line()?;
} }
self.write_attribute(attr)?; match attr {
AttrType::Attr(attr) => self.write_attribute(attr)?,
AttrType::Spread(attr) => self.write_spread_attribute(&attr.expr)?,
}
if attr_iter.peek().is_some() { if attr_iter.peek().is_some() {
write!(self.out, ",")?; write!(self.out, ",")?;
if sameline { if props_same_line {
write!(self.out, " ")?; write!(self.out, " ")?;
} }
} }
} }
Ok(()) let has_attributes = !attributes.is_empty() || !spreads.is_empty();
}
fn write_attribute_name(&mut self, attr: &ElementAttrName) -> Result { if has_attributes && has_children {
match attr { write!(self.out, ",")?;
ElementAttrName::BuiltIn(name) => {
write!(self.out, "{}", name)?;
}
ElementAttrName::Custom(name) => {
write!(self.out, "{}", name.to_token_stream())?;
}
} }
Ok(()) Ok(())
} }
fn write_attribute_value(&mut self, value: &ElementAttrValue) -> Result { fn write_attribute(&mut self, attr: &Attribute) -> Result {
self.write_attribute_name(&attr.name)?;
// if the attribute is a shorthand, we don't need to write the colon, just the name
if !attr.can_be_shorthand() {
write!(self.out, ": ")?;
self.write_attribute_value(&attr.value)?;
}
Ok(())
}
fn write_attribute_name(&mut self, attr: &AttributeName) -> Result {
match attr {
AttributeName::BuiltIn(name) => {
write!(self.out, "{}", name)?;
}
AttributeName::Custom(name) => {
write!(self.out, "{}", name.to_token_stream())?;
}
AttributeName::Spread(_) => unreachable!(),
}
Ok(())
}
fn write_attribute_value(&mut self, value: &AttributeValue) -> Result {
match value { match value {
ElementAttrValue::AttrOptionalExpr { condition, value } => { AttributeValue::AttrOptionalExpr { condition, value } => {
write!( write!(
self.out, self.out,
"if {condition} {{ ", "if {condition} {{ ",
@ -242,56 +235,47 @@ impl Writer<'_> {
self.write_attribute_value(value)?; self.write_attribute_value(value)?;
write!(self.out, " }}")?; write!(self.out, " }}")?;
} }
ElementAttrValue::AttrLiteral(value) => { AttributeValue::AttrLiteral(value) => {
write!(self.out, "{value}", value = ifmt_to_string(value))?; write!(self.out, "{value}")?;
} }
ElementAttrValue::Shorthand(value) => { AttributeValue::Shorthand(value) => {
write!(self.out, "{value}",)?; write!(self.out, "{value}")?;
}
AttributeValue::EventTokens(closure) => {
self.write_partial_closure(closure)?;
} }
ElementAttrValue::AttrExpr(value) => {
let out = self.unparse_expr(value);
let mut lines = out.split('\n').peekable();
let first = lines.next().unwrap();
// a one-liner for whatever reason AttributeValue::AttrExpr(value) => {
// Does not need a new line let Ok(expr) = value.as_expr() else {
return Err(fmt::Error);
};
let pretty_expr = self.retrieve_formatted_expr(&expr).to_string();
self.write_mulitiline_tokens(pretty_expr)?;
}
}
Ok(())
}
fn write_mulitiline_tokens(&mut self, out: String) -> Result {
let mut lines = out.split('\n').peekable();
let first = lines.next().unwrap();
// a one-liner for whatever reason
// Does not need a new line
if lines.peek().is_none() {
write!(self.out, "{first}")?;
} else {
writeln!(self.out, "{first}")?;
while let Some(line) = lines.next() {
self.out.indented_tab()?;
write!(self.out, "{line}")?;
if lines.peek().is_none() { if lines.peek().is_none() {
write!(self.out, "{first}")?; write!(self.out, "")?;
} else { } else {
writeln!(self.out, "{first}")?; writeln!(self.out)?;
while let Some(line) = lines.next() {
self.out.indented_tab()?;
write!(self.out, "{line}")?;
if lines.peek().is_none() {
write!(self.out, "")?;
} else {
writeln!(self.out)?;
}
}
}
}
ElementAttrValue::EventTokens(tokens) => {
let out = self.retrieve_formatted_expr(tokens).to_string();
let mut lines = out.split('\n').peekable();
let first = lines.next().unwrap();
// a one-liner for whatever reason
// Does not need a new line
if lines.peek().is_none() {
write!(self.out, "{first}")?;
} else {
writeln!(self.out, "{first}")?;
while let Some(line) = lines.next() {
self.out.indented_tab()?;
write!(self.out, "{line}")?;
if lines.peek().is_none() {
write!(self.out, "")?;
} else {
writeln!(self.out)?;
}
}
} }
} }
} }
@ -299,28 +283,34 @@ impl Writer<'_> {
Ok(()) Ok(())
} }
fn write_attribute(&mut self, attr: &AttributeType) -> Result { /// Write out the special PartialClosure type from the rsx crate
match attr { /// Basically just write token by token until we hit the block and then try and format *that*
AttributeType::Named(attr) => self.write_named_attribute(attr), /// We can't just ToTokens
AttributeType::Spread(attr) => self.write_spread_attribute(attr), fn write_partial_closure(&mut self, closure: &PartialClosure) -> Result {
} // Write the pretty version of the closure
} if let Ok(expr) = closure.as_expr() {
let pretty_expr = self.retrieve_formatted_expr(&expr).to_string();
fn write_named_attribute(&mut self, attr: &ElementAttrNamed) -> Result { self.write_mulitiline_tokens(pretty_expr)?;
self.write_attribute_name(&attr.attr.name)?; return Ok(());
// if the attribute is a shorthand, we don't need to write the colon, just the name
if !attr.attr.can_be_shorthand() {
write!(self.out, ": ")?;
self.write_attribute_value(&attr.attr.value)?;
} }
Ok(()) // If we can't parse the closure, writing it is also a failure
// rustfmt won't be able to parse it either so no point in trying
Err(fmt::Error)
} }
fn write_spread_attribute(&mut self, attr: &Expr) -> Result { fn write_spread_attribute(&mut self, attr: &Expr) -> Result {
write!(self.out, "..")?; let formatted = unparse_expr(attr);
write!(self.out, "{}", unparse_expr(attr))?;
let mut lines = formatted.lines();
let first_line = lines.next().unwrap();
write!(self.out, "..{first_line}")?;
for line in lines {
self.out.indented_tabbed_line()?;
write!(self.out, "{line}")?;
}
Ok(()) Ok(())
} }
@ -363,10 +353,10 @@ impl Writer<'_> {
} }
match children { match children {
[BodyNode::Text(ref text)] => Some(ifmt_to_string(text).len()), [BodyNode::Text(ref text)] => Some(text.input.to_string_with_quotes().len()),
// TODO: let rawexprs to be inlined // TODO: let rawexprs to be inlined
[BodyNode::RawExpr(ref expr)] => get_expr_length(expr), [BodyNode::RawExpr(ref expr)] => Some(get_expr_length(expr.span())),
// TODO: let rawexprs to be inlined // TODO: let rawexprs to be inlined
[BodyNode::Component(ref comp)] if comp.fields.is_empty() => Some( [BodyNode::Component(ref comp)] if comp.fields.is_empty() => Some(
@ -430,12 +420,11 @@ impl Writer<'_> {
} }
} }
fn get_expr_length(expr: &impl Spanned) -> Option<usize> { fn get_expr_length(span: Span) -> usize {
let span = expr.span();
let (start, end) = (span.start(), span.end()); let (start, end) = (span.start(), span.end());
if start.line == end.line { if start.line == end.line {
Some(end.column - start.column) end.column - start.column
} else { } else {
None 10000
} }
} }

View file

@ -1,4 +1,9 @@
use dioxus_rsx::{AttributeType, BodyNode, ElementAttrValue, ForLoop, IfChain, IfmtInput}; use crate::buffer::Buffer;
use crate::collect_macros::byte_offset;
use dioxus_rsx::{
Attribute as AttributeType, AttributeName, AttributeValue as ElementAttrValue, BodyNode,
Component, Element, ForLoop, IfChain, Spread, TemplateBody,
};
use proc_macro2::{LineColumn, Span}; use proc_macro2::{LineColumn, Span};
use quote::ToTokens; use quote::ToTokens;
use std::{ use std::{
@ -7,32 +12,15 @@ use std::{
}; };
use syn::{spanned::Spanned, token::Brace, Expr}; use syn::{spanned::Spanned, token::Brace, Expr};
use crate::buffer::Buffer;
use crate::ifmt_to_string;
#[derive(Debug)] #[derive(Debug)]
pub struct Writer<'a> { pub struct Writer<'a> {
pub raw_src: &'a str, pub raw_src: &'a str,
pub src: Vec<&'a str>, pub src: Vec<&'a str>,
pub cached_formats: HashMap<Location, String>, pub cached_formats: HashMap<LineColumn, String>,
pub comments: VecDeque<usize>, pub comments: VecDeque<usize>,
pub out: Buffer, pub out: Buffer,
} }
#[derive(Clone, Copy, Hash, PartialEq, Eq, Debug)]
pub struct Location {
pub line: usize,
pub col: usize,
}
impl Location {
pub fn new(start: LineColumn) -> Self {
Self {
line: start.line,
col: start.column,
}
}
}
impl<'a> Writer<'a> { impl<'a> Writer<'a> {
pub fn new(raw_src: &'a str) -> Self { pub fn new(raw_src: &'a str) -> Self {
let src = raw_src.lines().collect(); let src = raw_src.lines().collect();
@ -45,20 +33,143 @@ impl<'a> Writer<'a> {
} }
} }
pub fn consume(self) -> Option<String> {
Some(self.out.buf)
}
pub fn write_rsx_call(&mut self, body: &TemplateBody) -> Result {
match body.roots.len() {
0 => {}
1 if matches!(body.roots[0], BodyNode::Text(_)) => {
write!(self.out, " ")?;
self.write_ident(&body.roots[0])?;
write!(self.out, " ")?;
}
_ => self.write_body_indented(&body.roots)?,
}
Ok(())
}
// Expects to be written directly into place // Expects to be written directly into place
pub fn write_ident(&mut self, node: &BodyNode) -> Result { pub fn write_ident(&mut self, node: &BodyNode) -> Result {
match node { match node {
BodyNode::Element(el) => self.write_element(el), BodyNode::Element(el) => self.write_element(el),
BodyNode::Component(component) => self.write_component(component), BodyNode::Component(component) => self.write_component(component),
BodyNode::Text(text) => self.out.write_text(text), BodyNode::Text(text) => self.out.write_text(&text.input),
BodyNode::RawExpr(exp) => self.write_raw_expr(exp.span()), BodyNode::RawExpr(exp) => self.write_raw_expr(exp.span()),
BodyNode::ForLoop(forloop) => self.write_for_loop(forloop), BodyNode::ForLoop(forloop) => self.write_for_loop(forloop),
BodyNode::IfChain(ifchain) => self.write_if_chain(ifchain), BodyNode::IfChain(ifchain) => self.write_if_chain(ifchain),
} }
} }
pub fn consume(self) -> Option<String> { pub fn write_element(&mut self, el: &Element) -> Result {
Some(self.out.buf) let Element {
name,
raw_attributes: attributes,
children,
spreads,
brace,
..
} = el;
/*
1. Write the tag
2. Write the key
3. Write the attributes
4. Write the children
*/
write!(self.out, "{name} {{")?;
self.write_rsx_block(attributes, spreads, children, brace)?;
write!(self.out, "}}")?;
Ok(())
}
pub fn write_component(
&mut self,
Component {
name,
fields,
children,
generics,
spreads,
brace,
..
}: &Component,
) -> Result {
// Write the path by to_tokensing it and then removing all whitespace
let mut name = name.to_token_stream().to_string();
name.retain(|c| !c.is_whitespace());
write!(self.out, "{name}")?;
// Same idea with generics, write those via the to_tokens method and then remove all whitespace
if let Some(generics) = generics {
let mut written = generics.to_token_stream().to_string();
written.retain(|c| !c.is_whitespace());
write!(self.out, "{written}")?;
}
write!(self.out, " {{")?;
self.write_rsx_block(fields, spreads, &children.roots, brace)?;
write!(self.out, "}}")?;
Ok(())
}
pub fn write_raw_expr(&mut self, placement: Span) -> Result {
/*
We want to normalize the expr to the appropriate indent level.
*/
let start = placement.start();
let end = placement.end();
// if the expr is on one line, just write it directly
if start.line == end.line {
// split counting utf8 chars
let start = byte_offset(self.raw_src, start);
let end = byte_offset(self.raw_src, end);
let row = self.raw_src[start..end].trim();
write!(self.out, "{row}")?;
return Ok(());
}
// If the expr is multiline, we want to collect all of its lines together and write them out properly
// This involves unshifting the first line if it's aligned
let first_line = &self.src[start.line - 1];
write!(self.out, "{}", &first_line[start.column..].trim_start())?;
let prev_block_indent_level = self.out.indent.count_indents(first_line);
for (id, line) in self.src[start.line..end.line].iter().enumerate() {
writeln!(self.out)?;
// check if this is the last line
let line = {
if id == (end.line - start.line) - 1 {
&line[..end.column]
} else {
line
}
};
// trim the leading whitespace
let previous_indent = self.out.indent.count_indents(line);
let offset = previous_indent.saturating_sub(prev_block_indent_level);
let required_indent = self.out.indent_level + offset;
self.out.write_tabs(required_indent)?;
let line = line.trim_start();
write!(self.out, "{line}")?;
}
Ok(())
} }
pub fn write_attr_comments(&mut self, brace: &Brace, attr_span: Span) -> Result { pub fn write_attr_comments(&mut self, brace: &Brace, attr_span: Span) -> Result {
@ -123,28 +234,13 @@ impl<'a> Writer<'a> {
} }
pub fn write_body_no_indent(&mut self, children: &[BodyNode]) -> Result { pub fn write_body_no_indent(&mut self, children: &[BodyNode]) -> Result {
let last_child = children.len(); for child in children {
let iter = children.iter().peekable().enumerate();
for (idx, child) in iter {
if self.current_span_is_primary(child.span()) { if self.current_span_is_primary(child.span()) {
self.write_comments(child.span())?; self.write_comments(child.span())?;
} };
match child { self.out.tabbed_line()?;
// check if the expr is a short self.write_ident(child)?;
BodyNode::RawExpr { .. } => {
self.out.tabbed_line()?;
self.write_ident(child)?;
if idx != last_child - 1 {
write!(self.out, ",")?;
}
}
_ => {
self.out.tabbed_line()?;
self.write_ident(child)?;
}
}
} }
Ok(()) Ok(())
@ -155,31 +251,35 @@ impl<'a> Writer<'a> {
ElementAttrValue::AttrOptionalExpr { condition, value } => { ElementAttrValue::AttrOptionalExpr { condition, value } => {
let condition_len = self.retrieve_formatted_expr(condition).len(); let condition_len = self.retrieve_formatted_expr(condition).len();
let value_len = self.attr_value_len(value); let value_len = self.attr_value_len(value);
condition_len + value_len + 6 condition_len + value_len + 6
} }
ElementAttrValue::AttrLiteral(lit) => ifmt_to_string(lit).len(), ElementAttrValue::AttrLiteral(lit) => lit.to_string().len(),
ElementAttrValue::Shorthand(expr) => expr.span().line_length(), ElementAttrValue::Shorthand(expr) => expr.span().line_length(),
ElementAttrValue::AttrExpr(expr) => { ElementAttrValue::AttrExpr(expr) => expr
let out = self.retrieve_formatted_expr(expr); .as_expr()
if out.contains('\n') { .map(|expr| self.attr_expr_len(&expr))
100000 .unwrap_or(100000),
} else { ElementAttrValue::EventTokens(closure) => closure
out.len() .as_expr()
} .map(|expr| self.attr_expr_len(&expr))
} .unwrap_or(100000),
ElementAttrValue::EventTokens(tokens) => {
let as_str = self.retrieve_formatted_expr(tokens);
if as_str.contains('\n') {
100000
} else {
as_str.len()
}
}
} }
} }
pub(crate) fn is_short_attrs(&mut self, attributes: &[AttributeType]) -> usize { fn attr_expr_len(&mut self, expr: &Expr) -> usize {
let out = self.retrieve_formatted_expr(expr);
if out.contains('\n') {
100000
} else {
out.len()
}
}
pub(crate) fn is_short_attrs(
&mut self,
attributes: &[AttributeType],
spreads: &[Spread],
) -> usize {
let mut total = 0; let mut total = 0;
// No more than 3 attributes before breaking the line // No more than 3 attributes before breaking the line
@ -188,8 +288,8 @@ impl<'a> Writer<'a> {
} }
for attr in attributes { for attr in attributes {
if self.current_span_is_primary(attr.start()) { if self.current_span_is_primary(attr.span()) {
'line: for line in self.src[..attr.start().start().line - 1].iter().rev() { 'line: for line in self.src[..attr.span().start().line - 1].iter().rev() {
match (line.trim().starts_with("//"), line.is_empty()) { match (line.trim().starts_with("//"), line.is_empty()) {
(true, _) => return 100000, (true, _) => return 100000,
(_, true) => continue 'line, (_, true) => continue 'line,
@ -198,39 +298,37 @@ impl<'a> Writer<'a> {
} }
} }
match attr { let name_len = match &attr.name {
AttributeType::Named(attr) => { AttributeName::BuiltIn(name) => {
let name_len = match &attr.attr.name { let name = name.to_string();
dioxus_rsx::ElementAttrName::BuiltIn(name) => { name.len()
let name = name.to_string();
name.len()
}
dioxus_rsx::ElementAttrName::Custom(name) => name.value().len() + 2,
};
total += name_len;
//
if attr.attr.value.is_shorthand() {
total += 2;
} else {
total += self.attr_value_len(&attr.attr.value);
}
}
AttributeType::Spread(expr) => {
let expr_len = self.retrieve_formatted_expr(expr).len();
total += expr_len + 3;
} }
AttributeName::Custom(name) => name.value().len() + 2,
AttributeName::Spread(_) => unreachable!(),
}; };
total += name_len;
//
if attr.can_be_shorthand() {
total += 2;
} else {
total += self.attr_value_len(&attr.value);
}
total += 6; total += 6;
} }
for spread in spreads {
let expr_len = self.retrieve_formatted_expr(&spread.expr).len();
total += expr_len + 3;
}
total total
} }
#[allow(clippy::map_entry)] #[allow(clippy::map_entry)]
pub fn retrieve_formatted_expr(&mut self, expr: &Expr) -> &str { pub fn retrieve_formatted_expr(&mut self, expr: &Expr) -> &str {
let loc = Location::new(expr.span().start()); let loc = expr.span().start();
if !self.cached_formats.contains_key(&loc) { if !self.cached_formats.contains_key(&loc) {
let formatted = self.unparse_expr(expr); let formatted = self.unparse_expr(expr);
@ -254,7 +352,7 @@ impl<'a> Writer<'a> {
return Ok(()); return Ok(());
} }
self.write_body_indented(&forloop.body)?; self.write_body_indented(&forloop.body.roots)?;
self.out.tabbed_line()?; self.out.tabbed_line()?;
write!(self.out, "}}")?; write!(self.out, "}}")?;
@ -280,7 +378,7 @@ impl<'a> Writer<'a> {
self.write_inline_expr(cond)?; self.write_inline_expr(cond)?;
self.write_body_indented(then_branch)?; self.write_body_indented(&then_branch.roots)?;
if let Some(else_if_branch) = else_if_branch { if let Some(else_if_branch) = else_if_branch {
// write the closing bracket and else // write the closing bracket and else
@ -292,7 +390,7 @@ impl<'a> Writer<'a> {
self.out.tabbed_line()?; self.out.tabbed_line()?;
write!(self.out, "}} else {{")?; write!(self.out, "}} else {{")?;
self.write_body_indented(else_branch)?; self.write_body_indented(&else_branch.roots)?;
branch = None; branch = None;
} else { } else {
branch = None; branch = None;
@ -329,13 +427,6 @@ impl<'a> Writer<'a> {
Ok(()) Ok(())
} }
pub(crate) fn key_len(&self, key: Option<&IfmtInput>) -> usize {
match key {
Some(key) => ifmt_to_string(key).len() + 5,
None => 0,
}
}
} }
pub(crate) trait SpanLength { pub(crate) trait SpanLength {

View file

@ -16,8 +16,8 @@ rsx! {
div { "hi" } div { "hi" }
h2 { class: "asd" } h2 { class: "asd" }
} }
Component::<Generic> {}
Component {} Component {}
Component<Generic> {}
} }
// Long attributes // Long attributes

View file

@ -8,17 +8,17 @@ rsx! {
"hello world" "hello world"
// Comments // Comments
{expr1}, {expr1}
// Comments // Comments
{expr2}, {expr2}
// Comments // Comments
// Comments // Comments
// Comments // Comments
// Comments // Comments
// Comments // Comments
{expr3}, {expr3}
div { div {
// todo some work in here // todo some work in here

View file

@ -12,7 +12,11 @@ pub(crate) fn Nav() -> Element {
let mut sidebar = SHOW_SIDEBAR.write(); let mut sidebar = SHOW_SIDEBAR.write();
*sidebar = !*sidebar; *sidebar = !*sidebar;
}, },
MaterialIcon { name: "menu", size: 24, color: MaterialIconColor::Dark } MaterialIcon {
name: "menu",
size: 24,
color: MaterialIconColor::Dark
}
} }
div { class: "flex z-50 md:flex-1 px-2", LinkList {} } div { class: "flex z-50 md:flex-1 px-2", LinkList {} }

View file

@ -34,7 +34,7 @@ pub fn Explainer<'a>(
rsx! { rsx! {
div { class: "flex flex-wrap items-center dark:text-white py-16 border-t font-light", div { class: "flex flex-wrap items-center dark:text-white py-16 border-t font-light",
{left}, {left}
{right} {right}
} }
} }

View file

@ -12,7 +12,11 @@ pub(crate) fn Nav() -> Element {
let mut sidebar = SHOW_SIDEBAR.write(); let mut sidebar = SHOW_SIDEBAR.write();
*sidebar = !*sidebar; *sidebar = !*sidebar;
}, },
MaterialIcon { name: "menu", size: 24, color: MaterialIconColor::Dark } MaterialIcon {
name: "menu",
size: 24,
color: MaterialIconColor::Dark
}
} }
div { class: "flex z-50 md:flex-1 px-2", LinkList {} } div { class: "flex z-50 md:flex-1 px-2", LinkList {} }
} }

View file

@ -1,7 +1,7 @@
fn ItWroks() { fn ItWroks() {
rsx! { rsx! {
div { class: "flex flex-wrap items-center dark:text-white py-16 border-t font-light", div { class: "flex flex-wrap items-center dark:text-white py-16 border-t font-light",
{left}, {left}
{right} {right}
} }
} }

View file

@ -1,7 +1,7 @@
fn ItWroks() { fn ItWroks() {
rsx! { rsx! {
div { class: "flex flex-wrap items-center dark:text-white py-16 border-t font-light", div { class: "flex flex-wrap items-center dark:text-white py-16 border-t font-light",
{left}, {left}
{right} {right}
} }
} }

View file

@ -13,20 +13,21 @@ clap = { version = "4.2", features = ["derive"], optional = true }
serde = { version = "1.0.136", features = ["derive"] } serde = { version = "1.0.136", features = ["derive"] }
serde_json = "1.0.79" serde_json = "1.0.79"
toml = { workspace = true, optional = true } toml = { workspace = true, optional = true }
cargo_toml = { version = "0.18.0", optional = true } cargo_toml = { workspace = true, optional = true }
once_cell = "1.18.0" once_cell = "1.18.0"
tracing = { workspace = true } tracing = { workspace = true }
# bundling # bundling
tauri-bundler = { version = "=1.4.0", features = ["native-tls-vendored"], optional = true } tauri-bundler = { workspace = true, optional = true }
tauri-utils = { version = "=1.5.*", optional = true } tauri-utils = { workspace = true, optional = true }
dirs = { workspace = true, optional = true } dirs = { workspace = true, optional = true }
[features] [features]
default = ["read-config"] default = ["read-config"]
cli = ["dep:tauri-bundler", "dep:tauri-utils", "dep:clap", "dep:toml", "dep:cargo_toml", "dep:dirs"] cli = ["dep:tauri-bundler", "dep:tauri-utils", "read-from-args", "dep:toml", "dep:cargo_toml", "dep:dirs"]
read-config = [] read-config = []
read-from-args = ["dep:clap"]
[package.metadata.docs.rs] [package.metadata.docs.rs]
cargo-args = ["-Zunstable-options", "-Zrustdoc-scrape-examples"] cargo-args = ["-Zunstable-options", "-Zrustdoc-scrape-examples"]

View file

@ -1,115 +0,0 @@
//! Utilities for working with cargo and rust files
use std::error::Error;
use std::{
env,
fmt::{Display, Formatter},
fs,
path::{Path, PathBuf},
process::Command,
str,
};
#[derive(Debug, Clone)]
pub struct CargoError {
msg: String,
}
impl CargoError {
pub fn new(msg: String) -> Self {
Self { msg }
}
}
impl Display for CargoError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "CargoError: {}", self.msg)
}
}
impl Error for CargoError {}
/// How many parent folders are searched for a `Cargo.toml`
const MAX_ANCESTORS: u32 = 10;
/// Some fields parsed from `cargo metadata` command
pub struct Metadata {
pub workspace_root: PathBuf,
pub target_directory: PathBuf,
}
/// Returns the root of the crate that the command is run from
///
/// If the command is run from the workspace root, this will return the top-level Cargo.toml
pub fn crate_root() -> Result<PathBuf, CargoError> {
// From the current directory we work our way up, looking for `Cargo.toml`
env::current_dir()
.ok()
.and_then(|mut wd| {
for _ in 0..MAX_ANCESTORS {
if contains_manifest(&wd) {
return Some(wd);
}
if !wd.pop() {
break;
}
}
None
})
.ok_or_else(|| {
CargoError::new("Failed to find directory containing Cargo.toml".to_string())
})
}
/// Checks if the directory contains `Cargo.toml`
fn contains_manifest(path: &Path) -> bool {
fs::read_dir(path)
.map(|entries| {
entries
.filter_map(Result::ok)
.any(|ent| &ent.file_name() == "Cargo.toml")
})
.unwrap_or(false)
}
impl Metadata {
/// Returns the struct filled from `cargo metadata` output
/// TODO @Jon, find a different way that doesn't rely on the cargo metadata command (it's slow)
pub fn get() -> Result<Self, CargoError> {
let output = Command::new("cargo")
.args(["metadata"])
.output()
.map_err(|_| CargoError::new("Manifset".to_string()))?;
if !output.status.success() {
let mut msg = str::from_utf8(&output.stderr).unwrap().trim();
if msg.starts_with("error: ") {
msg = &msg[7..];
}
return Err(CargoError::new(msg.to_string()));
}
let stdout = str::from_utf8(&output.stdout).unwrap();
if let Some(line) = stdout.lines().next() {
let meta: serde_json::Value = serde_json::from_str(line)
.map_err(|_| CargoError::new("InvalidOutput".to_string()))?;
let workspace_root = meta["workspace_root"]
.as_str()
.ok_or_else(|| CargoError::new("InvalidOutput".to_string()))?
.into();
let target_directory = meta["target_directory"]
.as_str()
.ok_or_else(|| CargoError::new("InvalidOutput".to_string()))?
.into();
return Ok(Self {
workspace_root,
target_directory,
});
}
Err(CargoError::new("InvalidOutput".to_string()))
}
}

View file

@ -1,16 +1,19 @@
use crate::BundleConfig; use crate::BundleConfig;
use crate::CargoError;
use core::fmt::{Display, Formatter};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt::Display;
use std::path::PathBuf; use std::path::PathBuf;
use std::str::FromStr;
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Debug)] #[derive(
Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Debug, Default,
)]
#[cfg_attr(feature = "cli", derive(clap::ValueEnum))] #[cfg_attr(feature = "cli", derive(clap::ValueEnum))]
#[non_exhaustive] #[non_exhaustive]
pub enum Platform { pub enum Platform {
/// Targeting the web platform using WASM /// Targeting the web platform using WASM
#[cfg_attr(feature = "cli", clap(name = "web"))] #[cfg_attr(feature = "cli", clap(name = "web"))]
#[serde(rename = "web")] #[serde(rename = "web")]
#[default]
Web, Web,
/// Targeting the desktop platform using Tao/Wry-based webview /// Targeting the desktop platform using Tao/Wry-based webview
@ -29,6 +32,56 @@ pub enum Platform {
StaticGeneration, StaticGeneration,
} }
/// An error that occurs when a platform is not recognized
pub struct UnknownPlatformError;
impl std::fmt::Display for UnknownPlatformError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Unknown platform")
}
}
impl FromStr for Platform {
type Err = UnknownPlatformError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"web" => Ok(Self::Web),
"desktop" => Ok(Self::Desktop),
"fullstack" => Ok(Self::Fullstack),
"static-generation" => Ok(Self::StaticGeneration),
_ => Err(UnknownPlatformError),
}
}
}
impl Display for Platform {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let feature = self.feature_name();
f.write_str(feature)
}
}
impl Platform {
/// All platforms the dioxus CLI supports
pub const ALL: &'static [Self] = &[
Platform::Web,
Platform::Desktop,
Platform::Fullstack,
Platform::StaticGeneration,
];
/// Get the feature name for the platform in the dioxus crate
pub fn feature_name(&self) -> &str {
match self {
Platform::Web => "web",
Platform::Desktop => "desktop",
Platform::Fullstack => "fullstack",
Platform::StaticGeneration => "static-generation",
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DioxusConfig { pub struct DioxusConfig {
pub application: ApplicationConfig, pub application: ApplicationConfig,
@ -37,165 +90,6 @@ pub struct DioxusConfig {
#[serde(default)] #[serde(default)]
pub bundle: BundleConfig, pub bundle: BundleConfig,
#[cfg(feature = "cli")]
#[serde(default = "default_plugin")]
pub plugin: toml::Value,
#[cfg(feature = "cli")]
pub cli_settings: Option<crate::CliSettings>,
}
#[cfg(feature = "cli")]
fn default_plugin() -> toml::Value {
toml::Value::Boolean(true)
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LoadDioxusConfigError {
location: String,
error: String,
}
impl std::fmt::Display for LoadDioxusConfigError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{} {}", self.location, self.error)
}
}
impl std::error::Error for LoadDioxusConfigError {}
#[derive(Debug)]
#[non_exhaustive]
pub enum CrateConfigError {
Cargo(CargoError),
Io(std::io::Error),
#[cfg(feature = "cli")]
Toml(toml::de::Error),
LoadDioxusConfig(LoadDioxusConfigError),
}
impl From<CargoError> for CrateConfigError {
fn from(err: CargoError) -> Self {
Self::Cargo(err)
}
}
impl From<std::io::Error> for CrateConfigError {
fn from(err: std::io::Error) -> Self {
Self::Io(err)
}
}
#[cfg(feature = "cli")]
impl From<toml::de::Error> for CrateConfigError {
fn from(err: toml::de::Error) -> Self {
Self::Toml(err)
}
}
impl From<LoadDioxusConfigError> for CrateConfigError {
fn from(err: LoadDioxusConfigError) -> Self {
Self::LoadDioxusConfig(err)
}
}
impl Display for CrateConfigError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
Self::Cargo(err) => write!(f, "{}", err),
Self::Io(err) => write!(f, "{}", err),
#[cfg(feature = "cli")]
Self::Toml(err) => write!(f, "{}", err),
Self::LoadDioxusConfig(err) => write!(f, "{}", err),
}
}
}
impl std::error::Error for CrateConfigError {}
impl DioxusConfig {
#[cfg(feature = "cli")]
/// Load the dioxus config from a path
#[tracing::instrument]
pub fn load(bin: Option<PathBuf>) -> Result<Option<DioxusConfig>, CrateConfigError> {
use crate::CliSettings;
let crate_dir = crate::cargo::crate_root();
let crate_dir = match crate_dir {
Ok(dir) => {
if let Some(bin) = bin {
dir.join(bin)
} else {
dir
}
}
Err(_) => return Ok(None),
};
let crate_dir = crate_dir.as_path();
let Some(dioxus_conf_file) = acquire_dioxus_toml(crate_dir) else {
tracing::warn!(?crate_dir, "no dioxus config found for");
return Ok(None);
};
let dioxus_conf_file = dioxus_conf_file.as_path();
let cfg = toml::from_str::<DioxusConfig>(&std::fs::read_to_string(dioxus_conf_file)?)
.map_err(|err| {
let error_location = dioxus_conf_file
.strip_prefix(crate_dir)
.unwrap_or(dioxus_conf_file)
.display();
CrateConfigError::LoadDioxusConfig(LoadDioxusConfigError {
location: error_location.to_string(),
error: err.to_string(),
})
})
.map(Some);
match cfg {
Ok(Some(mut cfg)) => {
let name = cfg.application.name.clone();
if cfg.bundle.identifier.is_none() {
cfg.bundle.identifier = Some(format!("io.github.{name}"));
}
if cfg.bundle.publisher.is_none() {
cfg.bundle.publisher = Some(name);
}
// Handle Cli Settings
if cfg.cli_settings.is_none() {
cfg.cli_settings = Some(CliSettings::default());
}
let cli_settings = cfg.cli_settings.as_mut().unwrap();
// If the project-level settings doesn't exist, let's grab it from global.
if let Some(global_cli_settings) = crate::CliSettings::from_global() {
if cli_settings.always_hot_reload.is_none() {
cli_settings.always_hot_reload = global_cli_settings.always_hot_reload;
}
if cli_settings.always_open_browser.is_none() {
cli_settings.always_open_browser = global_cli_settings.always_open_browser;
}
}
Ok(Some(cfg))
}
cfg => cfg,
}
}
}
#[cfg(feature = "cli")]
#[tracing::instrument]
fn acquire_dioxus_toml(dir: &std::path::Path) -> Option<PathBuf> {
use tracing::trace;
["Dioxus.toml", "dioxus.toml"]
.into_iter()
.map(|file| dir.join(file))
.inspect(|path| trace!("checking [{path:?}]"))
.find(|path| path.is_file())
} }
impl Default for DioxusConfig { impl Default for DioxusConfig {
@ -208,9 +102,6 @@ impl Default for DioxusConfig {
out_dir: out_dir_default(), out_dir: out_dir_default(),
asset_dir: asset_dir_default(), asset_dir: asset_dir_default(),
#[cfg(feature = "cli")]
tools: Default::default(),
sub_package: None, sub_package: None,
}, },
web: WebConfig { web: WebConfig {
@ -242,11 +133,6 @@ impl Default for DioxusConfig {
publisher: Some(name), publisher: Some(name),
..Default::default() ..Default::default()
}, },
#[cfg(feature = "cli")]
plugin: toml::Value::Table(toml::map::Map::new()),
#[cfg(feature = "cli")]
cli_settings: Some(crate::CliSettings::default()),
} }
} }
} }
@ -265,10 +151,6 @@ pub struct ApplicationConfig {
#[serde(default = "asset_dir_default")] #[serde(default = "asset_dir_default")]
pub asset_dir: PathBuf, pub asset_dir: PathBuf,
#[cfg(feature = "cli")]
#[serde(default)]
pub tools: std::collections::HashMap<String, toml::Value>,
#[serde(default)] #[serde(default)]
pub sub_package: Option<String>, pub sub_package: Option<String>,
} }
@ -362,6 +244,16 @@ pub struct WebAppConfig {
pub base_path: Option<String>, pub base_path: Option<String>,
} }
impl WebAppConfig {
/// Get the normalized base path for the application with `/` trimmed from both ends. If the base path is not set, this will return `.`.
pub fn base_path(&self) -> &str {
match &self.base_path {
Some(path) => path.trim_matches('/'),
None => ".",
}
}
}
impl Default for WebAppConfig { impl Default for WebAppConfig {
fn default() -> Self { fn default() -> Self {
Self { Self {
@ -429,228 +321,6 @@ pub struct WebHttpsConfig {
pub cert_path: Option<String>, pub cert_path: Option<String>,
} }
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CrateConfig {
pub crate_dir: PathBuf,
pub workspace_dir: PathBuf,
pub target_dir: PathBuf,
#[cfg(feature = "cli")]
pub manifest: cargo_toml::Manifest<cargo_toml::Value>,
pub executable: ExecutableType,
pub dioxus_config: DioxusConfig,
pub release: bool,
pub hot_reload: bool,
pub cross_origin_policy: bool,
pub verbose: bool,
pub custom_profile: Option<String>,
pub features: Option<Vec<String>>,
pub target: Option<String>,
pub cargo_args: Vec<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ExecutableType {
Binary(String),
Lib(String),
Example(String),
}
impl ExecutableType {
/// Get the name of the executable if it is a binary or an example.
pub fn executable(&self) -> Option<&str> {
match self {
Self::Binary(bin) | Self::Example(bin) => Some(bin),
_ => None,
}
}
}
impl CrateConfig {
#[cfg(feature = "cli")]
pub fn new(bin: Option<PathBuf>) -> Result<Self, CrateConfigError> {
let dioxus_config = DioxusConfig::load(bin.clone())?.unwrap_or_default();
let crate_root = crate::crate_root()?;
let crate_dir = if let Some(package) = &dioxus_config.application.sub_package {
crate_root.join(package)
} else if let Some(bin) = bin {
crate_root.join(bin)
} else {
crate_root
};
let meta = crate::Metadata::get()?;
let workspace_dir = meta.workspace_root;
let target_dir = meta.target_directory;
let cargo_def = &crate_dir.join("Cargo.toml");
let manifest = cargo_toml::Manifest::from_path(cargo_def).unwrap();
let mut output_filename = String::from("dioxus_app");
if let Some(package) = &manifest.package.as_ref() {
output_filename = match &package.default_run {
Some(default_run_target) => default_run_target.to_owned(),
None => manifest
.bin
.iter()
.find(|b| {
#[allow(clippy::useless_asref)]
let matching_bin =
b.name == manifest.package.as_ref().map(|pkg| pkg.name.clone());
matching_bin
})
.or(manifest
.bin
.iter()
.find(|b| b.path == Some("src/main.rs".to_owned())))
.or(manifest.bin.first())
.or(manifest.lib.as_ref())
.and_then(|prod| prod.name.clone())
.unwrap_or(String::from("dioxus_app")),
};
}
let executable = ExecutableType::Binary(output_filename);
let release = false;
let hot_reload = false;
let cross_origin_policy = false;
let verbose = false;
let custom_profile = None;
let features = None;
let target = None;
let cargo_args = vec![];
Ok(Self {
crate_dir,
workspace_dir,
target_dir,
#[cfg(feature = "cli")]
manifest,
executable,
dioxus_config,
release,
hot_reload,
cross_origin_policy,
verbose,
custom_profile,
features,
target,
cargo_args,
})
}
/// Compose an asset directory. Represents the typical "public" directory
/// with publicly available resources (configurable in the `Dioxus.toml`).
pub fn asset_dir(&self) -> PathBuf {
self.crate_dir
.join(&self.dioxus_config.application.asset_dir)
}
/// Compose an out directory. Represents the typical "dist" directory that
/// is "distributed" after building an application (configurable in the
/// `Dioxus.toml`).
pub fn out_dir(&self) -> PathBuf {
self.crate_dir.join(&self.dioxus_config.application.out_dir)
}
/// Compose an out directory for the fullstack platform. See `out_dir()`
/// method.
pub fn fullstack_out_dir(&self) -> PathBuf {
self.crate_dir.join(".dioxus")
}
/// Compose a target directory for the server (fullstack-only?).
pub fn server_target_dir(&self) -> PathBuf {
self.fullstack_out_dir().join("ssr")
}
/// Compose a target directory for the client (fullstack-only?).
pub fn client_target_dir(&self) -> PathBuf {
self.fullstack_out_dir().join("web")
}
pub fn as_example(&mut self, example_name: String) -> &mut Self {
self.executable = ExecutableType::Example(example_name);
self
}
pub fn with_release(&mut self, release: bool) -> &mut Self {
self.release = release;
self
}
pub fn with_hot_reload(&mut self, hot_reload: bool) -> &mut Self {
self.hot_reload = hot_reload;
self
}
pub fn with_cross_origin_policy(&mut self, cross_origin_policy: bool) -> &mut Self {
self.cross_origin_policy = cross_origin_policy;
self
}
pub fn with_verbose(&mut self, verbose: bool) -> &mut Self {
self.verbose = verbose;
self
}
pub fn set_profile(&mut self, profile: String) -> &mut Self {
self.custom_profile = Some(profile);
self
}
pub fn set_features(&mut self, features: Vec<String>) -> &mut Self {
self.features = Some(features);
self
}
pub fn set_target(&mut self, target: String) -> &mut Self {
self.target = Some(target);
self
}
pub fn set_cargo_args(&mut self, cargo_args: Vec<String>) -> &mut Self {
self.cargo_args = cargo_args;
self
}
pub fn add_features(&mut self, feature: Vec<String>) -> &mut Self {
if let Some(features) = &mut self.features {
features.extend(feature);
} else {
self.features = Some(feature);
}
self
}
#[cfg(feature = "cli")]
pub fn extend_with_platform(&mut self, platform: Platform) -> &mut Self {
    // Map the platform to the cargo feature it conventionally enables.
    // fullstack has its own feature insertion - we use a different featureset
    // for the client and server, so it maps to no feature here.
    let platform_feature = match platform {
        Platform::Web => Some("web"),
        Platform::Desktop => Some("desktop"),
        _ => None,
    };

    // Only enable the feature when the crate's manifest actually declares it.
    let features = platform_feature
        .filter(|feature| self.manifest.features.contains_key(*feature))
        .map(|feature| vec![feature.to_string()])
        .unwrap_or_default();

    self.add_features(features);
    self
}
/// Check if assets should be pre_compressed. Only true for release builds
/// where the user has enabled `pre_compress` in the web config.
pub fn should_pre_compress_web_assets(&self) -> bool {
    // Both operands are plain field reads, so the evaluation order is immaterial.
    self.release && self.dioxus_config.web.pre_compress
}
}
fn true_bool() -> bool { fn true_bool() -> bool {
true true
} }

View file

@ -8,37 +8,21 @@ pub use config::*;
mod bundle; mod bundle;
pub use bundle::*; pub use bundle::*;
mod cargo;
pub use cargo::*;
#[cfg(feature = "cli")]
mod serve; mod serve;
#[cfg(feature = "cli")]
pub use serve::*; pub use serve::*;
#[cfg(feature = "cli")]
mod settings;
#[cfg(feature = "cli")]
pub use settings::*;
#[doc(hidden)] #[doc(hidden)]
pub mod __private { pub mod __private {
use crate::CrateConfig; use crate::DioxusConfig;
pub(crate) const CONFIG_ENV: &str = "DIOXUS_CONFIG"; pub(crate) const CONFIG_ENV: &str = "DIOXUS_CONFIG";
pub(crate) const CONFIG_BASE_PATH_ENV: &str = "DIOXUS_CONFIG_BASE_PATH"; pub(crate) const CONFIG_BASE_PATH_ENV: &str = "DIOXUS_CONFIG_BASE_PATH";
pub fn save_config(config: &CrateConfig) -> CrateConfigDropGuard { pub fn save_config(config: &DioxusConfig) -> CrateConfigDropGuard {
std::env::set_var(CONFIG_ENV, serde_json::to_string(config).unwrap()); std::env::set_var(CONFIG_ENV, serde_json::to_string(config).unwrap());
std::env::set_var( std::env::set_var(
CONFIG_BASE_PATH_ENV, CONFIG_BASE_PATH_ENV,
config config.web.app.base_path.clone().unwrap_or_default(),
.dioxus_config
.web
.app
.base_path
.clone()
.unwrap_or_default(),
); );
CrateConfigDropGuard CrateConfigDropGuard
} }
@ -53,7 +37,7 @@ pub mod __private {
} }
} }
#[cfg(feature = "cli")] #[cfg(feature = "read-config")]
/// The environment variable that stores the CLIs serve configuration. /// The environment variable that stores the CLIs serve configuration.
/// We use this to communicate between the CLI and the server for fullstack applications. /// We use this to communicate between the CLI and the server for fullstack applications.
pub const SERVE_ENV: &str = "DIOXUS_SERVE_CONFIG"; pub const SERVE_ENV: &str = "DIOXUS_SERVE_CONFIG";
@ -74,7 +58,7 @@ impl std::error::Error for DioxusCLINotUsed {}
#[cfg(feature = "read-config")] #[cfg(feature = "read-config")]
/// The current crate's configuration. /// The current crate's configuration.
pub static CURRENT_CONFIG: once_cell::sync::Lazy< pub static CURRENT_CONFIG: once_cell::sync::Lazy<
Result<crate::config::CrateConfig, DioxusCLINotUsed>, Result<crate::config::DioxusConfig, DioxusCLINotUsed>,
> = once_cell::sync::Lazy::new(|| { > = once_cell::sync::Lazy::new(|| {
CURRENT_CONFIG_JSON CURRENT_CONFIG_JSON
.and_then(|config| serde_json::from_str(config).ok()) .and_then(|config| serde_json::from_str(config).ok())

View file

@ -1,35 +1,98 @@
#![allow(unused)] // lots of configs...
use std::net::{IpAddr, Ipv4Addr, SocketAddr, SocketAddrV4};
#[cfg(feature = "read-from-args")]
use clap::Parser; use clap::Parser;
/// Arguments for the serve command /// The arguments for the address the server will run on
#[derive(Clone, Debug, Parser, serde::Serialize, serde::Deserialize)]
pub struct ServeArguments { #[cfg(feature = "read-from-args")]
#[derive(Clone, Debug, Parser)]
pub struct AddressArguments {
/// The port the server will run on /// The port the server will run on
#[clap(long)] #[clap(long)]
#[clap(default_value_t = default_port())] #[clap(default_value_t = default_port())]
pub port: u16, pub port: u16,
/// The address the server will run on /// The address the server will run on
#[clap(long)] #[clap(long, default_value_t = default_address())]
pub addr: Option<std::net::IpAddr>, pub addr: std::net::IpAddr,
} }
impl Default for ServeArguments { #[cfg(feature = "read-from-args")]
impl Default for AddressArguments {
fn default() -> Self { fn default() -> Self {
Self { Self {
port: default_port(), port: default_port(),
addr: None, addr: default_address(),
} }
} }
} }
impl ServeArguments { #[cfg(feature = "read-from-args")]
impl AddressArguments {
/// Get the address the server should run on
pub fn address(&self) -> SocketAddr {
SocketAddr::new(self.addr, self.port)
}
}
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct RuntimeCLIArguments {
/// The address hot reloading is running on
cli_address: SocketAddr,
/// The address the server should run on
server_socket: Option<SocketAddr>,
}
impl RuntimeCLIArguments {
/// Create a new RuntimeCLIArguments
pub fn new(cli_address: SocketAddr, server_socket: Option<SocketAddr>) -> Self {
Self {
cli_address,
server_socket,
}
}
/// Attempt to read the current serve settings from the CLI. This will only be set for the fullstack platform on recent versions of the CLI. /// Attempt to read the current serve settings from the CLI. This will only be set for the fullstack platform on recent versions of the CLI.
pub fn from_cli() -> Option<Self> { pub fn from_cli() -> Option<Self> {
std::env::var(crate::__private::SERVE_ENV) std::env::var(crate::__private::SERVE_ENV)
.ok() .ok()
.and_then(|json| serde_json::from_str(&json).ok()) .and_then(|json| serde_json::from_str(&json).ok())
} }
/// Get the address the server should run on
pub fn server_socket(&self) -> Option<SocketAddr> {
self.server_socket
}
/// Get the address the CLI is running on
pub fn cli_address(&self) -> SocketAddr {
self.cli_address
}
/// Get the address the proxied fullstack server should run on
#[cfg(feature = "read-from-args")]
pub fn fullstack_address(&self) -> AddressArguments {
let socket = self.server_socket.unwrap_or_else(|| {
SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::LOCALHOST, default_port()))
});
AddressArguments {
port: socket.port(),
addr: socket.ip(),
}
}
} }
#[cfg(feature = "read-from-args")]
fn default_port() -> u16 { fn default_port() -> u16 {
8080 8080
} }
#[cfg(feature = "read-from-args")]
fn default_address() -> IpAddr {
IpAddr::V4(std::net::Ipv4Addr::new(127, 0, 0, 1))
}

View file

@ -25,26 +25,30 @@ serde = { version = "1.0.136", features = ["derive"] }
serde_json = "1.0.79" serde_json = "1.0.79"
toml = { workspace = true } toml = { workspace = true }
fs_extra = "1.2.0" fs_extra = "1.2.0"
cargo_toml = "0.18.0" cargo_toml = { workspace = true }
futures-util = { workspace = true, features = ["async-await-macro"] } futures-util = { workspace = true, features = ["async-await-macro"] }
notify = { version = "5.0.0-pre.16", features = ["serde"] } notify = { workspace = true, features = ["serde"] }
html_parser = { workspace = true } html_parser = { workspace = true }
cargo_metadata = "0.18.1" cargo_metadata = "0.18.1"
tokio = { version = "1.16.1", features = ["fs", "sync", "rt", "macros"] } tokio = { version = "1.16.1", features = ["fs", "sync", "rt", "macros", "process", "rt-multi-thread"] }
tokio-stream = "0.1.15"
atty = "0.2.14" atty = "0.2.14"
chrono = "0.4.19" chrono = "0.4.19"
anyhow = "1" anyhow = "1"
hyper = { workspace = true } hyper = { workspace = true }
hyper-util = "0.1.3" hyper-util = "0.1.3"
hyper-rustls = { workspace = true } hyper-rustls = { workspace = true }
indicatif = "0.17.5" rustls = "0.23.11"
subprocess = "0.2.9" subprocess = "0.2.9"
rayon = "1.8.0" rayon = "1.8.0"
futures-channel = { workspace = true }
krates = { version = "0.17.0" }
axum = { workspace = true, features = ["ws"] } axum = { workspace = true, features = ["ws"] }
axum-server = { workspace = true, features = ["tls-rustls"] } axum-server = { workspace = true, features = ["tls-rustls"] }
axum-extra = { workspace = true, features = ["typed-header"] } axum-extra = { workspace = true, features = ["typed-header"] }
tower-http = { workspace = true, features = ["full"] } tower-http = { workspace = true, features = ["full"] }
proc-macro2 = { workspace = true, features = ["span-locations"] }
syn = { workspace = true, features = ["full", "extra-traits", "visit", "visit-mut"] }
headers = "0.3.7" headers = "0.3.7"
walkdir = "2" walkdir = "2"
@ -61,55 +65,55 @@ flate2 = "1.0.22"
tar = "0.4.38" tar = "0.4.38"
zip = "0.6.2" zip = "0.6.2"
tower = { workspace = true } tower = { workspace = true }
lazy_static = "1.4.0" once_cell = "1.19.0"
# plugin packages # plugin packages
mlua = { version = "0.8.1", features = [
"lua54",
"vendored",
"async",
"send",
"macros",
], optional = true }
ctrlc = "3.2.3" ctrlc = "3.2.3"
open = "5.0.1" open = "5.0.1"
cargo-generate = "0.20.0" cargo-generate = "=0.21.1"
toml_edit = "0.21.0" toml_edit = "0.22.15"
# bundling # bundling
tauri-bundler = { version = "=1.4.*", features = ["native-tls-vendored"] } tauri-bundler = { workspace = true }
# formatting # formatting
syn = { workspace = true } # syn = { workspace = true }
prettyplease = { workspace = true } prettyplease = { workspace = true }
# Assets # Assets
manganis-cli-support = { workspace = true, features = ["html"] } manganis-cli-support = { workspace = true, features = ["html"] }
brotli = "5.0.0" brotli = "6.0.0"
dioxus-autofmt = { workspace = true } dioxus-autofmt = { workspace = true }
dioxus-check = { workspace = true } dioxus-check = { workspace = true }
rsx-rosetta = { workspace = true } rsx-rosetta = { workspace = true }
dioxus-rsx = { workspace = true } dioxus-rsx = { workspace = true, features = ["serde"]}
dioxus-html = { workspace = true, features = ["hot-reload-context"] } dioxus-html = { workspace = true, features = ["hot-reload-context"] }
dioxus-core = { workspace = true, features = ["serialize"] } dioxus-core = { workspace = true, features = ["serialize"] }
dioxus-hot-reload = { workspace = true, features = ["serve"] } dioxus-hot-reload = { workspace = true, features = ["serve"] }
interprocess = { workspace = true }
# interprocess-docfix = { version = "1.2.2" }
ignore = "0.4.22" ignore = "0.4.22"
env_logger = "0.11.3" env_logger = "0.11.3"
tracing-subscriber = { version = "0.3.18", features = ["std", "env-filter"] } tracing-subscriber = { version = "0.3.18", features = ["std", "env-filter"] }
console-subscriber = { version = "0.3.0", optional = true }
tracing = { workspace = true } tracing = { workspace = true }
wasm-opt = "0.116.1" wasm-opt = "0.116.1"
ratatui = { version = "0.27.0", features = ["crossterm", "unstable"] }
crossterm = { version = "0.27.0", features = ["event-stream"] }
ansi-to-tui = "=5.0.0-rc.1"
ansi-to-html = "0.2.1"
# on maco, we need to specify the vendored feature on ssl when cross compiling # on maco, we need to specify the vendored feature on ssl when cross compiling
[target.'cfg(target_os = "macos")'.dependencies] # [target.'cfg(target_os = "macos")'.dependencies]
openssl = { version = "0.10", features = ["vendored"] } # openssl = { version = "0.10", features = ["vendored"] }
[build-dependencies]
built = { version = "=0.7.3", features = ["git2"] }
[features] [features]
default = [] default = []
plugin = ["dep:mlua"] plugin = []
tokio-console = ["dep:console-subscriber"]
[[bin]] [[bin]]
path = "src/main.rs" path = "src/main.rs"

View file

@ -47,12 +47,6 @@ script = []
# serve: [dev-server] only # serve: [dev-server] only
script = [] script = []
[application.plugins]
available = true
required = []
[bundler] [bundler]
# Bundle identifier # Bundle identifier
identifier = "io.github.{{project-name}}" identifier = "io.github.{{project-name}}"

View file

Before

Width:  |  Height:  |  Size: 130 KiB

After

Width:  |  Height:  |  Size: 130 KiB

View file

@ -0,0 +1,12 @@
<!DOCTYPE html>
<!-- Default index template for web builds; the CLI substitutes the
     curly-brace placeholder below at build time. -->
<html>
  <head>
    <title>{app_title}</title>
    <meta content="text/html;charset=utf-8" http-equiv="Content-Type" />
    <meta name="viewport" content="width=device-width, initial-scale=1" />
    <meta charset="UTF-8" />
  </head>
  <body>
    <!-- presumably the element the app mounts into - confirm against the web renderer's default mount id -->
    <div id="main"></div>
  </body>
</html>

View file

@ -0,0 +1,118 @@
<!DOCTYPE html>
<!-- Interstitial page shown while the CLI is building; it watches the
     build-status websocket and reloads once the app is ready. -->
<html style="margin: 0; padding: 0; width: 100%; height: 100%">
  <head>
    <title>Dioxus Build In Progress</title>
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <meta charset="UTF-8" />
    <style>
      #main {
        display: flex;
        flex-direction: column;
        justify-content: center;
        align-items: center;
        height: 100%;
        width: 100%;
        background: #f6f7f8;
      }
      #main :nth-child(n) {
        animation: fade-in 0.5s;
      }
      #error-message {
        color: rgb(182, 0, 0);
        font-size: 0.75em;
        white-space: pre-wrap;
      }
      @media (prefers-color-scheme: dark) {
        #main {
          background: #000;
          color: #f6f7f8;
        }
        #error-message {
          color: rgb(255, 89, 89);
        }
      }
      @keyframes fade-in {
        0% {
          opacity: 0;
        }
        100% {
          opacity: 1;
        }
      }
    </style>
  </head>
  <body style="margin: 0; padding: 0; width: 100%; height: 100%">
    <div id="main">
      <h1 id="loading-indicator">Building project 0%</h1>
    </div>
    <script>
      // Wait for a "Ready" message from the server on the websocket served at /_dioxus/build_status
      let protocol;
      if (window.location.protocol === "https:") {
        protocol = "wss:";
      } else {
        protocol = "ws:";
      }
      let url =
        protocol + "//" + window.location.host + "/_dioxus/build_status";
      let ws = new WebSocket(url);
      ws.onmessage = (event) => {
        // Parse the message as json
        let data = JSON.parse(event.data);
        // If the message is "Ready", reload the page
        if (data.type === "Ready") {
          setTimeout(() => {
            // Once we get a "Ready" message, reload the page
            window.location.reload();
          }, 500);
        } else if (data.type === "BuildError") {
          // If the message is "BuildError", display an error message
          document.getElementById("main").innerHTML = `<div>
            <h1>Failed to build project</h1>
            <pre id="error-message">${data.data.error}</pre>
            </div>`;
        } else if (data.type === "Building") {
          // Get the current progress
          const progress = data.data.progress;
          const roundedProgress = Math.round(progress * 100);
          // Update the loading indicator
          let loadingIndicator = document.getElementById("loading-indicator");
          if (loadingIndicator) {
            loadingIndicator.innerHTML = `Building project ${roundedProgress}%`;
          } else {
            // Fix: use the rounded percentage here as well — `progress` is the
            // raw 0..1 fraction, which previously rendered e.g. "0.45%".
            document.getElementById(
              "main"
            ).innerHTML = `<h1 id="loading-indicator">Building project ${roundedProgress}%</h1>`;
          }
        }
      };
      const POLL_INTERVAL_MIN = 250;
      const POLL_INTERVAL_MAX = 4000;
      const POLL_INTERVAL_SCALE_FACTOR = 2;
      // Exponential-backoff reconnect: retry the websocket and reload once it opens.
      const reload_upon_connect = (event, poll_interval) => {
        // Firefox will send a 1001 code when the connection is closed because the page is reloaded
        // Only firefox will trigger the onclose event when the page is reloaded manually: https://stackoverflow.com/questions/10965720/should-websocket-onclose-be-triggered-by-user-navigation-or-refresh
        // We should not reload the page in this case
        if (event.code === 1001) {
          return;
        }
        window.setTimeout(() => {
          var ws = new WebSocket(url);
          ws.onopen = () => window.location.reload();
          ws.onclose = (event) => {
            reload_upon_connect(
              event,
              Math.min(
                POLL_INTERVAL_MAX,
                poll_interval * POLL_INTERVAL_SCALE_FACTOR
              )
            );
          };
        }, poll_interval);
      };
      ws.onclose = (event) => reload_upon_connect(event, POLL_INTERVAL_MIN);
    </script>
  </body>
</html>

3
packages/cli/build.rs Normal file
View file

@ -0,0 +1,3 @@
fn main() {
    // Generate build-time metadata (via the `built` crate) so the CLI can
    // report version/build info at runtime; a failure here aborts the build script.
    built::write_built_file().expect("Failed to acquire build-time information");
}

View file

@ -1,20 +1,21 @@
use crate::builder::{BuildMessage, MessageType, Stage, UpdateBuildProgress, UpdateStage};
use crate::dioxus_crate::DioxusCrate;
use crate::Result;
use anyhow::Context;
use brotli::enc::BrotliEncoderParams; use brotli::enc::BrotliEncoderParams;
use futures_channel::mpsc::UnboundedSender;
use manganis_cli_support::{process_file, AssetManifest, AssetManifestExt, AssetType};
use std::fs; use std::fs;
use std::path::Path; use std::path::Path;
use std::{ffi::OsString, path::PathBuf}; use std::{ffi::OsString, path::PathBuf};
use walkdir::WalkDir;
use std::{fs::File, io::Write}; use std::{fs::File, io::Write};
use tracing::Level;
use crate::Result; use walkdir::WalkDir;
use dioxus_cli_config::CrateConfig;
use dioxus_cli_config::Platform;
use manganis_cli_support::{AssetManifest, AssetManifestExt};
/// The temp file name for passing manganis json from linker to current exec. /// The temp file name for passing manganis json from linker to current exec.
pub const MG_JSON_OUT: &str = "mg-out"; pub const MG_JSON_OUT: &str = "mg-out";
pub fn asset_manifest(config: &CrateConfig) -> AssetManifest { pub fn asset_manifest(config: &DioxusCrate) -> AssetManifest {
let file_path = config.out_dir().join(MG_JSON_OUT); let file_path = config.out_dir().join(MG_JSON_OUT);
let read = fs::read_to_string(&file_path).unwrap(); let read = fs::read_to_string(&file_path).unwrap();
_ = fs::remove_file(file_path); _ = fs::remove_file(file_path);
@ -24,14 +25,18 @@ pub fn asset_manifest(config: &CrateConfig) -> AssetManifest {
} }
/// Create a head file that contains all of the imports for assets that the user project uses /// Create a head file that contains all of the imports for assets that the user project uses
pub fn create_assets_head(config: &CrateConfig, manifest: &AssetManifest) -> Result<()> { pub fn create_assets_head(config: &DioxusCrate, manifest: &AssetManifest) -> Result<()> {
let mut file = File::create(config.out_dir().join("__assets_head.html"))?; let mut file = File::create(config.out_dir().join("__assets_head.html"))?;
file.write_all(manifest.head().as_bytes())?; file.write_all(manifest.head().as_bytes())?;
Ok(()) Ok(())
} }
/// Process any assets collected from the binary /// Process any assets collected from the binary
pub(crate) fn process_assets(config: &CrateConfig, manifest: &AssetManifest) -> anyhow::Result<()> { pub(crate) fn process_assets(
config: &DioxusCrate,
manifest: &AssetManifest,
progress: &mut UnboundedSender<UpdateBuildProgress>,
) -> anyhow::Result<()> {
let static_asset_output_dir = PathBuf::from( let static_asset_output_dir = PathBuf::from(
config config
.dioxus_config .dioxus_config
@ -43,7 +48,44 @@ pub(crate) fn process_assets(config: &CrateConfig, manifest: &AssetManifest) ->
); );
let static_asset_output_dir = config.out_dir().join(static_asset_output_dir); let static_asset_output_dir = config.out_dir().join(static_asset_output_dir);
manifest.copy_static_assets_to(static_asset_output_dir)?; std::fs::create_dir_all(&static_asset_output_dir)
.context("Failed to create static asset output directory")?;
let mut assets_finished: usize = 0;
let assets = manifest.assets();
let asset_count = assets.len();
assets.iter().try_for_each(move |asset| {
if let AssetType::File(file_asset) = asset {
match process_file(file_asset, &static_asset_output_dir) {
Ok(_) => {
// Update the progress
_ = progress.start_send(UpdateBuildProgress {
stage: Stage::OptimizingAssets,
update: UpdateStage::AddMessage(BuildMessage {
level: Level::INFO,
message: MessageType::Text(format!(
"Optimized static asset {}",
file_asset
)),
source: None,
}),
});
assets_finished += 1;
_ = progress.start_send(UpdateBuildProgress {
stage: Stage::OptimizingAssets,
update: UpdateStage::SetProgress(
assets_finished as f64 / asset_count as f64,
),
});
}
Err(err) => {
tracing::error!("Failed to copy static asset: {}", err);
return Err(err);
}
}
}
Ok::<(), anyhow::Error>(())
})?;
Ok(()) Ok(())
} }
@ -68,21 +110,11 @@ impl Drop for AssetConfigDropGuard {
} }
} }
pub fn copy_assets_dir(config: &CrateConfig, platform: Platform) -> anyhow::Result<()> { pub(crate) fn copy_dir_to(
tracing::info!("Copying public assets to the output directory..."); src_dir: PathBuf,
let out_dir = config.out_dir(); dest_dir: PathBuf,
let asset_dir = config.asset_dir(); pre_compress: bool,
) -> std::io::Result<()> {
if asset_dir.is_dir() {
// Only pre-compress the assets from the web build. Desktop assets are not served, so they don't need to be pre_compressed
let pre_compress = platform == Platform::Web && config.should_pre_compress_web_assets();
copy_dir_to(asset_dir, out_dir, pre_compress)?;
}
Ok(())
}
fn copy_dir_to(src_dir: PathBuf, dest_dir: PathBuf, pre_compress: bool) -> std::io::Result<()> {
let entries = std::fs::read_dir(&src_dir)?; let entries = std::fs::read_dir(&src_dir)?;
let mut children: Vec<std::thread::JoinHandle<std::io::Result<()>>> = Vec::new(); let mut children: Vec<std::thread::JoinHandle<std::io::Result<()>>> = Vec::new();

View file

@ -1,38 +0,0 @@
<!DOCTYPE html>
<!-- Web index template; the CLI substitutes the curly-brace placeholders
     (app title/name, base path, style/script includes) at build time. -->
<html>
  <head>
    <title>{app_title}</title>
    <meta content="text/html;charset=utf-8" http-equiv="Content-Type" />
    <meta name="viewport" content="width=device-width, initial-scale=1" />
    <meta charset="UTF-8" />
    <!-- NOTE(review): this preloads a JavaScript file with as="style" —
         presumably it should be as="script"; confirm before changing. -->
    <link
      rel="preload"
      href="/{base_path}/assets/dioxus/{app_name}.js"
      as="style"
    />
    <link
      rel="preload"
      href="/{base_path}/assets/dioxus/{app_name}_bg.wasm"
      as="fetch"
      type="application/wasm"
      crossorigin=""
    />
    {style_include}
  </head>
  <body>
    <div id="main"></div>
    <script>
      // We can't use a module script here because we need to start the script immediately when streaming
      import("/{base_path}/assets/dioxus/{app_name}.js").then(
        ({ default: init }) => {
          init("/{base_path}/assets/dioxus/{app_name}_bg.wasm").then((wasm) => {
            if (wasm.__wbindgen_start == undefined) {
              wasm.main();
            }
          });
        }
      );
    </script>
    {script_include}
  </body>
</html>

View file

@ -1,847 +0,0 @@
use crate::{
assets::{
asset_manifest, copy_assets_dir, create_assets_head, pre_compress_folder, process_assets,
AssetConfigDropGuard,
},
error::{Error, Result},
link::LinkCommand,
tools::Tool,
};
use anyhow::Context;
use cargo_metadata::{diagnostic::Diagnostic, Message};
use dioxus_cli_config::{crate_root, CrateConfig, ExecutableType, WasmOptLevel};
use indicatif::{ProgressBar, ProgressStyle};
use lazy_static::lazy_static;
use manganis_cli_support::{AssetManifest, ManganisSupportGuard};
use std::{
env,
fs::{copy, create_dir_all, File},
io::{self, IsTerminal, Read},
panic,
path::PathBuf,
process::Command,
time::Duration,
};
use wasm_bindgen_cli_support::Bindgen;
// Global `MultiProgress` handle used to manage the CLI's progress bars.
// NOTE(review): `std::sync::LazyLock` could replace `lazy_static!` on modern toolchains.
lazy_static! {
    static ref PROGRESS_BARS: indicatif::MultiProgress = indicatif::MultiProgress::new();
}
/// The outcome of a build, as returned by `build_web`/`build_desktop`.
#[derive(Debug, Clone)]
pub struct BuildResult {
    // Warnings emitted by cargo during the build.
    pub warnings: Vec<Diagnostic>,
    // Path to the produced artifact, when one was located.
    pub executable: Option<PathBuf>,
    // Wall-clock build time in milliseconds (from `Instant::elapsed().as_millis()`).
    pub elapsed_time: u128,
    // Collected asset manifest; `None` when asset processing was skipped.
    pub assets: Option<AssetManifest>,
}
/// This trait is only created for the convenient and concise way to set
/// `RUSTFLAGS` environment variable for the `subprocess::Exec`.
pub trait ExecWithRustFlagsSetter {
    /// Attach the given `RUSTFLAGS` (if any) to the command's environment.
    fn set_rust_flags(self, rust_flags: Option<String>) -> Self;
}
impl ExecWithRustFlagsSetter for subprocess::Exec {
    /// Sets (appends to, if already set) `RUSTFLAGS` environment variable if
    /// `rust_flags` is not `None`.
    fn set_rust_flags(self, rust_flags: Option<String>) -> Self {
        // Nothing to do when no default flags were provided: leaving the
        // variable untouched avoids redefining it with the same value.
        let Some(rust_flags) = rust_flags else {
            return self;
        };
        // Flags already present in the environment (set by the user or the
        // shell) take higher priority, so they are appended after ours; when
        // unset we pass our flags through without adding stray whitespace.
        let combined = match env::var("RUSTFLAGS") {
            Ok(custom_rust_flags) => rust_flags + " " + custom_rust_flags.as_str(),
            Err(_) => rust_flags,
        };
        self.env("RUSTFLAGS", combined)
    }
}
/// Build client (WASM).
///
/// Compiles the crate for `wasm32-unknown-unknown`, runs wasm-bindgen (with a
/// version-pinning recovery path), optionally wasm-opt and Tailwind, copies
/// assets into the output directory, and returns the collected [`BuildResult`].
///
/// Note: `rust_flags` argument is only used for the fullstack platform.
pub fn build_web(
    config: &CrateConfig,
    skip_assets: bool,
    rust_flags: Option<String>,
) -> Result<BuildResult> {
    // [1] Build the project with cargo, generating a wasm32-unknown-unknown target (is there a more specific, better target to leverage?)
    // [2] Generate the appropriate build folders
    // [3] Wasm-bindgen the .wasm file, and move it into the {builddir}/modules/xxxx/xxxx_bg.wasm
    // [4] Wasm-opt the .wasm file with whatever optimizations need to be done
    // [5][OPTIONAL] Builds the Tailwind CSS file using the Tailwind standalone binary
    // [6] Link up the html page to the wasm module

    let CrateConfig {
        crate_dir,
        target_dir,
        dioxus_config,
        ..
    } = config;
    let out_dir = config.out_dir();

    // Guards restore asset/manganis global state when this function exits.
    let _asset_guard = AssetConfigDropGuard::new();
    let _manganis_support = ManganisSupportGuard::default();

    // start to build the assets
    build_assets(config)?;

    let t_start = std::time::Instant::now();
    let _guard = dioxus_cli_config::__private::save_config(config);

    // [1] Build the .wasm module
    tracing::info!("🚅 Running build command...");

    // If the user has rustup, we can check if the wasm32-unknown-unknown target is installed
    // Otherwise we can just assume it is installed - which is not great...
    // Eventually we can poke at the errors and let the user know they need to install the target
    if let Ok(wasm_check_command) = Command::new("rustup").args(["show"]).output() {
        let wasm_check_output = String::from_utf8(wasm_check_command.stdout).unwrap();
        if !wasm_check_output.contains("wasm32-unknown-unknown") {
            tracing::info!("wasm32-unknown-unknown target not detected, installing..");
            let _ = Command::new("rustup")
                .args(["target", "add", "wasm32-unknown-unknown"])
                .output()?;
        }
    }

    let mut cargo_args = vec!["--target".to_string(), "wasm32-unknown-unknown".to_string()];

    let mut cmd = subprocess::Exec::cmd("cargo")
        .set_rust_flags(rust_flags)
        .env("CARGO_TARGET_DIR", target_dir)
        .cwd(crate_dir)
        .arg("build")
        .arg("--message-format=json-render-diagnostics");

    // TODO: make the initial variable mutable to simplify all the expressions
    // below. Look inside the `build_desktop()` as an example.
    if config.release {
        cargo_args.push("--release".to_string());
    }
    if config.verbose {
        cargo_args.push("--verbose".to_string());
    } else {
        cargo_args.push("--quiet".to_string());
    }

    if config.custom_profile.is_some() {
        let custom_profile = config.custom_profile.as_ref().unwrap();
        cargo_args.push("--profile".to_string());
        cargo_args.push(custom_profile.to_string());
    }

    if config.features.is_some() {
        let features_str = config.features.as_ref().unwrap().join(" ");
        cargo_args.push("--features".to_string());
        cargo_args.push(features_str);
    }

    if let Some(target) = &config.target {
        cargo_args.push("--target".to_string());
        cargo_args.push(target.clone());
    }

    cargo_args.append(&mut config.cargo_args.clone());

    // Select the cargo target flavor matching the configured executable.
    match &config.executable {
        ExecutableType::Binary(name) => {
            cargo_args.push("--bin".to_string());
            cargo_args.push(name.to_string());
        }
        ExecutableType::Lib(name) => {
            cargo_args.push("--lib".to_string());
            cargo_args.push(name.to_string());
        }
        ExecutableType::Example(name) => {
            cargo_args.push("--example".to_string());
            cargo_args.push(name.to_string());
        }
    };

    cmd = cmd.args(&cargo_args);

    let CargoBuildResult {
        warnings,
        output_location,
    } = prettier_build(cmd)?;

    // Start Manganis linker intercept.
    let linker_args = vec![format!("{}", config.out_dir().display())];
    manganis_cli_support::start_linker_intercept(
        &LinkCommand::command_name(),
        cargo_args,
        Some(linker_args),
    )
    .unwrap();

    let output_location = output_location.context("No output location found")?;

    // [2] Establish the output directory structure
    let bindgen_outdir = out_dir.join("assets").join("dioxus");

    let input_path = output_location.with_extension("wasm");

    tracing::info!("Running wasm-bindgen");
    let run_wasm_bindgen = || {
        // [3] Bindgen the final binary for use easy linking
        let mut bindgen_builder = Bindgen::new();

        // Keep debug info when wasm_opt debug is on or this is a debug build.
        let keep_debug = dioxus_config.web.wasm_opt.debug || (!config.release);

        bindgen_builder
            .input_path(&input_path)
            .web(true)
            .unwrap()
            .debug(keep_debug)
            .demangle(keep_debug)
            .keep_debug(keep_debug)
            .reference_types(true)
            .remove_name_section(!keep_debug)
            .remove_producers_section(!keep_debug)
            .out_name(&dioxus_config.application.name)
            .generate(&bindgen_outdir)
            .unwrap();
    };
    // Bindgen panics on failure, so run it under catch_unwind to allow recovery.
    let bindgen_result = panic::catch_unwind(run_wasm_bindgen);

    // WASM bindgen requires the exact version of the bindgen schema to match the version the CLI was built with
    // If we get an error, we can try to recover by pinning the user's wasm-bindgen version to the version we used
    if let Err(err) = bindgen_result {
        tracing::error!("Bindgen build failed: {:?}", err);
        update_wasm_bindgen_version()?;
        run_wasm_bindgen();
    }

    // Run wasm-opt if this is a release build
    if config.release {
        tracing::info!("Running optimization with wasm-opt...");
        let mut options = match dioxus_config.web.wasm_opt.level {
            WasmOptLevel::Z => wasm_opt::OptimizationOptions::new_optimize_for_size_aggressively(),
            WasmOptLevel::S => wasm_opt::OptimizationOptions::new_optimize_for_size(),
            WasmOptLevel::Zero => wasm_opt::OptimizationOptions::new_opt_level_0(),
            WasmOptLevel::One => wasm_opt::OptimizationOptions::new_opt_level_1(),
            WasmOptLevel::Two => wasm_opt::OptimizationOptions::new_opt_level_2(),
            WasmOptLevel::Three => wasm_opt::OptimizationOptions::new_opt_level_3(),
            WasmOptLevel::Four => wasm_opt::OptimizationOptions::new_opt_level_4(),
        };
        let wasm_file = bindgen_outdir.join(format!("{}_bg.wasm", dioxus_config.application.name));
        let old_size = wasm_file.metadata()?.len();
        options
            // WASM bindgen relies on reference types
            .enable_feature(wasm_opt::Feature::ReferenceTypes)
            .debug_info(dioxus_config.web.wasm_opt.debug)
            .run(&wasm_file, &wasm_file)
            .map_err(|err| Error::Other(anyhow::anyhow!(err)))?;
        let new_size = wasm_file.metadata()?.len();
        tracing::info!(
            "wasm-opt reduced WASM size from {} to {} ({:2}%)",
            old_size,
            new_size,
            (new_size as f64 - old_size as f64) / old_size as f64 * 100.0
        );
    }

    // If pre-compressing is enabled, we can pre_compress the wasm-bindgen output
    pre_compress_folder(&bindgen_outdir, config.should_pre_compress_web_assets())?;

    // [5][OPTIONAL] If tailwind is enabled and installed we run it to generate the CSS
    let dioxus_tools = dioxus_config.application.tools.clone();
    if dioxus_tools.contains_key("tailwindcss") {
        let info = dioxus_tools.get("tailwindcss").unwrap();
        let tailwind = crate::tools::Tool::Tailwind;

        if tailwind.is_installed() {
            if let Some(sub) = info.as_table() {
                tracing::info!("Building Tailwind bundle CSS file...");

                // Input/config paths come from Dioxus.toml, with conventional fallbacks.
                let input_path = match sub.get("input") {
                    Some(val) => val.as_str().unwrap(),
                    None => "./public",
                };
                let config_path = match sub.get("config") {
                    Some(val) => val.as_str().unwrap(),
                    None => "./src/tailwind.config.js",
                };
                let mut args = vec![
                    "-i",
                    input_path,
                    "-o",
                    "dist/tailwind.css",
                    "-c",
                    config_path,
                ];

                if config.release {
                    args.push("--minify");
                }

                tailwind.call("tailwindcss", args)?;
            }
        } else {
            tracing::warn!(
                "Tailwind tool not found, you can use `dx tool add tailwindcss` to install it."
            );
        }
    }

    // this code will copy all public file to the output dir
    copy_assets_dir(config, dioxus_cli_config::Platform::Web)?;

    let assets = if !skip_assets {
        tracing::info!("Processing assets");
        let assets = asset_manifest(config);
        process_assets(config, &assets)?;
        Some(assets)
    } else {
        None
    };

    Ok(BuildResult {
        warnings,
        executable: Some(output_location),
        elapsed_time: t_start.elapsed().as_millis(),
        assets,
    })
}
// Attempt to automatically recover from a bindgen failure by updating the wasm-bindgen version
fn update_wasm_bindgen_version() -> Result<()> {
let cli_bindgen_version = wasm_bindgen_shared::version();
tracing::info!("Attempting to recover from bindgen failure by setting the wasm-bindgen version to {cli_bindgen_version}...");
let output = Command::new("cargo")
.args([
"update",
"-p",
"wasm-bindgen",
"--precise",
&cli_bindgen_version,
])
.output();
let mut error_message = None;
if let Ok(output) = output {
if output.status.success() {
tracing::info!("Successfully updated wasm-bindgen to {cli_bindgen_version}");
return Ok(());
} else {
error_message = Some(output);
}
}
if let Some(output) = error_message {
tracing::error!("Failed to update wasm-bindgen: {:#?}", output);
}
Err(Error::BuildFailed(format!("WASM bindgen build failed!\nThis is probably due to the Bindgen version, dioxus-cli is using `{cli_bindgen_version}` which is not compatible with your crate.\nPlease reinstall the dioxus cli to fix this issue.\nYou can reinstall the dioxus cli by running `cargo install dioxus-cli --force` and then rebuild your project")))
}
/// Note: `rust_flags` argument is only used for the fullstack platform
/// (server).
///
/// Build the project as a native desktop binary:
/// 1. pre-process tool-managed assets (sass, ...),
/// 2. run `cargo build` with JSON diagnostics, collecting warnings,
/// 3. run the Manganis linker intercept to gather referenced assets,
/// 4. copy the executable and assets into the configured out dir.
pub fn build_desktop(
    config: &CrateConfig,
    _is_serve: bool,
    skip_assets: bool,
    rust_flags: Option<String>,
) -> Result<BuildResult> {
    tracing::info!("🚅 Running build [Desktop] command...");

    let t_start = std::time::Instant::now();

    // Compile sass/tool-managed assets before the cargo build.
    build_assets(config)?;

    // Guards persist CLI/asset configuration for the duration of the build.
    let _guard = dioxus_cli_config::__private::save_config(config);
    let _manganis_support = ManganisSupportGuard::default();
    let _asset_guard = AssetConfigDropGuard::new();

    let mut cargo_args = Vec::new();

    let mut cmd = subprocess::Exec::cmd("cargo")
        .set_rust_flags(rust_flags)
        .env("CARGO_TARGET_DIR", &config.target_dir)
        .cwd(&config.crate_dir)
        .arg("build")
        .arg("--message-format=json-render-diagnostics");

    if config.release {
        cargo_args.push("--release".to_string());
    }
    if config.verbose {
        cargo_args.push("--verbose".to_string());
    } else {
        cargo_args.push("--quiet".to_string());
    }

    if config.custom_profile.is_some() {
        let custom_profile = config.custom_profile.as_ref().unwrap();
        cargo_args.push("--profile".to_string());
        cargo_args.push(custom_profile.to_string());
    }

    if config.features.is_some() {
        let features_str = config.features.as_ref().unwrap().join(" ");
        cargo_args.push("--features".to_string());
        cargo_args.push(features_str);
    }

    if let Some(target) = &config.target {
        cargo_args.push("--target".to_string());
        cargo_args.push(target.clone());
    }

    cargo_args.append(&mut config.cargo_args.clone());

    // Select the cargo target flavor from the crate's executable type.
    match &config.executable {
        ExecutableType::Binary(name) => {
            cargo_args.push("--bin".to_string());
            cargo_args.push(name.to_string());
        }
        ExecutableType::Lib(name) => {
            cargo_args.push("--lib".to_string());
            cargo_args.push(name.to_string());
        }
        ExecutableType::Example(name) => {
            cargo_args.push("--example".to_string());
            cargo_args.push(name.to_string());
        }
    };

    cmd = cmd.args(&cargo_args);

    // Run cargo, rendering progress and collecting warnings + artifact path.
    let warning_messages = prettier_build(cmd)?;

    // Start Manganis linker intercept.
    let linker_args = vec![format!("{}", config.out_dir().display())];
    manganis_cli_support::start_linker_intercept(
        &LinkCommand::command_name(),
        cargo_args,
        Some(linker_args),
    )?;

    let file_name: String = config.executable.executable().unwrap().to_string();

    // Windows binaries carry an .exe suffix.
    let target_file = if cfg!(windows) {
        format!("{}.exe", &file_name)
    } else {
        file_name
    };

    if !config.out_dir().is_dir() {
        create_dir_all(config.out_dir())?;
    }
    let output_path = config.out_dir().join(target_file);
    // Copy the built executable into the dist directory, when cargo reported one.
    if let Some(res_path) = &warning_messages.output_location {
        copy(res_path, &output_path)?;
    }

    copy_assets_dir(config, dioxus_cli_config::Platform::Desktop)?;

    let assets = if !skip_assets {
        tracing::info!("Processing assets");
        let assets = asset_manifest(config);
        // Collect assets
        process_assets(config, &assets)?;
        // Create the __assets_head.html file for bundling
        create_assets_head(config, &assets)?;
        Some(assets)
    } else {
        None
    };

    tracing::info!(
        "🚩 Build completed: [./{}]",
        config.dioxus_config.application.out_dir.clone().display()
    );

    println!("build desktop done");

    Ok(BuildResult {
        warnings: warning_messages.warnings,
        executable: Some(output_path),
        elapsed_time: t_start.elapsed().as_millis(),
        assets,
    })
}
/// Outcome of a `cargo build` run: collected warning diagnostics plus the
/// path of the produced executable, when cargo reported one.
struct CargoBuildResult {
    warnings: Vec<Diagnostic>,
    output_location: Option<PathBuf>,
}
/// Small stdout abstraction: renders through an indicatif spinner when
/// stdout is a terminal, or falls back to plain `println!` otherwise.
struct Outputter {
    progress_bar: Option<ProgressBar>, // `None` when stdout is not a TTY
}
impl Outputter {
    /// Build an outputter. On a TTY, a steady-tick spinner is created and
    /// registered with the global `PROGRESS_BARS`; otherwise all messages
    /// fall back to plain stdout lines.
    pub fn new() -> Self {
        let progress_bar = if io::stdout().lock().is_terminal() {
            let spinner = ProgressBar::new_spinner();
            spinner.enable_steady_tick(Duration::from_millis(200));
            let spinner = PROGRESS_BARS.add(spinner);
            spinner.set_style(
                ProgressStyle::with_template("{spinner:.dim.bold} {wide_msg}")
                    .unwrap()
                    .tick_chars("/|\\- "),
            );
            Some(spinner)
        } else {
            None
        };
        Self { progress_bar }
    }

    /// Show a transient status line (spinner message on TTY, plain line otherwise).
    pub fn println(&self, msg: impl ToString) {
        match &self.progress_bar {
            Some(pb) => pb.set_message(msg.to_string()),
            None => println!("{}", msg.to_string()),
        }
    }

    /// Terminate the spinner with a final message, or print it plainly.
    pub fn finish_with_message(&self, msg: impl ToString) {
        match &self.progress_bar {
            Some(pb) => pb.finish_with_message(msg.to_string()),
            None => println!("{}", msg.to_string()),
        }
    }
}
/// Run a prepared `cargo build` command, streaming its JSON messages.
///
/// Renders per-crate progress through [`Outputter`], collects warning-level
/// diagnostics, and records where the final executable was written. Returns
/// an error on the first error-level diagnostic or on a failed build.
fn prettier_build(cmd: subprocess::Exec) -> anyhow::Result<CargoBuildResult> {
    let mut warning_messages: Vec<Diagnostic> = vec![];

    let output = Outputter::new();
    output.println("💼 Waiting to start building the project...");

    let stdout = cmd.detached().stream_stdout()?;
    let reader = std::io::BufReader::new(stdout);
    let mut output_location = None;

    // NOTE(review): `message.unwrap()` panics if cargo emits unparseable
    // JSON — consider propagating instead of unwrapping.
    for message in cargo_metadata::Message::parse_stream(reader) {
        match message.unwrap() {
            Message::CompilerMessage(msg) => {
                let message = msg.message;
                match message.level {
                    cargo_metadata::diagnostic::DiagnosticLevel::Error => {
                        // Abort on the first hard error, surfacing the rendered diagnostic.
                        return {
                            Err(anyhow::anyhow!(message
                                .rendered
                                .unwrap_or("Unknown".into())))
                        };
                    }
                    cargo_metadata::diagnostic::DiagnosticLevel::Warning => {
                        warning_messages.push(message.clone());
                    }
                    _ => {}
                }
            }
            Message::CompilerArtifact(artifact) => {
                output.println(format!("⚙ Compiling {} ", artifact.package_id));
                // The artifact that reports an executable path is the final binary.
                if let Some(executable) = artifact.executable {
                    output_location = Some(executable.into());
                }
            }
            Message::BuildScriptExecuted(script) => {
                let _package_id = script.package_id.to_string();
            }
            Message::BuildFinished(finished) => {
                if finished.success {
                    output.finish_with_message("👑 Build done.");
                } else {
                    output.finish_with_message("❌ Build failed.");
                    return Err(anyhow::anyhow!("Build failed"));
                }
            }
            _ => {
                // Unknown message
            }
        }
    }

    Ok(CargoBuildResult {
        warnings: warning_messages,
        output_location,
    })
}
/// Render the final `index.html` for a web build.
///
/// Starts from the crate's own `index.html` (or the bundled default),
/// injects configured style/script resources, the dev-server reconnect
/// script when serving, and the wasm bootstrap snippet, then resolves the
/// `{app_name}`, `{base_path}` and `{app_title}` placeholders. The order
/// matters: injected snippets themselves contain placeholders.
pub fn gen_page(config: &CrateConfig, manifest: Option<&AssetManifest>, serve: bool) -> String {
    let _guard = AssetConfigDropGuard::new();

    // Prefer a user-provided index.html at the crate root; fall back to the default.
    let crate_root = crate_root().unwrap();
    let custom_html_file = crate_root.join("index.html");
    let mut html = if custom_html_file.is_file() {
        let mut buf = String::new();
        let mut file = File::open(custom_html_file).unwrap();
        if file.read_to_string(&mut buf).is_ok() {
            buf
        } else {
            String::from(include_str!("./assets/index.html"))
        }
    } else {
        String::from(include_str!("./assets/index.html"))
    };

    let resources = config.dioxus_config.web.resource.clone();

    let mut style_list = resources.style.unwrap_or_default();
    let mut script_list = resources.script.unwrap_or_default();

    // Dev-only resources are injected only while serving.
    if serve {
        let mut dev_style = resources.dev.style.clone();
        let mut dev_script = resources.dev.script.clone();
        style_list.append(&mut dev_style);
        script_list.append(&mut dev_script);
    }

    let mut style_str = String::new();
    for style in style_list {
        style_str.push_str(&format!(
            "<link rel=\"stylesheet\" href=\"{}\">\n",
            &style.to_str().unwrap(),
        ))
    }

    // The tailwind tool writes its output to a fixed path under the base path.
    if config
        .dioxus_config
        .application
        .tools
        .clone()
        .contains_key("tailwindcss")
    {
        style_str.push_str("<link rel=\"stylesheet\" href=\"/{base_path}/tailwind.css\">\n");
    }

    if let Some(manifest) = manifest {
        style_str.push_str(&manifest.head());
    }

    replace_or_insert_before("{style_include}", &style_str, "</head", &mut html);

    let mut script_str = String::new();
    for script in script_list {
        script_str.push_str(&format!(
            "<script src=\"{}\"></script>\n",
            &script.to_str().unwrap(),
        ))
    }

    replace_or_insert_before("{script_include}", &script_str, "</body", &mut html);

    // The reconnect script lets the page re-attach to the dev server after rebuilds.
    if serve {
        html += &format!("<script>{}</script>", dioxus_hot_reload::RECONNECT_SCRIPT);
    }

    let base_path = match &config.dioxus_config.web.app.base_path {
        Some(path) => path.trim_matches('/'),
        None => ".",
    };
    let app_name = &config.dioxus_config.application.name;
    // Check if a script already exists
    if html.contains("{app_name}") && html.contains("{base_path}") {
        html = html.replace("{app_name}", app_name);
        html = html.replace("{base_path}", base_path);
    } else {
        // If not, insert the script
        html = html.replace(
            "</body",
            &format!(
                r#"<script type="module">
    import init from "/{base_path}/assets/dioxus/{app_name}.js";
    init("/{base_path}/assets/dioxus/{app_name}_bg.wasm").then(wasm => {{
      if (wasm.__wbindgen_start == undefined) {{
        wasm.main();
      }}
    }});
    </script>
    </body"#
            ),
        );
        // And try to insert preload links for the wasm and js files
        html = html.replace(
            "</head",
            &format!(
                r#"<link rel="preload" href="/{base_path}/assets/dioxus/{app_name}_bg.wasm" as="fetch" type="application/wasm" crossorigin="">
    <link rel="preload" href="/{base_path}/assets/dioxus/{app_name}.js" as="script">
    </head"#
            ),
        );
    }

    let title = config.dioxus_config.web.app.title.clone();
    replace_or_insert_before("{app_title}", &title, "</title", &mut html);

    html
}
/// Substitute `replace` with `with` everywhere it occurs; when `replace` is
/// absent, splice `with` in front of every occurrence of `or_insert_before`.
fn replace_or_insert_before(
    replace: &str,
    with: &str,
    or_insert_before: &str,
    content: &mut String,
) {
    let rewritten = match content.contains(replace) {
        true => content.replace(replace, with),
        false => content.replace(or_insert_before, &format!("{with}{or_insert_before}")),
    };
    *content = rewritten;
}
// this function will build some assets file
// like sass tool resources
// this function will return a array which file don't need copy to out_dir.
/// Run the configured asset tools (currently sass) over the crate's assets.
///
/// Compiles `.scss`/`.sass` inputs into `.css` files inside the out dir and
/// returns the list of source files that were handled here, so the plain
/// asset copy can skip them.
fn build_assets(config: &CrateConfig) -> Result<Vec<PathBuf>> {
    let mut result = vec![];

    let dioxus_config = &config.dioxus_config;
    let dioxus_tools = dioxus_config.application.tools.clone();

    // check sass tool state
    let sass = Tool::Sass;
    if sass.is_installed() && dioxus_tools.contains_key("sass") {
        let sass_conf = dioxus_tools.get("sass").unwrap();
        if let Some(tab) = sass_conf.as_table() {
            // Resolve the `source_map` toggle; source maps are emitted by default.
            let source_map = tab.contains_key("source_map");
            let source_map = if source_map && tab.get("source_map").unwrap().is_bool() {
                if tab.get("source_map").unwrap().as_bool().unwrap_or_default() {
                    "--source-map"
                } else {
                    "--no-source-map"
                }
            } else {
                "--source-map"
            };

            // `input` may be "*" (scan the asset dir), a single path, or a list.
            if tab.contains_key("input") {
                if tab.get("input").unwrap().is_str() {
                    let file = tab.get("input").unwrap().as_str().unwrap().trim();

                    if file == "*" {
                        // if the sass open auto, we need auto-check the assets dir.
                        let asset_dir = config.asset_dir().clone();
                        if asset_dir.is_dir() {
                            for entry in walkdir::WalkDir::new(&asset_dir)
                                .into_iter()
                                .filter_map(|e| e.ok())
                            {
                                let temp = entry.path();
                                if temp.is_file() {
                                    let suffix = temp.extension();
                                    if suffix.is_none() {
                                        continue;
                                    }
                                    let suffix = suffix.unwrap().to_str().unwrap();
                                    if suffix == "scss" || suffix == "sass" {
                                        // if file suffix is `scss` / `sass` we need transform it.
                                        let out_file = format!(
                                            "{}.css",
                                            temp.file_stem().unwrap().to_str().unwrap()
                                        );
                                        // Mirror the asset-relative directory layout into out_dir.
                                        let target_path = config
                                            .out_dir()
                                            .join(
                                                temp.strip_prefix(&asset_dir)
                                                    .unwrap()
                                                    .parent()
                                                    .unwrap(),
                                            )
                                            .join(out_file);
                                        let res = sass.call(
                                            "sass",
                                            vec![
                                                temp.to_str().unwrap(),
                                                target_path.to_str().unwrap(),
                                                source_map,
                                            ],
                                        );
                                        if res.is_ok() {
                                            result.push(temp.to_path_buf());
                                        }
                                    }
                                }
                            }
                        }
                    } else {
                        // just transform one file.
                        // Accept both "/styles/x.scss" and "styles/x.scss".
                        let relative_path = if &file[0..1] == "/" {
                            &file[1..file.len()]
                        } else {
                            file
                        };
                        let path = config.asset_dir().join(relative_path);
                        let out_file =
                            format!("{}.css", path.file_stem().unwrap().to_str().unwrap());
                        let target_path = config
                            .out_dir()
                            .join(PathBuf::from(relative_path).parent().unwrap())
                            .join(out_file);
                        if path.is_file() {
                            let res = sass.call(
                                "sass",
                                vec![
                                    path.to_str().unwrap(),
                                    target_path.to_str().unwrap(),
                                    source_map,
                                ],
                            );
                            if res.is_ok() {
                                result.push(path);
                            } else {
                                // NOTE(review): only this branch logs a failed sass
                                // call; the "*" and list branches skip failures
                                // silently — confirm the asymmetry is intended.
                                tracing::error!("{:?}", res);
                            }
                        }
                    }
                } else if tab.get("input").unwrap().is_array() {
                    // check files list.
                    let list = tab.get("input").unwrap().as_array().unwrap();
                    for i in list {
                        if i.is_str() {
                            let path = i.as_str().unwrap();
                            let relative_path = if &path[0..1] == "/" {
                                &path[1..path.len()]
                            } else {
                                path
                            };
                            let path = config.asset_dir().join(relative_path);
                            let out_file =
                                format!("{}.css", path.file_stem().unwrap().to_str().unwrap());
                            let target_path = config
                                .out_dir()
                                .join(PathBuf::from(relative_path).parent().unwrap())
                                .join(out_file);
                            if path.is_file() {
                                let res = sass.call(
                                    "sass",
                                    vec![
                                        path.to_str().unwrap(),
                                        target_path.to_str().unwrap(),
                                        source_map,
                                    ],
                                );
                                if res.is_ok() {
                                    result.push(path);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    // SASS END

    Ok(result)
}

View file

@ -0,0 +1,224 @@
use super::web::install_web_build_tooling;
use super::BuildRequest;
use super::BuildResult;
use crate::assets::copy_dir_to;
use crate::assets::create_assets_head;
use crate::assets::{asset_manifest, process_assets, AssetConfigDropGuard};
use crate::builder::progress::build_cargo;
use crate::builder::progress::CargoBuildResult;
use crate::builder::progress::Stage;
use crate::builder::progress::UpdateBuildProgress;
use crate::builder::progress::UpdateStage;
use crate::link::LinkCommand;
use crate::Result;
use anyhow::Context;
use futures_channel::mpsc::UnboundedSender;
use manganis_cli_support::ManganisSupportGuard;
use std::fs::create_dir_all;
impl BuildRequest {
/// Assemble the cargo argument list for this build request.
///
/// Flags are appended in a fixed order: release/verbosity, profile,
/// features, target triple (always wasm32 for web builds), any passthrough
/// cargo args, the target-kind flag, and finally the executable name.
pub(crate) fn build_arguments(&self) -> Vec<String> {
    let build = &self.build_arguments;
    let mut args: Vec<String> = Vec::new();

    if build.release {
        args.push("--release".into());
    }
    // Verbosity is mutually exclusive: either chatty or quiet.
    args.push(if build.verbose { "--verbose" } else { "--quiet" }.to_string());

    if let Some(profile) = &build.profile {
        args.extend(["--profile".to_string(), profile.to_string()]);
    }

    let features = &build.target_args.features;
    if !features.is_empty() {
        args.extend(["--features".to_string(), features.join(" ")]);
    }

    // Web builds always target wasm32; otherwise honor an explicit --target.
    let target = self
        .web
        .then_some("wasm32-unknown-unknown")
        .or(build.target_args.target.as_deref());
    if let Some(target) = target {
        args.extend(["--target".to_string(), target.to_string()]);
    }

    args.extend(build.cargo_args.iter().cloned());

    match self.dioxus_crate.executable_type() {
        krates::cm::TargetKind::Bin => args.push("--bin".to_string()),
        krates::cm::TargetKind::Lib => args.push("--lib".to_string()),
        krates::cm::TargetKind::Example => args.push("--example".to_string()),
        _ => {}
    }
    args.push(self.dioxus_crate.executable_name().to_string());

    args
}
/// Construct the `cargo rustc` invocation, returning it together with the
/// plain cargo arguments (so they can be replayed for the linker intercept).
fn prepare_build_command(&self) -> Result<(tokio::process::Command, Vec<String>)> {
    let mut cmd = tokio::process::Command::new("cargo");
    cmd.arg("rustc")
        .current_dir(self.dioxus_crate.crate_dir())
        .arg("--message-format")
        .arg("json-diagnostic-rendered-ansi");

    // Fullstack builds get a private target dir so halves compile in parallel.
    if let Some(target_dir) = &self.target_dir {
        cmd.env("CARGO_TARGET_DIR", target_dir);
    }

    let cargo_args = self.build_arguments();
    cmd.args(&cargo_args);

    // Everything after `--` is forwarded to rustc itself.
    cmd.arg("--").args(self.rust_flags.clone());

    Ok((cmd, cargo_args))
}
/// Run the full build for this request, streaming progress through `progress`.
///
/// Sets up config/asset guards, installs web tooling when targeting wasm,
/// runs cargo with a unit-count-based progress estimate, then post-processes
/// the artifacts (assets, output layout, web bundling).
pub async fn build(
    &self,
    mut progress: UnboundedSender<UpdateBuildProgress>,
) -> Result<BuildResult> {
    tracing::info!("🚅 Running build [Desktop] command...");

    // Set up runtime guards
    let _guard = dioxus_cli_config::__private::save_config(&self.dioxus_crate.dioxus_config);
    let _manganis_support = ManganisSupportGuard::default();
    let _asset_guard = AssetConfigDropGuard::new();

    // If this is a web, build make sure we have the web build tooling set up
    if self.web {
        install_web_build_tooling(&mut progress).await?;
    }

    // Create the build command
    let (cmd, cargo_args) = self.prepare_build_command()?;

    // Run the build command with a pretty loader
    let crate_count = self.get_unit_count_estimate().await;
    let cargo_result = build_cargo(crate_count, cmd, &mut progress).await?;

    // Post process the build result
    let build_result = self
        .post_process_build(cargo_args, &cargo_result, &mut progress)
        .await
        .context("Failed to post process build")?;

    tracing::info!(
        "🚩 Build completed: [./{}]",
        self.dioxus_crate
            .dioxus_config
            .application
            .out_dir
            .clone()
            .display()
    );

    // Signal the TUI/stdout renderer that the pipeline is complete.
    _ = progress.start_send(UpdateBuildProgress {
        stage: Stage::Finished,
        update: UpdateStage::Start,
    });

    Ok(build_result)
}
/// Finalize a successful cargo run: collect Manganis assets via the linker
/// intercept, move the executable into the dist dir, copy the asset
/// directory, and (for web) run the wasm-specific post-processing.
async fn post_process_build(
    &self,
    cargo_args: Vec<String>,
    cargo_build_result: &CargoBuildResult,
    progress: &mut UnboundedSender<UpdateBuildProgress>,
) -> Result<BuildResult> {
    _ = progress.start_send(UpdateBuildProgress {
        stage: Stage::OptimizingAssets,
        update: UpdateStage::Start,
    });

    // Start Manganis linker intercept.
    let linker_args = vec![format!("{}", self.dioxus_crate.out_dir().display())];
    manganis_cli_support::start_linker_intercept(
        &LinkCommand::command_name(),
        cargo_args,
        Some(linker_args),
    )?;
    let file_name = self.dioxus_crate.executable_name();

    // Move the final output executable into the dist folder
    let out_dir = self.dioxus_crate.out_dir();
    if !out_dir.is_dir() {
        create_dir_all(&out_dir)?;
    }

    // Pick the platform-appropriate extension for the artifact.
    let mut output_path = out_dir.join(file_name);
    if self.web {
        output_path.set_extension("wasm");
    } else if cfg!(windows) {
        output_path.set_extension("exe");
    }
    if let Some(res_path) = &cargo_build_result.output_location {
        std::fs::copy(res_path, &output_path)?;
    }

    self.copy_assets_dir()?;

    let assets = if !self.build_arguments.skip_assets {
        let assets = asset_manifest(&self.dioxus_crate);
        // Collect assets
        process_assets(&self.dioxus_crate, &assets, progress)?;
        // Create the __assets_head.html file for bundling
        create_assets_head(&self.dioxus_crate, &assets)?;
        Some(assets)
    } else {
        None
    };

    // Create the build result
    let build_result = BuildResult {
        executable: output_path,
        web: self.web,
        platform: self
            .build_arguments
            .platform
            .expect("To be resolved by now"),
    };

    // If this is a web build, run web post processing steps
    if self.web {
        self.post_process_web_build(&build_result, assets.as_ref(), progress)
            .await?;
    }

    Ok(build_result)
}
/// Mirror the crate's public asset directory into the output (dist) dir,
/// pre-compressing files only for web builds that request it.
pub fn copy_assets_dir(&self) -> anyhow::Result<()> {
    tracing::info!("Copying public assets to the output directory...");
    let out_dir = self.dioxus_crate.out_dir();
    let asset_dir = self.dioxus_crate.asset_dir();

    // Nothing to do when the crate ships no public assets.
    if !asset_dir.is_dir() {
        return Ok(());
    }

    // Only pre-compress the assets from the web build. Desktop assets are not served, so they don't need to be pre_compressed
    let pre_compress = self.web
        && self
            .dioxus_crate
            .should_pre_compress_web_assets(self.build_arguments.release);
    copy_dir_to(asset_dir, out_dir, pre_compress)?;
    Ok(())
}
}

View file

@ -0,0 +1,109 @@
use crate::builder::Build;
use crate::dioxus_crate::DioxusCrate;
use dioxus_cli_config::Platform;
use crate::builder::BuildRequest;
use std::path::PathBuf;
/// rustc flags for the client (wasm) half of a fullstack build: strip
/// debuginfo to keep artifacts small and iteration fast.
static CLIENT_RUST_FLAGS: &[&str] = &["-Cdebuginfo=none", "-Cstrip=debuginfo"];
// The `opt-level=2` increases build times, but can noticeably decrease time
// between saving changes and being able to interact with an app. The "overall"
// time difference (between having and not having the optimization) can be
// almost imperceptible (~1 s) but also can be very noticeable (~6 s) — depends
// on setup (hardware, OS, browser, idle load).
static SERVER_RUST_FLAGS: &[&str] = &["-O"];
/// Keeps `debug_assert!`s active even in otherwise-optimized dev builds.
static DEBUG_RUST_FLAG: &str = "-Cdebug-assertions";
/// Append `-Cdebug-assertions` for non-release builds.
fn add_debug_rust_flags(build: &Build, flags: &mut Vec<String>) {
    if build.release {
        return;
    }
    flags.push(DEBUG_RUST_FLAG.to_string());
}
/// Compute the rustc flags for one half of a fullstack build.
fn fullstack_rust_flags(build: &Build, base_flags: &[&str]) -> Vec<String> {
    // If we are forcing debug mode, don't add any debug flags
    // NOTE(review): `force_debug` returns an *empty* flag set, discarding
    // `base_flags` entirely rather than only skipping the debug flag —
    // confirm this is the intended behavior.
    if build.force_debug {
        return Default::default();
    }

    let mut rust_flags = base_flags.iter().map(ToString::to_string).collect();
    add_debug_rust_flags(build, &mut rust_flags);
    rust_flags
}
// Fullstack builds run the server and client builds parallel by default
// To make them run in parallel, we need to set up different target directories for the server and client within /.dioxus
fn get_target_directory(build: &Build, target: PathBuf) -> Option<PathBuf> {
    if build.force_sequential {
        None
    } else {
        Some(target)
    }
}
impl BuildRequest {
/// Expand one fullstack build into its two halves: the native server build
/// followed by the wasm client build.
pub(crate) fn new_fullstack(
    config: DioxusCrate,
    build_arguments: Build,
    serve: bool,
) -> Vec<Self> {
    let server = Self::new_server(serve, &config, &build_arguments);
    let client = Self::new_client(serve, &config, &build_arguments);
    vec![server, client]
}
/// Shared constructor for the two halves of a fullstack build.
///
/// Resolves the platform from `web`, appends `feature` (the server- or
/// client-specific cargo feature) to the build's feature list, and computes
/// the final rustc flags and target directory for this half.
fn new_with_target_directory_rust_flags_and_features(
    serve: bool,
    config: &DioxusCrate,
    build: &Build,
    target_directory: PathBuf,
    rust_flags: &[&str],
    feature: String,
    web: bool,
) -> Self {
    let config = config.clone();
    let mut build = build.clone();

    build.platform = Some(if web {
        Platform::Web
    } else {
        Platform::Desktop
    });

    // Set the target directory we are building the server in
    let target_dir = get_target_directory(&build, target_directory);

    // Add the server feature to the features we pass to the build
    build.target_args.features.push(feature);

    // Add the server flags to the build arguments
    let rust_flags = fullstack_rust_flags(&build, rust_flags);

    Self {
        web,
        serve,
        // `build` is already our own clone and unused past this point, so
        // move it instead of cloning a second time.
        build_arguments: build,
        dioxus_crate: config,
        rust_flags,
        target_dir,
    }
}
/// Build request for the server half of a fullstack build: native target,
/// server feature, optimized (`-O`) rustc flags.
fn new_server(serve: bool, config: &DioxusCrate, build: &Build) -> Self {
    Self::new_with_target_directory_rust_flags_and_features(
        serve,
        config,
        build,
        config.server_target_dir(),
        SERVER_RUST_FLAGS,
        build.target_args.server_feature.clone(),
        false,
    )
}

/// Build request for the client half of a fullstack build: wasm target,
/// client feature, debuginfo stripped for fast iteration.
fn new_client(serve: bool, config: &DioxusCrate, build: &Build) -> Self {
    Self::new_with_target_directory_rust_flags_and_features(
        serve,
        config,
        build,
        config.client_target_dir(),
        CLIENT_RUST_FLAGS,
        build.target_args.client_feature.clone(),
        true,
    )
}
}

View file

@ -0,0 +1,145 @@
use crate::build::Build;
use crate::cli::serve::ServeArguments;
use crate::dioxus_crate::DioxusCrate;
use crate::Result;
use dioxus_cli_config::{Platform, RuntimeCLIArguments};
use futures_util::stream::select_all;
use futures_util::StreamExt;
use std::net::SocketAddr;
use std::{path::PathBuf, process::Stdio};
use tokio::process::{Child, Command};
mod cargo;
mod fullstack;
mod prepare_html;
mod progress;
mod web;
pub use progress::{BuildMessage, MessageType, Stage, UpdateBuildProgress, UpdateStage};
/// A request for a project to be built
///
/// A single CLI invocation may expand into several requests (e.g. fullstack
/// builds produce one server request and one client request).
pub struct BuildRequest {
    /// Whether the build is for serving the application
    pub serve: bool,
    /// Whether this is a web build
    pub web: bool,
    /// The configuration for the crate we are building
    pub dioxus_crate: DioxusCrate,
    /// The arguments for the build
    pub build_arguments: Build,
    /// The rustc flags to pass to the build
    pub rust_flags: Vec<String>,
    /// The target directory for the build
    pub target_dir: Option<PathBuf>,
}
impl BuildRequest {
/// Expand CLI arguments into one or more concrete build requests.
///
/// Web and desktop map to a single request; fullstack and static
/// generation fan out into a server build plus a client build.
pub fn create(
    serve: bool,
    dioxus_crate: &DioxusCrate,
    build_arguments: impl Into<Build>,
) -> Vec<Self> {
    let build_arguments = build_arguments.into();
    let dioxus_crate = dioxus_crate.clone();
    let platform = build_arguments.platform();
    match platform {
        Platform::Web | Platform::Desktop => vec![Self {
            serve,
            web: matches!(platform, Platform::Web),
            dioxus_crate,
            build_arguments,
            rust_flags: Default::default(),
            target_dir: Default::default(),
        }],
        Platform::StaticGeneration | Platform::Fullstack => {
            Self::new_fullstack(dioxus_crate, build_arguments, serve)
        }
        _ => unimplemented!("Unknown platform: {platform:?}"),
    }
}
/// Drive several build requests concurrently, multiplexing their progress
/// messages onto stdout, and collect every build's result.
pub async fn build_all_parallel(build_requests: Vec<BuildRequest>) -> Result<Vec<BuildResult>> {
    let multi_platform_build = build_requests.len() > 1;
    let mut build_progress = Vec::new();
    let mut set = tokio::task::JoinSet::new();
    for build_request in build_requests {
        let (tx, rx) = futures_channel::mpsc::unbounded();
        build_progress.push((build_request.build_arguments.platform(), rx));
        set.spawn(async move { build_request.build(tx).await });
    }

    // Watch the build progress as it comes in. The merged stream is built
    // once instead of being re-created on every message (as it previously
    // was inside the loop), which avoids re-wrapping every receiver per
    // update; `select_all` ends once all senders are dropped.
    let mut messages = select_all(
        build_progress
            .iter_mut()
            .map(|(platform, rx)| rx.map(move |update| (*platform, update))),
    );
    while let Some((platform, update)) = messages.next().await {
        // Prefix messages with their platform when several builds interleave.
        if multi_platform_build {
            print!("{platform} build: ");
        }
        update.to_std_out();
    }

    // Join every spawned build and surface the first failure.
    let mut all_results = Vec::new();
    while let Some(result) = set.join_next().await {
        let result = result
            .map_err(|_| crate::Error::Unique("Failed to build project".to_owned()))??;
        all_results.push(result);
    }
    Ok(all_results)
}
}
/// The output of a single finished build: where the artifact landed and
/// which platform it was built for.
#[derive(Debug, Clone)]
pub(crate) struct BuildResult {
    pub executable: PathBuf,
    pub web: bool, // web artifacts are served rather than executed directly
    pub platform: Platform,
}
impl BuildResult {
    /// Open the executable if this is a native build
    ///
    /// Web builds return `Ok(None)` — the browser loads the app instead.
    /// Native children receive the serve/fullstack addresses through the
    /// private serve env var, run from the workspace folder, and are killed
    /// when the returned handle is dropped.
    pub fn open(
        &self,
        serve: &ServeArguments,
        fullstack_address: Option<SocketAddr>,
    ) -> std::io::Result<Option<Child>> {
        if self.web {
            return Ok(None);
        }
        let arguments = RuntimeCLIArguments::new(serve.address.address(), fullstack_address);
        let executable = self.executable.canonicalize()?;

        // This is the /dist folder generally
        let output_folder = executable.parent().unwrap();

        // This is the workspace folder
        let workspace_folder = output_folder.parent().unwrap();

        Ok(Some(
            Command::new(&executable)
                // When building the fullstack server, we need to forward the serve arguments (like port) to the fullstack server through env vars
                .env(
                    dioxus_cli_config::__private::SERVE_ENV,
                    serde_json::to_string(&arguments).unwrap(),
                )
                .stderr(Stdio::piped())
                .stdout(Stdio::piped())
                .kill_on_drop(true)
                .current_dir(workspace_folder)
                .spawn()?,
        ))
    }
}

View file

@ -0,0 +1,132 @@
//! Build the HTML file to load a web application. The index.html file may be created from scratch or modified from the `index.html` file in the crate root.
use super::BuildRequest;
use crate::Result;
use manganis_cli_support::AssetManifest;
use std::fmt::Write;
use std::path::Path;
/// Fallback `index.html`, used when the crate root does not provide its own.
const DEFAULT_HTML: &str = include_str!("../../assets/index.html");
impl BuildRequest {
/// Produce the final `index.html` for this build.
///
/// The steps run in a fixed order: resources and loading scripts are
/// injected *before* placeholder substitution because the injected
/// snippets themselves contain `{base_path}`/`{app_name}` placeholders.
pub(crate) fn prepare_html(&self, assets: Option<&AssetManifest>) -> Result<String> {
    let mut html = html_or_default(&self.dioxus_crate.crate_dir());

    // Inject any resources from the config into the html
    self.inject_resources(&mut html, assets)?;

    // Inject loading scripts if they are not already present
    self.inject_loading_scripts(&mut html);

    // Replace any special placeholders in the HTML with resolved values
    self.replace_template_placeholders(&mut html);

    let title = self.dioxus_crate.dioxus_config.web.app.title.clone();
    replace_or_insert_before("{app_title}", "</title", &title, &mut html);

    Ok(html)
}
/// Collect configured styles/scripts (plus dev-only ones when serving and
/// any Manganis head content) and splice them into the document head.
fn inject_resources(&self, html: &mut String, assets: Option<&AssetManifest>) -> Result<()> {
    let resources = &self.dioxus_crate.dioxus_config.web.resource;

    let mut style_list = resources.style.clone().unwrap_or_default();
    let mut script_list = resources.script.clone().unwrap_or_default();

    // Dev-only resources (e.g. hot-reload helpers) are injected only while serving.
    if self.serve {
        style_list.extend(resources.dev.style.iter().cloned());
        script_list.extend(resources.dev.script.iter().cloned());
    }

    let mut head_resources = String::new();
    for style in &style_list {
        writeln!(
            &mut head_resources,
            "<link rel=\"stylesheet\" href=\"{}\">",
            style.to_str().unwrap(),
        )?;
    }
    for script in &script_list {
        writeln!(
            &mut head_resources,
            "<script src=\"{}\"></script>",
            script.to_str().unwrap(),
        )?;
    }

    // Manganis-collected assets also contribute <head> content.
    if let Some(assets) = assets {
        head_resources.push_str(&assets.head());
    }

    replace_or_insert_before("{style_include}", "</head", &head_resources, html);

    Ok(())
}
/// Inject loading scripts if they are not already present
///
/// The injected snippets intentionally contain `{base_path}`/`{app_name}`
/// placeholders; `replace_template_placeholders` resolves them afterwards.
fn inject_loading_scripts(&self, html: &mut String) {
    // If it looks like we are already loading wasm or the current build opted out of injecting loading scripts, don't inject anything
    if !self.build_arguments.inject_loading_scripts || html.contains("__wbindgen_start") {
        return;
    }

    // If not, insert the script
    *html = html.replace(
        "</body",
        r#"<script>
  // We can't use a module script here because we need to start the script immediately when streaming
  import("/{base_path}/assets/dioxus/{app_name}.js").then(
    ({ default: init }) => {
      init("/{base_path}/assets/dioxus/{app_name}_bg.wasm").then((wasm) => {
        if (wasm.__wbindgen_start == undefined) {
          wasm.main();
        }
      });
    }
  );
  </script></body"#,
    );

    // And try to insert preload links for the wasm and js files
    *html = html.replace(
        "</head",
        r#"<link rel="preload" href="/{base_path}/assets/dioxus/{app_name}_bg.wasm" as="fetch" type="application/wasm" crossorigin="">
  <link rel="preload" href="/{base_path}/assets/dioxus/{app_name}.js" as="script">
  </head"#);
}
/// Replace any special placeholders in the HTML with resolved values
fn replace_template_placeholders(&self, html: &mut String) {
    let config = &self.dioxus_crate.dioxus_config;
    // Base path is substituted first, matching the original order.
    *html = html
        .replace("{base_path}", config.web.app.base_path())
        .replace("{app_name}", &config.application.name);
}
}
/// Read the html file from the crate root, falling back to the built-in default.
fn html_or_default(crate_root: &Path) -> String {
    match std::fs::read_to_string(crate_root.join("index.html")) {
        Ok(html) => html,
        Err(_) => DEFAULT_HTML.to_string(),
    }
}
/// Replace a string or insert the new contents before a marker
fn replace_or_insert_before(
    replace: &str,
    or_insert_before: &str,
    with: &str,
    content: &mut String,
) {
    if content.contains(replace) {
        *content = content.replace(replace, with);
        return;
    }
    // No placeholder present: splice the contents in front of the first marker.
    if let Some(pos) = content.find(or_insert_before) {
        content.insert_str(pos, with);
    }
}

View file

@ -0,0 +1,273 @@
//! Report progress about the build to the user. We use channels to report progress back to the CLI.
use anyhow::Context;
use cargo_metadata::{diagnostic::Diagnostic, Message};
use futures_channel::mpsc::UnboundedSender;
use serde::Deserialize;
use std::ops::Deref;
use std::path::PathBuf;
use std::process::Stdio;
use tokio::io::AsyncBufReadExt;
use tracing::Level;
use super::BuildRequest;
/// The ordered phases of a build; explicit discriminants back the
/// `Ord`/`PartialOrd` derives so later stages compare greater.
#[derive(Default, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub enum Stage {
    #[default]
    Initializing = 0,
    InstallingWasmTooling = 1,
    Compiling = 2,
    OptimizingWasm = 3,
    OptimizingAssets = 4,
    Finished = 5,
}
/// Expose a human-readable label for each stage.
///
/// NOTE(review): `Deref<Target = str>` on an enum is unconventional
/// ("deref polymorphism") — it lets `&Stage` coerce to `&str` wherever a
/// string is expected. Kept as-is since callers may rely on the coercion.
impl Deref for Stage {
    type Target = str;

    fn deref(&self) -> &Self::Target {
        match self {
            Stage::Initializing => "Initializing",
            Stage::InstallingWasmTooling => "Installing Wasm Tooling",
            Stage::Compiling => "Compiling",
            Stage::OptimizingWasm => "Optimizing Wasm",
            Stage::OptimizingAssets => "Optimizing Assets",
            Stage::Finished => "Finished",
        }
    }
}
impl std::fmt::Display for Stage {
    /// Renders the same label the `Deref<Target = str>` impl exposes
    /// (`&Stage` coerces to `&str`, so `write_str` accepts `self` directly).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self)
    }
}
/// A single progress event: which build stage it belongs to and what changed.
pub struct UpdateBuildProgress {
    pub stage: Stage,
    pub update: UpdateStage,
}
impl UpdateBuildProgress {
    /// Print this progress update to stdout (the plain, non-TUI rendering).
    pub fn to_std_out(&self) {
        match &self.update {
            UpdateStage::Start => println!("--- {} ---", self.stage),
            UpdateStage::AddMessage(message) => {
                // Cargo diagnostics carry a pre-rendered (ansi) form;
                // plain text messages are printed verbatim.
                match &message.message {
                    MessageType::Cargo(diagnostic) => {
                        println!("{}", diagnostic.rendered.clone().unwrap_or_default());
                    }
                    MessageType::Text(text) => println!("{}", text),
                }
            }
            UpdateStage::SetProgress(progress) => {
                println!("Build progress {:0.0}%", progress * 100.0);
            }
            UpdateStage::Failed(message) => {
                println!("Build failed: {}", message);
            }
        }
    }
}
/// What happened within a stage.
#[derive(Debug, Clone, PartialEq)]
pub enum UpdateStage {
    /// The stage has begun.
    Start,
    /// A log/diagnostic message was produced.
    AddMessage(BuildMessage),
    /// Fractional progress (clamped to `0.0..=1.0` by the producer).
    SetProgress(f64),
    /// The stage failed with the given reason.
    Failed(String),
}
/// A single log line surfaced to the user, with its severity and origin.
#[derive(Debug, Clone, PartialEq)]
pub struct BuildMessage {
    pub level: Level,
    pub message: MessageType,
    pub source: Option<String>, // e.g. "cargo"; `None` for plain text lines
}
/// Either a structured cargo diagnostic or free-form text output.
#[derive(Debug, Clone, PartialEq)]
pub enum MessageType {
    Cargo(Diagnostic),
    Text(String),
}
impl From<Diagnostic> for BuildMessage {
fn from(message: Diagnostic) -> Self {
Self {
level: match message.level {
cargo_metadata::diagnostic::DiagnosticLevel::Ice
| cargo_metadata::diagnostic::DiagnosticLevel::FailureNote
| cargo_metadata::diagnostic::DiagnosticLevel::Error => Level::ERROR,
cargo_metadata::diagnostic::DiagnosticLevel::Warning => Level::WARN,
cargo_metadata::diagnostic::DiagnosticLevel::Note => Level::INFO,
cargo_metadata::diagnostic::DiagnosticLevel::Help => Level::DEBUG,
_ => Level::DEBUG,
},
source: Some("cargo".to_string()),
message: MessageType::Cargo(message),
}
}
}
/// Spawn the prepared cargo command and stream its JSON messages into
/// build-progress updates.
///
/// `crate_count` estimates total compilation units and is used to turn
/// "units compiled so far" into a percentage. Returns where the final
/// executable landed, when cargo reported one.
pub(crate) async fn build_cargo(
    crate_count: usize,
    mut cmd: tokio::process::Command,
    progress: &mut UnboundedSender<UpdateBuildProgress>,
) -> anyhow::Result<CargoBuildResult> {
    _ = progress.start_send(UpdateBuildProgress {
        stage: Stage::Compiling,
        update: UpdateStage::Start,
    });

    let mut child = cmd
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()
        .context("Failed to spawn cargo build")?;
    let stdout = child.stdout.take().unwrap();
    let stderr = child.stderr.take().unwrap();
    let stdout = tokio::io::BufReader::new(stdout);
    let stderr = tokio::io::BufReader::new(stderr);
    let mut output_location = None;

    let mut stdout = stdout.lines();
    let mut stderr = stderr.lines();
    let mut units_compiled = 0;
    let mut errors = Vec::new();
    loop {
        // Interleave whichever pipe has a line ready.
        // NOTE(review): when either pipe hits EOF (`Ok(None)`), the loop
        // exits even if the other pipe still has buffered lines — confirm
        // that is acceptable (cargo's JSON stream goes to stdout).
        let line = tokio::select! {
            line = stdout.next_line() => {
                line
            }
            line = stderr.next_line() => {
                line
            }
        };
        let Some(line) = line? else {
            break;
        };

        // Cargo can emit deeply nested diagnostics; lift serde's recursion
        // limit and fall back to a plain text line when parsing fails.
        let mut deserializer = serde_json::Deserializer::from_str(line.trim());
        deserializer.disable_recursion_limit();
        let message = Message::deserialize(&mut deserializer).unwrap_or(Message::TextLine(line));

        match message {
            Message::CompilerMessage(msg) => {
                let message = msg.message;
                _ = progress.start_send(UpdateBuildProgress {
                    stage: Stage::Compiling,
                    update: UpdateStage::AddMessage(message.clone().into()),
                });
                const WARNING_LEVELS: &[cargo_metadata::diagnostic::DiagnosticLevel] = &[
                    cargo_metadata::diagnostic::DiagnosticLevel::Help,
                    cargo_metadata::diagnostic::DiagnosticLevel::Note,
                    cargo_metadata::diagnostic::DiagnosticLevel::Warning,
                    cargo_metadata::diagnostic::DiagnosticLevel::Error,
                    cargo_metadata::diagnostic::DiagnosticLevel::FailureNote,
                    cargo_metadata::diagnostic::DiagnosticLevel::Ice,
                ];
                const FATAL_LEVELS: &[cargo_metadata::diagnostic::DiagnosticLevel] = &[
                    cargo_metadata::diagnostic::DiagnosticLevel::Error,
                    cargo_metadata::diagnostic::DiagnosticLevel::FailureNote,
                    cargo_metadata::diagnostic::DiagnosticLevel::Ice,
                ];
                // Accumulate rendered diagnostics so a fatal one reports
                // everything seen so far in a single error.
                if WARNING_LEVELS.contains(&message.level) {
                    if let Some(rendered) = message.rendered {
                        errors.push(rendered);
                    }
                }
                if FATAL_LEVELS.contains(&message.level) {
                    return Err(anyhow::anyhow!(errors.join("\n")));
                }
            }
            Message::CompilerArtifact(artifact) => {
                units_compiled += 1;
                // The artifact carrying an executable path is the final binary;
                // otherwise report fractional progress against the estimate.
                if let Some(executable) = artifact.executable {
                    output_location = Some(executable.into());
                } else {
                    let build_progress = units_compiled as f64 / crate_count as f64;
                    _ = progress.start_send(UpdateBuildProgress {
                        stage: Stage::Compiling,
                        update: UpdateStage::SetProgress((build_progress).clamp(0.0, 1.00)),
                    });
                }
            }
            Message::BuildScriptExecuted(_) => {
                units_compiled += 1;
            }
            Message::BuildFinished(finished) => {
                if !finished.success {
                    return Err(anyhow::anyhow!("Build failed"));
                }
            }
            Message::TextLine(line) => {
                // Non-JSON output (e.g. from build scripts) is forwarded verbatim.
                _ = progress.start_send(UpdateBuildProgress {
                    stage: Stage::Compiling,
                    update: UpdateStage::AddMessage(BuildMessage {
                        level: Level::DEBUG,
                        message: MessageType::Text(line),
                        source: None,
                    }),
                });
            }
            _ => {
                // Unknown message
            }
        }
    }

    Ok(CargoBuildResult { output_location })
}
/// Outcome of a successful [`build_cargo`] invocation.
pub(crate) struct CargoBuildResult {
    // Path of the produced executable, if cargo emitted one.
    pub(crate) output_location: Option<PathBuf>,
}
impl BuildRequest {
    /// Try to get the unit graph for the crate. This is a nightly only feature which may not be available with the current version of rustc the user has installed.
    async fn get_unit_count(&self) -> Option<usize> {
        // Minimal mirror of cargo's unit-graph JSON: only the unit count matters.
        #[derive(Debug, Deserialize)]
        struct UnitGraph {
            units: Vec<serde_json::Value>,
        }
        // `--unit-graph` is nightly-only, hence the explicit `+nightly` toolchain.
        let mut cmd = tokio::process::Command::new("cargo");
        cmd.arg("+nightly");
        cmd.arg("build");
        cmd.arg("--unit-graph");
        cmd.arg("-Z").arg("unstable-options");
        cmd.args(self.build_arguments());
        let output = cmd
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .output()
            .await
            .ok()?;
        if !output.status.success() {
            // Nightly missing or flag unsupported — let the caller fall back.
            return None;
        }
        let output_text = String::from_utf8(output.stdout).ok()?;
        let graph: UnitGraph = serde_json::from_str(&output_text).ok()?;
        Some(graph.units.len())
    }
    /// Get an estimate of the number of units in the crate. If nightly rustc is not available, this will return an estimate of the number of units in the crate based on cargo metadata.
    /// TODO: always use https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#unit-graph once it is stable
    pub(crate) async fn get_unit_count_estimate(&self) -> usize {
        // Try to get it from nightly
        self.get_unit_count().await.unwrap_or_else(|| {
            // Otherwise, use cargo metadata
            // NOTE(review): 3.5 looks like an empirical fudge factor (not every
            // target in the dev-dependency graph gets compiled) — confirm.
            (self
                .dioxus_crate
                .krates
                .krates_filtered(krates::DepKind::Dev)
                .iter()
                .map(|k| k.targets.len())
                .sum::<usize>() as f64
                / 3.5) as usize
        })
    }
}

View file

@ -0,0 +1,182 @@
use super::BuildRequest;
use super::BuildResult;
use crate::assets::pre_compress_folder;
use crate::builder::progress::Stage;
use crate::builder::progress::UpdateBuildProgress;
use crate::builder::progress::UpdateStage;
use crate::error::{Error, Result};
use dioxus_cli_config::WasmOptLevel;
use futures_channel::mpsc::UnboundedSender;
use manganis_cli_support::AssetManifest;
use std::path::Path;
use tokio::process::Command;
use wasm_bindgen_cli_support::Bindgen;
// Attempt to automatically recover from a bindgen failure by updating the wasm-bindgen version
async fn update_wasm_bindgen_version() -> Result<()> {
let cli_bindgen_version = wasm_bindgen_shared::version();
tracing::info!("Attempting to recover from bindgen failure by setting the wasm-bindgen version to {cli_bindgen_version}...");
let output = Command::new("cargo")
.args([
"update",
"-p",
"wasm-bindgen",
"--precise",
&cli_bindgen_version,
])
.output()
.await;
let mut error_message = None;
if let Ok(output) = output {
if output.status.success() {
tracing::info!("Successfully updated wasm-bindgen to {cli_bindgen_version}");
return Ok(());
} else {
error_message = Some(output);
}
}
if let Some(output) = error_message {
tracing::error!("Failed to update wasm-bindgen: {:#?}", output);
}
Err(Error::BuildFailed(format!("WASM bindgen build failed!\nThis is probably due to the Bindgen version, dioxus-cli is using `{cli_bindgen_version}` which is not compatible with your crate.\nPlease reinstall the dioxus cli to fix this issue.\nYou can reinstall the dioxus cli by running `cargo install dioxus-cli --force` and then rebuild your project")))
}
/// Check if the wasm32-unknown-unknown target is installed and try to install it if not
pub(crate) async fn install_web_build_tooling(
    progress: &mut UnboundedSender<UpdateBuildProgress>,
) -> Result<()> {
    // If the user has rustup, we can check if the wasm32-unknown-unknown target is installed
    // Otherwise we can just assume it is installed - which is not great...
    // Eventually we can poke at the errors and let the user know they need to install the target
    if let Ok(wasm_check_command) = Command::new("rustup").args(["show"]).output().await {
        let wasm_check_output = String::from_utf8(wasm_check_command.stdout).unwrap();
        if !wasm_check_output.contains("wasm32-unknown-unknown") {
            // Only report progress when an install is actually attempted.
            _ = progress.start_send(UpdateBuildProgress {
                stage: Stage::InstallingWasmTooling,
                update: UpdateStage::Start,
            });
            tracing::info!("wasm32-unknown-unknown target not detected, installing..");
            // NOTE(review): `?` only propagates a failure to spawn rustup; the
            // exit status of `rustup target add` itself is ignored — confirm
            // whether an install failure should surface to the user here.
            let _ = Command::new("rustup")
                .args(["target", "add", "wasm32-unknown-unknown"])
                .output()
                .await?;
        }
    }
    Ok(())
}
impl BuildRequest {
    /// Run wasm-bindgen over the compiled wasm binary, writing the JS glue and
    /// processed `.wasm` into `bindgen_outdir`.
    ///
    /// On failure this attempts one recovery: pinning the project's
    /// wasm-bindgen dependency to the CLI's own version, then retrying once.
    async fn run_wasm_bindgen(&self, input_path: &Path, bindgen_outdir: &Path) -> Result<()> {
        tracing::info!("Running wasm-bindgen");
        let input_path = input_path.to_path_buf();
        let bindgen_outdir = bindgen_outdir.to_path_buf();
        // Keep debug info when explicitly configured, or for any non-release build.
        let keep_debug =
            self.dioxus_crate.dioxus_config.web.wasm_opt.debug || (!self.build_arguments.release);
        let name = self.dioxus_crate.dioxus_config.application.name.clone();
        let run_wasm_bindgen = move || {
            // [3] Bindgen the final binary for use easy linking
            let mut bindgen_builder = Bindgen::new();
            bindgen_builder
                .input_path(&input_path)
                .web(true)
                .unwrap()
                .debug(keep_debug)
                .demangle(keep_debug)
                .keep_debug(keep_debug)
                .reference_types(true)
                .remove_name_section(!keep_debug)
                .remove_producers_section(!keep_debug)
                .out_name(&name)
                .generate(&bindgen_outdir)
                .unwrap();
        };
        // Bindgen is CPU-bound, so run it off the async executor.
        let bindgen_result = tokio::task::spawn_blocking(run_wasm_bindgen.clone()).await;
        // WASM bindgen requires the exact version of the bindgen schema to match the version the CLI was built with
        // If we get an error, we can try to recover by pinning the user's wasm-bindgen version to the version we used
        // NOTE(review): the retry runs the closure inline and its `.unwrap()`s
        // will panic (not return Err) if the retry also fails — confirm intent.
        if let Err(err) = bindgen_result {
            tracing::error!("Bindgen build failed: {:?}", err);
            update_wasm_bindgen_version().await?;
            run_wasm_bindgen();
        }
        Ok(())
    }

    /// Post process the WASM build artifacts: write `index.html`, run
    /// wasm-bindgen, optionally run wasm-opt (release only), and optionally
    /// pre-compress the output folder.
    pub(crate) async fn post_process_web_build(
        &self,
        build_result: &BuildResult,
        assets: Option<&AssetManifest>,
        progress: &mut UnboundedSender<UpdateBuildProgress>,
    ) -> Result<()> {
        _ = progress.start_send(UpdateBuildProgress {
            stage: Stage::OptimizingWasm,
            update: UpdateStage::Start,
        });
        // Create the index.html file
        let html = self.prepare_html(assets)?;
        let html_path = self.dioxus_crate.out_dir().join("index.html");
        std::fs::write(&html_path, html)?;
        // Find the wasm file
        let output_location = build_result.executable.clone();
        let input_path = output_location.with_extension("wasm");
        // Create the directory where the bindgen output will be placed
        let bindgen_outdir = self.dioxus_crate.out_dir().join("assets").join("dioxus");
        // Run wasm-bindgen
        self.run_wasm_bindgen(&input_path, &bindgen_outdir).await?;
        // Run wasm-opt if this is a release build
        if self.build_arguments.release {
            tracing::info!("Running optimization with wasm-opt...");
            let mut options = match self.dioxus_crate.dioxus_config.web.wasm_opt.level {
                WasmOptLevel::Z => {
                    wasm_opt::OptimizationOptions::new_optimize_for_size_aggressively()
                }
                WasmOptLevel::S => wasm_opt::OptimizationOptions::new_optimize_for_size(),
                WasmOptLevel::Zero => wasm_opt::OptimizationOptions::new_opt_level_0(),
                WasmOptLevel::One => wasm_opt::OptimizationOptions::new_opt_level_1(),
                WasmOptLevel::Two => wasm_opt::OptimizationOptions::new_opt_level_2(),
                WasmOptLevel::Three => wasm_opt::OptimizationOptions::new_opt_level_3(),
                WasmOptLevel::Four => wasm_opt::OptimizationOptions::new_opt_level_4(),
            };
            let wasm_file = bindgen_outdir.join(format!(
                "{}_bg.wasm",
                self.dioxus_crate.dioxus_config.application.name
            ));
            let old_size = wasm_file.metadata()?.len();
            options
                // WASM bindgen relies on reference types
                .enable_feature(wasm_opt::Feature::ReferenceTypes)
                .debug_info(self.dioxus_crate.dioxus_config.web.wasm_opt.debug)
                .run(&wasm_file, &wasm_file)
                .map_err(|err| Error::Other(anyhow::anyhow!(err)))?;
            let new_size = wasm_file.metadata()?.len();
            // Fixed: `{:2}` was a minimum *width* of 2, not 2 decimal places —
            // `{:.2}` prints the percentage with two digits of precision.
            tracing::info!(
                "wasm-opt reduced WASM size from {} to {} ({:.2}%)",
                old_size,
                new_size,
                (new_size as f64 - old_size as f64) / old_size as f64 * 100.0
            );
        }
        // If pre-compressing is enabled, we can pre_compress the wasm-bindgen output
        let pre_compress = self
            .dioxus_crate
            .should_pre_compress_web_assets(self.build_arguments.release);
        tokio::task::spawn_blocking(move || pre_compress_folder(&bindgen_outdir, pre_compress))
            .await
            .unwrap()?;
        Ok(())
    }
}

View file

@ -1,121 +1,145 @@
use crate::{assets::AssetConfigDropGuard, server::fullstack}; use anyhow::Context;
use dioxus_cli_config::Platform; use dioxus_cli_config::Platform;
use crate::{builder::BuildRequest, dioxus_crate::DioxusCrate};
use super::*; use super::*;
/// Build the Rust WASM app and all of its assets. /// Information about the target to build
#[derive(Clone, Debug, Parser)] #[derive(Clone, Debug, Default, Deserialize, Parser)]
pub struct TargetArgs {
/// Build for nightly [default: false]
#[clap(long)]
pub nightly: bool,
/// Build a example [default: ""]
#[clap(long)]
pub example: Option<String>,
/// Build a binary [default: ""]
#[clap(long)]
pub bin: Option<String>,
/// The package to build
#[clap(long)]
pub package: Option<String>,
/// Space separated list of features to activate
#[clap(long)]
pub features: Vec<String>,
/// The feature to use for the client in a fullstack app [default: "web"]
#[clap(long, default_value_t = { "web".to_string() })]
pub client_feature: String,
/// The feature to use for the server in a fullstack app [default: "server"]
#[clap(long, default_value_t = { "server".to_string() })]
pub server_feature: String,
/// Rustc platform triple
#[clap(long)]
pub target: Option<String>,
}
/// Build the Rust Dioxus app and all of its assets.
#[derive(Clone, Debug, Default, Deserialize, Parser)]
#[clap(name = "build")] #[clap(name = "build")]
pub struct Build { pub struct Build {
/// Build in release mode [default: false]
#[clap(long, short)]
#[serde(default)]
pub release: bool,
/// This flag only applies to fullstack builds. By default fullstack builds will run with something in between debug and release mode. This flag will force the build to run in debug mode. [default: false]
#[clap(long)]
#[serde(default)]
pub force_debug: bool,
/// This flag only applies to fullstack builds. By default fullstack builds will run the server and client builds in parallel. This flag will force the build to run the server build first, then the client build. [default: false]
#[clap(long)]
#[serde(default)]
pub force_sequential: bool,
// Use verbose output [default: false]
#[clap(long)]
#[serde(default)]
pub verbose: bool,
/// Build with custom profile
#[clap(long)]
pub profile: Option<String>,
/// Build platform: support Web & Desktop [default: "default_platform"]
#[clap(long, value_enum)]
pub platform: Option<Platform>,
/// Skip collecting assets from dependencies [default: false]
#[clap(long)]
#[serde(default)]
pub skip_assets: bool,
/// Extra arguments passed to cargo build
#[clap(last = true)]
pub cargo_args: Vec<String>,
/// Inject scripts to load the wasm and js files for your dioxus app if they are not already present [default: true]
#[clap(long, default_value_t = true)]
pub inject_loading_scripts: bool,
/// Information about the target to build
#[clap(flatten)] #[clap(flatten)]
pub build: ConfigOptsBuild, pub target_args: TargetArgs,
} }
impl Build { impl Build {
/// Note: `rust_flags` argument is only used for the fullstack platform. pub fn resolve(&mut self, dioxus_crate: &mut DioxusCrate) -> Result<()> {
pub fn build( // Inherit the platform from the defaults
self,
bin: Option<PathBuf>,
target_dir: Option<&std::path::Path>,
rust_flags: Option<String>,
) -> Result<()> {
let mut crate_config = dioxus_cli_config::CrateConfig::new(bin)?;
if let Some(target_dir) = target_dir {
crate_config.target_dir = target_dir.to_path_buf();
}
// change the release state.
crate_config.with_release(self.build.release);
crate_config.with_verbose(self.build.verbose);
if self.build.example.is_some() {
crate_config.as_example(self.build.example.clone().unwrap());
}
if self.build.profile.is_some() {
crate_config.set_profile(self.build.profile.clone().unwrap());
}
if self.build.features.is_some() {
crate_config.set_features(self.build.features.clone().unwrap());
}
let platform = self let platform = self
.build
.platform .platform
.unwrap_or(crate_config.dioxus_config.application.default_platform); .unwrap_or_else(|| self.auto_detect_platform(dioxus_crate));
self.platform = Some(platform);
if let Some(target) = self.build.target.clone() { // Add any features required to turn on the platform we are building for
crate_config.set_target(target); self.target_args
} .features
.extend(dioxus_crate.features_for_platform(platform));
crate_config.set_cargo_args(self.build.cargo_args.clone());
crate_config.extend_with_platform(platform);
// #[cfg(feature = "plugin")]
// let _ = crate::plugin::PluginManager::on_build_start(&crate_config, &platform);
let build_result = match platform {
Platform::Web => {
// `rust_flags` are used by fullstack's client build.
crate::builder::build_web(&crate_config, self.build.skip_assets, rust_flags)?
}
Platform::Desktop => {
// Since desktop platform doesn't use `rust_flags`, this
// argument is explicitly set to `None`.
crate::builder::build_desktop(&crate_config, false, self.build.skip_assets, None)?
}
Platform::Fullstack | Platform::StaticGeneration => {
// Fullstack mode must be built with web configs on the desktop
// (server) binary as well as the web binary
let _config = AssetConfigDropGuard::new();
let client_rust_flags = fullstack::client_rust_flags(&self.build);
let server_rust_flags = fullstack::server_rust_flags(&self.build);
{
let mut web_config = crate_config.clone();
let web_feature = self.build.client_feature;
let features = &mut web_config.features;
match features {
Some(features) => {
features.push(web_feature);
}
None => web_config.features = Some(vec![web_feature]),
};
crate::builder::build_web(
&web_config,
self.build.skip_assets,
Some(client_rust_flags),
)?;
}
{
let mut desktop_config = crate_config.clone();
let desktop_feature = self.build.server_feature;
let features = &mut desktop_config.features;
match features {
Some(features) => {
features.push(desktop_feature);
}
None => desktop_config.features = Some(vec![desktop_feature]),
};
crate::builder::build_desktop(
&desktop_config,
false,
self.build.skip_assets,
Some(server_rust_flags),
)?
}
}
_ => unreachable!(),
};
let temp = gen_page(&crate_config, build_result.assets.as_ref(), false);
let mut file = std::fs::File::create(crate_config.out_dir().join("index.html"))?;
file.write_all(temp.as_bytes())?;
// #[cfg(feature = "plugin")]
// let _ = crate::plugin::PluginManager::on_build_finish(&crate_config, &platform);
Ok(()) Ok(())
} }
pub async fn build(&mut self, dioxus_crate: &mut DioxusCrate) -> Result<()> {
self.resolve(dioxus_crate)?;
let build_requests = BuildRequest::create(false, dioxus_crate, self.clone());
BuildRequest::build_all_parallel(build_requests).await?;
Ok(())
}
pub async fn run(&mut self) -> anyhow::Result<()> {
let mut dioxus_crate =
DioxusCrate::new(&self.target_args).context("Failed to load Dioxus workspace")?;
self.build(&mut dioxus_crate).await?;
Ok(())
}
fn auto_detect_platform(&self, resolved: &DioxusCrate) -> Platform {
for dioxus in resolved.krates.krates_by_name("dioxus") {
let Some(features) = resolved.krates.get_enabled_features(dioxus.kid) else {
continue;
};
if let Some(platform) = features
.iter()
.find_map(|platform| platform.parse::<Platform>().ok())
{
return platform;
}
}
resolved.dioxus_config.application.default_platform
}
/// Get the platform from the build arguments
pub fn platform(&self) -> Platform {
self.platform.unwrap_or_default()
}
} }

View file

@ -1,11 +1,13 @@
use core::panic; use crate::build::Build;
use dioxus_cli_config::ExecutableType; use crate::DioxusCrate;
use std::{env::current_dir, fs::create_dir_all, str::FromStr}; use anyhow::Context;
use std::env::current_dir;
use std::fs::create_dir_all;
use std::ops::Deref;
use std::str::FromStr;
use tauri_bundler::{BundleSettings, PackageSettings, SettingsBuilder}; use tauri_bundler::{BundleSettings, PackageSettings, SettingsBuilder};
use super::*; use super::*;
use crate::{build_desktop, cfg::ConfigOptsBundle};
/// Bundle the Rust desktop app and all of its assets /// Bundle the Rust desktop app and all of its assets
#[derive(Clone, Debug, Parser)] #[derive(Clone, Debug, Parser)]
@ -13,8 +15,17 @@ use crate::{build_desktop, cfg::ConfigOptsBundle};
pub struct Bundle { pub struct Bundle {
#[clap(long)] #[clap(long)]
pub package: Option<Vec<String>>, pub package: Option<Vec<String>>,
/// The arguments for the dioxus build
#[clap(flatten)] #[clap(flatten)]
pub build: ConfigOptsBundle, pub build_arguments: Build,
}
impl Deref for Bundle {
type Target = Build;
fn deref(&self) -> &Self::Target {
&self.build_arguments
}
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -62,48 +73,19 @@ impl From<PackageType> for tauri_bundler::PackageType {
} }
impl Bundle { impl Bundle {
pub fn bundle(self, bin: Option<PathBuf>) -> Result<()> { pub async fn bundle(mut self) -> anyhow::Result<()> {
let mut crate_config = dioxus_cli_config::CrateConfig::new(bin)?; let mut dioxus_crate = DioxusCrate::new(&self.build_arguments.target_args)
.context("Failed to load Dioxus workspace")?;
// change the release state. self.build_arguments.resolve(&mut dioxus_crate)?;
crate_config.with_release(true);
crate_config.with_verbose(self.build.verbose);
if self.build.example.is_some() { // Build the app
crate_config.as_example(self.build.example.unwrap()); self.build_arguments.build(&mut dioxus_crate).await?;
}
if self.build.profile.is_some() {
crate_config.set_profile(self.build.profile.unwrap());
}
if let Some(target) = &self.build.target {
crate_config.set_target(target.to_string());
}
crate_config.set_cargo_args(self.build.cargo_args);
if let Some(platform) = self.build.platform {
crate_config.extend_with_platform(platform);
}
if let Some(features) = self.build.features {
crate_config.set_features(features);
}
// build the desktop app
// Since the `bundle()` function is only run for the desktop platform,
// the `rust_flags` argument is set to `None`.
build_desktop(&crate_config, false, false, None)?;
// copy the binary to the out dir // copy the binary to the out dir
let package = crate_config.manifest.package.as_ref().unwrap(); let package = dioxus_crate.package();
let mut name: PathBuf = match &crate_config.executable { let mut name: PathBuf = dioxus_crate.executable_name().into();
ExecutableType::Binary(name)
| ExecutableType::Lib(name)
| ExecutableType::Example(name) => name,
}
.into();
if cfg!(windows) { if cfg!(windows) {
name.set_extension("exe"); name.set_extension("exe");
} }
@ -111,12 +93,12 @@ impl Bundle {
// bundle the app // bundle the app
let binaries = vec![ let binaries = vec![
tauri_bundler::BundleBinary::new(name.display().to_string(), true) tauri_bundler::BundleBinary::new(name.display().to_string(), true)
.set_src_path(Some(crate_config.crate_dir.display().to_string())), .set_src_path(Some(dioxus_crate.workspace_dir().display().to_string())),
]; ];
let mut bundle_settings: BundleSettings = crate_config.dioxus_config.bundle.clone().into(); let mut bundle_settings: BundleSettings = dioxus_crate.dioxus_config.bundle.clone().into();
if cfg!(windows) { if cfg!(windows) {
let windows_icon_override = crate_config let windows_icon_override = dioxus_crate
.dioxus_config .dioxus_config
.bundle .bundle
.windows .windows
@ -135,7 +117,7 @@ impl Bundle {
if !path.exists() { if !path.exists() {
create_dir_all(path.parent().unwrap()).unwrap(); create_dir_all(path.parent().unwrap()).unwrap();
let mut file = File::create(&path).unwrap(); let mut file = File::create(&path).unwrap();
file.write_all(include_bytes!("../assets/icon.ico")) file.write_all(include_bytes!("../../assets/icon.ico"))
.unwrap(); .unwrap();
} }
path path
@ -145,10 +127,10 @@ impl Bundle {
} }
// Copy the assets in the dist directory to the bundle // Copy the assets in the dist directory to the bundle
let static_asset_output_dir = &crate_config.dioxus_config.application.out_dir; let static_asset_output_dir = &dioxus_crate.dioxus_config.application.out_dir;
// Make sure the dist directory is relative to the crate directory // Make sure the dist directory is relative to the crate directory
let static_asset_output_dir = static_asset_output_dir let static_asset_output_dir = static_asset_output_dir
.strip_prefix(&crate_config.crate_dir) .strip_prefix(dioxus_crate.workspace_dir())
.unwrap_or(static_asset_output_dir); .unwrap_or(static_asset_output_dir);
let static_asset_output_dir = static_asset_output_dir.display().to_string(); let static_asset_output_dir = static_asset_output_dir.display().to_string();
@ -156,8 +138,8 @@ impl Bundle {
// Don't copy the executable or the old bundle directory // Don't copy the executable or the old bundle directory
let ignored_files = [ let ignored_files = [
crate_config.out_dir().join("bundle"), dioxus_crate.out_dir().join("bundle"),
crate_config.out_dir().join(name), dioxus_crate.out_dir().join(name),
]; ];
for entry in std::fs::read_dir(&static_asset_output_dir)?.flatten() { for entry in std::fs::read_dir(&static_asset_output_dir)?.flatten() {
@ -181,27 +163,27 @@ impl Bundle {
} }
let mut settings = SettingsBuilder::new() let mut settings = SettingsBuilder::new()
.project_out_directory(crate_config.out_dir()) .project_out_directory(dioxus_crate.out_dir())
.package_settings(PackageSettings { .package_settings(PackageSettings {
product_name: crate_config.dioxus_config.application.name.clone(), product_name: dioxus_crate.dioxus_config.application.name.clone(),
version: package.version().to_string(), version: package.version.to_string(),
description: package.description().unwrap_or_default().to_string(), description: package.description.clone().unwrap_or_default(),
homepage: Some(package.homepage().unwrap_or_default().to_string()), homepage: Some(package.homepage.clone().unwrap_or_default()),
authors: Some(Vec::from(package.authors())), authors: Some(package.authors.clone()),
default_run: Some(crate_config.dioxus_config.application.name.clone()), default_run: Some(dioxus_crate.dioxus_config.application.name.clone()),
}) })
.binaries(binaries) .binaries(binaries)
.bundle_settings(bundle_settings); .bundle_settings(bundle_settings);
if let Some(packages) = self.package { if let Some(packages) = &self.package {
settings = settings.package_types( settings = settings.package_types(
packages packages
.into_iter() .iter()
.map(|p| p.parse::<PackageType>().unwrap().into()) .map(|p| p.parse::<PackageType>().unwrap().into())
.collect(), .collect(),
); );
} }
if let Some(target) = &self.build.target { if let Some(target) = &self.target_args.target {
settings = settings.target(target.to_string()); settings = settings.target(target.to_string());
} }

View file

@ -1,218 +0,0 @@
use dioxus_cli_config::Platform;
use dioxus_cli_config::ServeArguments;
use super::*;
/// Config options for the build system.
///
/// Serve options can be converted into these via `From<ConfigOptsServe>`, so
/// the two structs intentionally share most field names.
#[derive(Clone, Debug, Default, Deserialize, Parser)]
pub struct ConfigOptsBuild {
    /// Build in release mode [default: false]
    #[clap(long, short)]
    #[serde(default)]
    pub release: bool,
    /// This flag only applies to fullstack builds. By default fullstack builds will run with something in between debug and release mode. This flag will force the build to run in debug mode. [default: false]
    #[clap(long)]
    #[serde(default)]
    pub force_debug: bool,
    /// This flag only applies to fullstack builds. By default fullstack builds will run the server and client builds in parallel. This flag will force the build to run the server build first, then the client build. [default: false]
    #[clap(long)]
    #[serde(default)]
    pub force_sequential: bool,
    // Use verbose output [default: false]
    #[clap(long)]
    #[serde(default)]
    pub verbose: bool,
    /// Build a example [default: ""]
    #[clap(long)]
    pub example: Option<String>,
    /// Build with custom profile
    #[clap(long)]
    pub profile: Option<String>,
    /// Build platform: support Web & Desktop [default: "default_platform"]
    #[clap(long, value_enum)]
    pub platform: Option<Platform>,
    /// Skip collecting assets from dependencies [default: false]
    #[clap(long)]
    #[serde(default)]
    pub skip_assets: bool,
    /// Space separated list of features to activate
    #[clap(long)]
    pub features: Option<Vec<String>>,
    /// The feature to use for the client in a fullstack app [default: "web"]
    #[clap(long, default_value_t = { "web".to_string() })]
    pub client_feature: String,
    /// The feature to use for the server in a fullstack app [default: "server"]
    #[clap(long, default_value_t = { "server".to_string() })]
    pub server_feature: String,
    /// Rustc platform triple
    #[clap(long)]
    pub target: Option<String>,
    /// Extra arguments passed to cargo build
    #[clap(last = true)]
    pub cargo_args: Vec<String>,
}
impl From<ConfigOptsServe> for ConfigOptsBuild {
    /// Derive build options from serve options, copying every overlapping
    /// field verbatim (listed in `ConfigOptsBuild` declaration order).
    fn from(serve: ConfigOptsServe) -> Self {
        Self {
            release: serve.release,
            force_debug: serve.force_debug,
            force_sequential: serve.force_sequential,
            verbose: serve.verbose,
            example: serve.example,
            profile: serve.profile,
            platform: serve.platform,
            skip_assets: serve.skip_assets,
            features: serve.features,
            client_feature: serve.client_feature,
            server_feature: serve.server_feature,
            target: serve.target,
            cargo_args: serve.cargo_args,
        }
    }
}
/// Config options for `dx serve`.
#[derive(Clone, Debug, Default, Deserialize, Parser)]
#[command(group = clap::ArgGroup::new("release-incompatible").multiple(true).conflicts_with("release"))]
pub struct ConfigOptsServe {
    /// Arguments for the serve command
    #[clap(flatten)]
    pub(crate) server_arguments: ServeArguments,
    // TODO: Somehow make this default to `true` if the flag was provided. e.g. `dx serve --open`
    // Currently it requires a value: `dx serve --open true`
    /// Open the app in the default browser [default: false - unless project or global settings are set]
    #[clap(long)]
    pub open: Option<bool>,
    // TODO: See `open` field
    /// Enable full hot reloading for the app [default: true - unless project or global settings are set]
    #[clap(long, group = "release-incompatible")]
    pub hot_reload: Option<bool>,
    /// Build a example [default: ""]
    #[clap(long)]
    pub example: Option<String>,
    /// Build in release mode [default: false]
    #[clap(long)]
    #[serde(default)]
    pub release: bool,
    /// This flag only applies to fullstack builds. By default fullstack builds will run with something in between debug and release mode. This flag will force the build to run in debug mode. [default: false]
    #[clap(long)]
    #[serde(default)]
    pub force_debug: bool,
    /// This flag only applies to fullstack builds. By default fullstack builds will run the server and client builds in parallel. This flag will force the build to run the server build first, then the client build. [default: false]
    #[clap(long)]
    #[serde(default)]
    pub force_sequential: bool,
    // Use verbose output [default: false]
    #[clap(long)]
    #[serde(default)]
    pub verbose: bool,
    /// Build with custom profile
    #[clap(long)]
    pub profile: Option<String>,
    /// Build platform: support Web, Desktop, and Fullstack [default: "default_platform"]
    #[clap(long, value_enum)]
    pub platform: Option<Platform>,
    // FIX(review): an orphaned doc comment plus a `#[clap(long)]` /
    // `#[clap(default_missing_value("true"), default_value("true"), ...)]`
    // attribute stack sat here with no field of its own (leftovers from a
    // removed hot-reload-rsx flag). Those attributes attached to the next
    // field, giving `cross_origin_policy` a duplicate `long` and a default of
    // `true` — they have been removed.
    /// Set cross-origin-policy to same-origin [default: false]
    #[clap(name = "cross-origin-policy")]
    #[clap(long)]
    #[serde(default)]
    pub cross_origin_policy: bool,
    /// Space separated list of features to activate
    #[clap(long)]
    pub features: Option<Vec<String>>,
    /// Skip collecting assets from dependencies [default: false]
    #[clap(long)]
    #[serde(default)]
    pub skip_assets: bool,
    /// The feature to use for the client in a fullstack app [default: "web"]
    #[clap(long, default_value_t = { "web".to_string() })]
    pub client_feature: String,
    /// The feature to use for the server in a fullstack app [default: "server"]
    #[clap(long, default_value_t = { "server".to_string() })]
    pub server_feature: String,
    /// Rustc platform triple
    #[clap(long)]
    pub target: Option<String>,
    /// Additional arguments to pass to the executable
    #[clap(long)]
    pub args: Vec<String>,
    /// Extra arguments passed to cargo build
    #[clap(last = true)]
    pub cargo_args: Vec<String>,
}
/// Config options for the bundling system.
///
/// A reduced subset of `ConfigOptsBuild` — bundling has no fullstack or
/// asset-skipping knobs.
#[derive(Clone, Debug, Default, Deserialize, Parser)]
pub struct ConfigOptsBundle {
    /// Build in release mode [default: false]
    #[clap(long, short)]
    #[serde(default)]
    pub release: bool,
    // Use verbose output [default: false]
    #[clap(long)]
    #[serde(default)]
    pub verbose: bool,
    /// Build a example [default: ""]
    #[clap(long)]
    pub example: Option<String>,
    /// Build with custom profile
    #[clap(long)]
    pub profile: Option<String>,
    /// Build platform: support Web & Desktop [default: "default_platform"]
    #[clap(long)]
    pub platform: Option<Platform>,
    /// Space separated list of features to activate
    #[clap(long)]
    pub features: Option<Vec<String>>,
    /// Rustc platform triple
    #[clap(long)]
    pub target: Option<String>,
    /// Extra arguments passed to cargo build
    #[clap(last = true)]
    pub cargo_args: Vec<String>,
}

View file

@ -1,6 +1,9 @@
use crate::build::TargetArgs;
use futures_util::{stream::FuturesUnordered, StreamExt}; use futures_util::{stream::FuturesUnordered, StreamExt};
use std::{path::Path, process::exit}; use std::{path::Path, process::exit};
use crate::DioxusCrate;
use super::*; use super::*;
// For reference, the rustfmt main.rs file // For reference, the rustfmt main.rs file
@ -12,30 +15,31 @@ pub struct Check {
/// Input file /// Input file
#[clap(short, long)] #[clap(short, long)]
pub file: Option<PathBuf>, pub file: Option<PathBuf>,
/// Information about the target to check
#[clap(flatten)]
pub target_args: TargetArgs,
} }
impl Check { impl Check {
// Todo: check the entire crate // Todo: check the entire crate
pub fn check(self) -> Result<()> { pub async fn check(self) -> Result<()> {
let rt = tokio::runtime::Runtime::new().unwrap(); match self.file {
// Default to checking the project
rt.block_on(async move { None => {
match self.file { let dioxus_crate = DioxusCrate::new(&self.target_args)?;
// Default to checking the project if let Err(e) = check_project_and_report(dioxus_crate).await {
None => { eprintln!("error checking project: {}", e);
if let Err(e) = check_project_and_report().await { exit(1);
eprintln!("error checking project: {}", e);
exit(1);
}
}
Some(file) => {
if let Err(e) = check_file_and_report(file).await {
eprintln!("failed to check file: {}", e);
exit(1);
}
} }
} }
}); Some(file) => {
if let Err(e) = check_file_and_report(file).await {
eprintln!("failed to check file: {}", e);
exit(1);
}
}
}
Ok(()) Ok(())
} }
@ -50,11 +54,9 @@ async fn check_file_and_report(path: PathBuf) -> Result<()> {
/// Runs using Tokio for multithreading, so it should be really really fast /// Runs using Tokio for multithreading, so it should be really really fast
/// ///
/// Doesn't do mod-descending, so it will still try to check unreachable files. TODO. /// Doesn't do mod-descending, so it will still try to check unreachable files. TODO.
async fn check_project_and_report() -> Result<()> { async fn check_project_and_report(dioxus_crate: DioxusCrate) -> Result<()> {
let crate_config = dioxus_cli_config::CrateConfig::new(None)?; let mut files_to_check = vec![dioxus_crate.main_source_file()];
collect_rs_files(&dioxus_crate.crate_dir(), &mut files_to_check);
let mut files_to_check = vec![];
collect_rs_files(&crate_config.crate_dir, &mut files_to_check);
check_files_and_report(files_to_check).await check_files_and_report(files_to_check).await
} }

View file

@ -1,3 +1,7 @@
use crate::DioxusCrate;
use anyhow::Context;
use build::TargetArgs;
use super::*; use super::*;
/// Clean build artifacts. /// Clean build artifacts.
@ -6,8 +10,9 @@ use super::*;
pub struct Clean {} pub struct Clean {}
impl Clean { impl Clean {
pub fn clean(self, bin: Option<PathBuf>) -> Result<()> { pub fn clean(self) -> anyhow::Result<()> {
let crate_config = dioxus_cli_config::CrateConfig::new(bin)?; let dioxus_crate =
DioxusCrate::new(&TargetArgs::default()).context("Failed to load Dioxus workspace")?;
let output = Command::new("cargo") let output = Command::new("cargo")
.arg("clean") .arg("clean")
@ -16,15 +21,15 @@ impl Clean {
.output()?; .output()?;
if !output.status.success() { if !output.status.success() {
return custom_error!("Cargo clean failed."); return Err(anyhow::anyhow!("Cargo clean failed."));
} }
let out_dir = &crate_config.out_dir(); let out_dir = &dioxus_crate.out_dir();
if out_dir.is_dir() { if out_dir.is_dir() {
remove_dir_all(out_dir)?; remove_dir_all(out_dir)?;
} }
let fullstack_out_dir = crate_config.fullstack_out_dir(); let fullstack_out_dir = dioxus_crate.fullstack_out_dir();
if fullstack_out_dir.is_dir() { if fullstack_out_dir.is_dir() {
remove_dir_all(fullstack_out_dir)?; remove_dir_all(fullstack_out_dir)?;

View file

@ -1,4 +1,5 @@
use dioxus_cli_config::crate_root; use crate::build::TargetArgs;
use crate::{metadata::crate_root, CliSettings};
use super::*; use super::*;
@ -70,7 +71,7 @@ impl Config {
return Ok(()); return Ok(());
} }
let mut file = File::create(conf_path)?; let mut file = File::create(conf_path)?;
let content = String::from(include_str!("../assets/dioxus.toml")) let content = String::from(include_str!("../../assets/dioxus.toml"))
.replace("{{project-name}}", &name) .replace("{{project-name}}", &name)
.replace("{{default-platform}}", &platform); .replace("{{default-platform}}", &platform);
file.write_all(content.as_bytes())?; file.write_all(content.as_bytes())?;
@ -79,29 +80,21 @@ impl Config {
Config::FormatPrint {} => { Config::FormatPrint {} => {
println!( println!(
"{:#?}", "{:#?}",
dioxus_cli_config::CrateConfig::new(None)?.dioxus_config crate::dioxus_crate::DioxusCrate::new(&TargetArgs::default())?.dioxus_config
); );
} }
Config::CustomHtml {} => { Config::CustomHtml {} => {
let html_path = crate_root.join("index.html"); let html_path = crate_root.join("index.html");
let mut file = File::create(html_path)?; let mut file = File::create(html_path)?;
let content = include_str!("../assets/index.html"); let content = include_str!("../../assets/index.html");
file.write_all(content.as_bytes())?; file.write_all(content.as_bytes())?;
tracing::info!("🚩 Create custom html file done."); tracing::info!("🚩 Create custom html file done.");
} }
Config::SetGlobal { setting, value } => { Config::SetGlobal { setting, value } => {
let mut global_settings = CliSettings::modify_settings(|settings| match setting {
dioxus_cli_config::CliSettings::from_global().unwrap_or_default(); Setting::AlwaysHotReload => settings.always_hot_reload = Some(value.into()),
Setting::AlwaysOpenBrowser => settings.always_open_browser = Some(value.into()),
match setting { })?;
Setting::AlwaysHotReload => {
global_settings.always_hot_reload = Some(value.into())
}
Setting::AlwaysOpenBrowser => {
global_settings.always_open_browser = Some(value.into())
}
}
global_settings.save().unwrap();
} }
} }
Ok(()) Ok(())

View file

@ -63,7 +63,7 @@ pub fn post_create(path: &PathBuf) -> Result<()> {
let toml_paths = [path.join("Cargo.toml"), path.join("Dioxus.toml")]; let toml_paths = [path.join("Cargo.toml"), path.join("Dioxus.toml")];
for toml_path in &toml_paths { for toml_path in &toml_paths {
let toml = std::fs::read_to_string(toml_path)?; let toml = std::fs::read_to_string(toml_path)?;
let mut toml = toml.parse::<toml_edit::Document>().map_err(|e| { let mut toml = toml.parse::<toml_edit::DocumentMut>().map_err(|e| {
anyhow::anyhow!( anyhow::anyhow!(
"failed to parse toml at {}: {}", "failed to parse toml at {}: {}",
toml_path.display(), toml_path.display(),

View file

@ -1,25 +1,17 @@
pub mod autoformat; pub mod autoformat;
pub mod build; pub mod build;
pub mod bundle; pub mod bundle;
pub mod cfg;
pub mod check; pub mod check;
pub mod clean; pub mod clean;
pub mod config; pub mod config;
pub mod create; pub mod create;
pub mod init; pub mod init;
pub mod link; pub mod link;
pub mod plugin;
pub mod serve; pub mod serve;
pub mod translate; pub mod translate;
use crate::{ use crate::{custom_error, error::Result, Error};
cfg::{ConfigOptsBuild, ConfigOptsServe},
custom_error,
error::Result,
gen_page, server, Error,
};
use clap::{Parser, Subcommand}; use clap::{Parser, Subcommand};
use dioxus_cli_config::CrateConfig;
use html_parser::Dom; use html_parser::Dom;
use serde::Deserialize; use serde::Deserialize;
use std::{ use std::{
@ -82,11 +74,6 @@ pub enum Commands {
#[clap(subcommand)] #[clap(subcommand)]
Config(config::Config), Config(config::Config),
/// Manage plugins for dioxus cli
#[cfg(feature = "plugin")]
#[clap(subcommand)]
Plugin(plugin::Plugin),
/// Handles parsing of linker arguments for linker-based systems /// Handles parsing of linker arguments for linker-based systems
/// such as Manganis and binary patching. /// such as Manganis and binary patching.
Link(link::LinkCommand), Link(link::LinkCommand),
@ -106,9 +93,6 @@ impl Display for Commands {
Commands::Check(_) => write!(f, "check"), Commands::Check(_) => write!(f, "check"),
Commands::Bundle(_) => write!(f, "bundle"), Commands::Bundle(_) => write!(f, "bundle"),
Commands::Link(_) => write!(f, "link"), Commands::Link(_) => write!(f, "link"),
#[cfg(feature = "plugin")]
Commands::Plugin(_) => write!(f, "plugin"),
} }
} }
} }

View file

@ -1,39 +0,0 @@
#![cfg(feature = "plugin")]
use super::*;
/// Manage plugins for dioxus cli
#[derive(Clone, Debug, Deserialize, Subcommand)]
#[clap(name = "plugin")]
pub enum Plugin {
/// Return all dioxus-cli support tools.
List {},
/// Get default app install path.
AppPath {},
/// Install a new tool.
Add { name: String },
}
impl Plugin {
pub fn plugin(self) -> Result<()> {
match self {
Plugin::List {} => {
for item in crate::plugin::PluginManager::plugin_list() {
println!("- {item}");
}
}
Plugin::AppPath {} => {
let plugin_dir = crate::plugin::PluginManager::init_plugin_dir();
if let Some(v) = plugin_dir.to_str() {
println!("{}", v);
} else {
tracing::error!("Plugin path get failed.");
}
}
Plugin::Add { name: _ } => {
tracing::info!("You can use `dx plugin app-path` to get Installation position");
}
}
Ok(())
}
}

View file

@ -1,110 +1,91 @@
use dioxus_cli_config::Platform; use crate::{
use manganis_cli_support::AssetManifest; settings::{self},
DioxusCrate,
};
use anyhow::Context;
use build::Build;
use dioxus_cli_config::AddressArguments;
use std::ops::Deref;
use super::*; use super::*;
use cargo_toml::Dependency::{Detailed, Inherited, Simple};
use std::fs::create_dir_all; /// Arguments for the serve command
#[derive(Clone, Debug, Parser, Default)]
pub struct ServeArguments {
/// The arguments for the address the server will run on
#[clap(flatten)]
pub address: AddressArguments,
/// Open the app in the default browser [default: false - unless project or global settings are set]
#[arg(long, default_missing_value="true", num_args=0..=1)]
pub open: Option<bool>,
/// Enable full hot reloading for the app [default: true - unless project or global settings are set]
#[clap(long, group = "release-incompatible")]
pub hot_reload: Option<bool>,
/// Set cross-origin-policy to same-origin [default: false]
#[clap(name = "cross-origin-policy")]
#[clap(long)]
pub cross_origin_policy: bool,
/// Additional arguments to pass to the executable
#[clap(long)]
pub args: Vec<String>,
}
/// Run the WASM project on dev-server /// Run the WASM project on dev-server
#[derive(Clone, Debug, Parser)] #[derive(Clone, Debug, Default, Parser)]
#[command(group = clap::ArgGroup::new("release-incompatible").multiple(true).conflicts_with("release"))]
#[clap(name = "serve")] #[clap(name = "serve")]
pub struct Serve { pub struct Serve {
/// Arguments for the serve command
#[clap(flatten)] #[clap(flatten)]
pub serve: ConfigOptsServe, pub(crate) server_arguments: ServeArguments,
/// Arguments for the dioxus build
#[clap(flatten)]
pub(crate) build_arguments: Build,
/// Run the server in interactive mode
#[arg(long, default_missing_value="true", num_args=0..=1, short = 'i')]
pub interactive: Option<bool>,
} }
impl Serve { impl Serve {
pub fn serve(self, bin: Option<PathBuf>) -> Result<()> { /// Resolve the serve arguments from the arguments or the config
let mut crate_config = dioxus_cli_config::CrateConfig::new(bin)?; fn resolve(&mut self, crate_config: &mut DioxusCrate) -> Result<()> {
let mut serve_cfg = self.serve.clone();
// Handle cli settings
let cli_settings = crate_config.dioxus_config.cli_settings.clone().unwrap();
if serve_cfg.hot_reload.is_none() {
// we're going to override the hot_reload setting in the project's cfg based on settings
//
// let hot_reload = self.serve.hot_reload || crate_config.dioxus_config.application.hot_reload;
let value = cli_settings.always_hot_reload.unwrap_or(true);
serve_cfg.hot_reload = Some(value);
crate_config.with_hot_reload(value);
}
if serve_cfg.open.is_none() {
serve_cfg.open = Some(cli_settings.always_open_browser.unwrap_or(false));
}
// Set config settings // Set config settings
crate_config.with_cross_origin_policy(self.serve.cross_origin_policy); let settings = settings::CliSettings::load();
crate_config.with_release(self.serve.release);
crate_config.with_verbose(self.serve.verbose);
if let Some(example) = self.serve.example { if self.server_arguments.hot_reload.is_none() {
crate_config.as_example(example); self.server_arguments.hot_reload = Some(settings.always_hot_reload.unwrap_or(true));
}
if self.server_arguments.open.is_none() {
self.server_arguments.open = Some(settings.always_open_browser.unwrap_or_default());
} }
if let Some(profile) = self.serve.profile { // Resolve the build arguments
crate_config.set_profile(profile); self.build_arguments.resolve(crate_config)?;
}
if let Some(features) = self.serve.features {
crate_config.set_features(features);
}
if let Some(target) = self.serve.target {
crate_config.set_target(target);
}
crate_config.set_cargo_args(self.serve.cargo_args);
let mut platform = self.serve.platform;
if platform.is_none() {
if let Some(dependency) = &crate_config.manifest.dependencies.get("dioxus") {
let features = match dependency {
Inherited(detail) => detail.features.to_vec(),
Detailed(detail) => detail.features.to_vec(),
Simple(_) => vec![],
};
platform = features
.iter()
.find_map(|platform| serde_json::from_str(&format!(r#""{}""#, platform)).ok());
}
}
let platform = platform.unwrap_or(crate_config.dioxus_config.application.default_platform);
crate_config.extend_with_platform(platform);
// start the develop server
use server::{desktop, fullstack, web};
match platform {
Platform::Web => web::startup(crate_config.clone(), &serve_cfg)?,
Platform::Desktop => desktop::startup(crate_config.clone(), &serve_cfg)?,
Platform::Fullstack | Platform::StaticGeneration => {
fullstack::startup(crate_config.clone(), &serve_cfg)?
}
_ => unreachable!(),
}
Ok(()) Ok(())
} }
pub fn regen_dev_page( pub async fn serve(mut self) -> anyhow::Result<()> {
crate_config: &CrateConfig, let mut dioxus_crate = DioxusCrate::new(&self.build_arguments.target_args)
manifest: Option<&AssetManifest>, .context("Failed to load Dioxus workspace")?;
) -> anyhow::Result<()> {
let serve_html = gen_page(crate_config, manifest, true);
let dist_path = crate_config.out_dir(); self.resolve(&mut dioxus_crate)?;
if !dist_path.is_dir() {
create_dir_all(&dist_path)?;
}
let index_path = dist_path.join("index.html");
let mut file = std::fs::File::create(index_path)?;
file.write_all(serve_html.as_bytes())?;
crate::serve::serve_all(self, dioxus_crate).await?;
Ok(()) Ok(())
} }
} }
impl Deref for Serve {
type Target = Build;
fn deref(&self) -> &Self::Target {
&self.build_arguments
}
}

View file

@ -1,6 +1,6 @@
use std::process::exit; use std::process::exit;
use dioxus_rsx::{BodyNode, CallBody}; use dioxus_rsx::{BodyNode, CallBody, TemplateBody};
use super::*; use super::*;
@ -59,7 +59,7 @@ pub fn convert_html_to_formatted_rsx(dom: &Dom, component: bool) -> String {
fn write_callbody_with_icon_section(mut callbody: CallBody) -> String { fn write_callbody_with_icon_section(mut callbody: CallBody) -> String {
let mut svgs = vec![]; let mut svgs = vec![];
rsx_rosetta::collect_svgs(&mut callbody.roots, &mut svgs); rsx_rosetta::collect_svgs(&mut callbody.body.roots, &mut svgs);
let mut out = write_component_body(dioxus_autofmt::write_block_out(&callbody).unwrap()); let mut out = write_component_body(dioxus_autofmt::write_block_out(&callbody).unwrap());
@ -81,7 +81,8 @@ fn write_svg_section(out: &mut String, svgs: Vec<BodyNode>) {
out.push_str("\n\nmod icons {"); out.push_str("\n\nmod icons {");
out.push_str("\n use super::*;"); out.push_str("\n use super::*;");
for (idx, icon) in svgs.into_iter().enumerate() { for (idx, icon) in svgs.into_iter().enumerate() {
let raw = dioxus_autofmt::write_block_out(&CallBody { roots: vec![icon] }).unwrap(); let raw =
dioxus_autofmt::write_block_out(&CallBody::new(TemplateBody::new(vec![icon]))).unwrap();
out.push_str("\n\n pub fn icon_"); out.push_str("\n\n pub fn icon_");
out.push_str(&idx.to_string()); out.push_str(&idx.to_string());
out.push_str("() -> Element {\n rsx! {"); out.push_str("() -> Element {\n rsx! {");

View file

@ -0,0 +1,370 @@
use crate::build::TargetArgs;
use dioxus_cli_config::{DioxusConfig, Platform};
use krates::cm::Target;
use krates::{cm::TargetKind, Cmd, Krates, NodeId};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use std::{
fmt::{Display, Formatter},
path::PathBuf,
};
use crate::metadata::CargoError;
/// Load the dioxus config from a path
/// Locate and parse the `Dioxus.toml` associated with `package`, if any.
///
/// Starting from the package's manifest directory, each ancestor directory up
/// to the workspace root is searched for a `Dioxus.toml` (either casing).
/// When found, the file is parsed and the bundle identifier/publisher are
/// defaulted from the application name. `Ok(None)` means no config exists.
fn load_dioxus_config(
    krates: &Krates,
    package: NodeId,
) -> Result<Option<DioxusConfig>, CrateConfigError> {
    // Both casings of the config file name are accepted.
    fn config_file_in(dir: &std::path::Path) -> Option<PathBuf> {
        ["Dioxus.toml", "dioxus.toml"]
            .into_iter()
            .map(|file| dir.join(file))
            .find(|path| path.is_file())
    }

    // Walk upward from the crate's Cargo.toml, stopping once we leave the
    // workspace root.
    let mut dir = krates[package]
        .manifest_path
        .parent()
        .unwrap()
        .as_std_path()
        .to_path_buf()
        .canonicalize()?;
    let workspace_root = krates
        .workspace_root()
        .as_std_path()
        .to_path_buf()
        .canonicalize()?;

    let mut config_path = None;
    while dir.starts_with(&workspace_root) {
        if let Some(found) = config_file_in(&dir) {
            config_path = Some(found.as_path().to_path_buf());
            break;
        }
        // Step up one directory; running out of parents is an error.
        let Some(parent) = dir.parent() else {
            return Err(CrateConfigError::CurrentPackageNotFound);
        };
        dir = parent.to_path_buf();
    }

    let Some(config_path) = config_path else {
        return Ok(None);
    };

    let parsed = toml::from_str::<DioxusConfig>(&std::fs::read_to_string(&config_path)?)
        .map_err(|err| {
            CrateConfigError::LoadDioxusConfig(LoadDioxusConfigError {
                location: config_path.display().to_string(),
                error: err.to_string(),
            })
        })
        .map(Some);

    // Backfill bundle metadata derived from the application name.
    match parsed {
        Ok(Some(mut cfg)) => {
            let name = cfg.application.name.clone();
            if cfg.bundle.identifier.is_none() {
                cfg.bundle.identifier = Some(format!("io.github.{name}"));
            }
            if cfg.bundle.publisher.is_none() {
                cfg.bundle.publisher = Some(name);
            }
            Ok(Some(cfg))
        }
        other => other,
    }
}
// Find the main package in the workspace
// Find the main package in the workspace.
//
// With an explicit `package` name, that workspace member must exist; without
// one, the member whose directory most closely encloses the current working
// directory is selected.
fn find_main_package(package: Option<String>, krates: &Krates) -> Result<NodeId, CrateConfigError> {
    let kid = match package {
        Some(name) => {
            // An explicit package name must match a workspace member exactly.
            let found = krates.workspace_members().find_map(|member| {
                if let krates::Node::Krate { id, krate, .. } = member {
                    if krate.name == name {
                        return Some(id);
                    }
                }
                None
            });
            found.ok_or(CrateConfigError::PackageNotFound(name))?
        }
        None => {
            // Pick the member whose directory is the deepest ancestor of the
            // cwd, i.e. the one with the fewest leftover path components
            // after stripping the member's directory prefix.
            let cwd = std::env::current_dir()?;
            let cwd = cwd.as_path();

            let mut best = None;
            for member in krates.workspace_members() {
                let krates::Node::Krate { id, krate, .. } = member else {
                    continue;
                };
                let member_dir = krate.manifest_path.parent().unwrap();
                if let Ok(remainder) = cwd.strip_prefix(member_dir.as_std_path()) {
                    let depth = remainder.components().count();
                    // Strict `<` keeps the first member found on a tie.
                    if best.map_or(true, |(_, d)| depth < d) {
                        best = Some((id, depth));
                    }
                }
            }

            best.map(|(id, _)| id)
                .ok_or(CrateConfigError::CurrentPackageNotFound)?
        }
    };

    Ok(krates.nid_for_kid(kid).unwrap())
}
/// Contains information about the crate we are currently in and the dioxus config for that crate
#[derive(Clone)]
pub struct DioxusCrate {
    /// Workspace dependency graph, as resolved by `krates`.
    pub krates: Arc<Krates>,
    /// Node id (within `krates`) of the package the CLI is operating on.
    pub package: NodeId,
    /// Parsed `Dioxus.toml` for that package; defaults when none was found.
    pub dioxus_config: DioxusConfig,
    /// The cargo build target (bin or example) that will be built.
    pub target: Target,
}
impl DioxusCrate {
    /// Resolve the workspace dependency graph, select the main package and
    /// the bin/example target to build, and load its `Dioxus.toml` (falling
    /// back to defaults when none exists).
    pub fn new(target: &TargetArgs) -> Result<Self, CrateConfigError> {
        let mut cmd = Cmd::new();
        cmd.features(target.features.clone());
        let builder = krates::Builder::new();
        let krates = builder.build(cmd, |_| {})?;

        let package = find_main_package(target.package.clone(), &krates)?;

        let dioxus_config = load_dioxus_config(&krates, package)?.unwrap_or_default();

        let package_name = krates[package].name.clone();
        // `--example` selects an example target; otherwise we build a binary.
        let target_kind = if target.example.is_some() {
            TargetKind::Example
        } else {
            TargetKind::Bin
        };
        // The target name falls back to the package name when neither
        // `--example` nor `--bin` is given.
        let target_name = target
            .example
            .clone()
            .or(target.bin.clone())
            .unwrap_or(package_name);

        let main_package = &krates[package];
        let target = main_package
            .targets
            .iter()
            .find(|target| {
                target_name == target.name.as_str() && target.kind.contains(&target_kind)
            })
            .ok_or(CrateConfigError::TargetNotFound(target_name))?
            .clone();

        Ok(Self {
            krates: Arc::new(krates),
            package,
            dioxus_config,
            target,
        })
    }

    /// Compose an asset directory. Represents the typical "public" directory
    /// with publicly available resources (configurable in the `Dioxus.toml`).
    pub fn asset_dir(&self) -> PathBuf {
        self.workspace_dir()
            .join(&self.dioxus_config.application.asset_dir)
    }

    /// Compose an out directory. Represents the typical "dist" directory that
    /// is "distributed" after building an application (configurable in the
    /// `Dioxus.toml`).
    pub fn out_dir(&self) -> PathBuf {
        self.workspace_dir()
            .join(&self.dioxus_config.application.out_dir)
    }

    /// Compose an out directory for the fullstack platform. See `out_dir()`
    /// method.
    pub fn fullstack_out_dir(&self) -> PathBuf {
        self.workspace_dir().join(".dioxus")
    }

    /// Compose a target directory for the server (fullstack-only?).
    pub fn server_target_dir(&self) -> PathBuf {
        self.fullstack_out_dir().join("ssr")
    }

    /// Compose a target directory for the client (fullstack-only?).
    pub fn client_target_dir(&self) -> PathBuf {
        self.fullstack_out_dir().join("web")
    }

    /// Get the workspace directory for the crate
    pub fn workspace_dir(&self) -> PathBuf {
        self.krates.workspace_root().as_std_path().to_path_buf()
    }

    /// Get the directory of the crate
    pub fn crate_dir(&self) -> PathBuf {
        self.package()
            .manifest_path
            .parent()
            .unwrap()
            .as_std_path()
            .to_path_buf()
    }

    /// Get the main source file of the target
    pub fn main_source_file(&self) -> PathBuf {
        self.target.src_path.as_std_path().to_path_buf()
    }

    /// Get the package we are currently in
    pub fn package(&self) -> &krates::cm::Package {
        &self.krates[self.package]
    }

    /// Get the name of the package we are compiling
    pub fn executable_name(&self) -> &str {
        &self.target.name
    }

    /// Get the type of executable we are compiling
    ///
    /// NOTE(review): indexes `kind[0]`, which panics if a target ever has no
    /// kinds — confirm cargo metadata guarantees at least one.
    pub fn executable_type(&self) -> krates::cm::TargetKind {
        self.target.kind[0].clone()
    }

    /// Find the cargo features that enable `dioxus/<platform>` (directly or
    /// via a weak `dioxus?/<platform>` dependency) for this package.
    ///
    /// Fixed: previously took `&mut self` despite performing no mutation;
    /// relaxed to `&self`, which remains callable from all existing call sites.
    pub fn features_for_platform(&self, platform: Platform) -> Vec<String> {
        let package = self.package();
        // Try to find the feature that activates the dioxus feature for the given platform
        let dioxus_feature = platform.feature_name();
        let feature = package.features.iter().find_map(|(key, features)| {
            // Find a feature that starts with dioxus/ or dioxus?/
            for feature in features {
                if let Some((_, after_dioxus)) = feature.split_once("dioxus") {
                    if let Some(dioxus_feature_enabled) =
                        after_dioxus.trim_start_matches('?').strip_prefix('/')
                    {
                        // If that enables the feature we are looking for, return that feature
                        if dioxus_feature_enabled == dioxus_feature {
                            return Some(key.clone());
                        }
                    }
                }
            }
            None
        });
        feature.into_iter().collect()
    }

    /// Check if assets should be pre_compressed. This will only be true in release mode if the user has enabled pre_compress in the web config.
    pub fn should_pre_compress_web_assets(&self, release: bool) -> bool {
        self.dioxus_config.web.pre_compress && release
    }
}
/// A named build artifact paired with its kind.
///
/// NOTE(review): not constructed anywhere in this file — presumably consumed
/// elsewhere in the build pipeline; confirm before extending.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Executable {
    // Target (bin/example/lib) name.
    pub name: String,
    // What kind of artifact this is; see `ExecutableType`.
    pub ty: ExecutableType,
}
/// The kind of build artifact an `Executable` refers to.
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum ExecutableType {
    Binary,
    Lib,
    Example,
}

impl ExecutableType {
    /// Whether this artifact kind is directly runnable — i.e. it is a binary
    /// or an example. Libraries are not runnable.
    ///
    /// (The previous doc comment incorrectly claimed this returned the
    /// executable's *name*; it is a boolean predicate.)
    pub fn executable(&self) -> bool {
        matches!(self, Self::Binary | Self::Example)
    }
}
/// A `Dioxus.toml` was found but could not be parsed; records where it was
/// found and the parser's message.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LoadDioxusConfigError {
    location: String,
    error: String,
}

impl std::fmt::Display for LoadDioxusConfigError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Rendered as "<location> <error>", matching the original format.
        let Self { location, error } = self;
        write!(f, "{location} {error}")
    }
}

impl std::error::Error for LoadDioxusConfigError {}
/// Errors that can arise while resolving the workspace, build target, or
/// `Dioxus.toml` for the current crate.
#[derive(Debug)]
#[non_exhaustive]
pub enum CrateConfigError {
    /// Failure probing the cargo project layout (see `CargoError`).
    Cargo(CargoError),
    /// Underlying filesystem/IO error.
    Io(std::io::Error),
    /// A `Dioxus.toml` was not valid TOML for `DioxusConfig`.
    Toml(toml::de::Error),
    /// A `Dioxus.toml` parsed but failed to load; includes its location.
    LoadDioxusConfig(LoadDioxusConfigError),
    /// No bin/example target with the requested name exists in the package.
    TargetNotFound(String),
    /// Error from the `krates` dependency-graph builder.
    Krates(krates::Error),
    /// An explicitly requested package is not a workspace member.
    PackageNotFound(String),
    /// Could not determine which workspace package the cwd belongs to.
    CurrentPackageNotFound,
}
impl From<CargoError> for CrateConfigError {
fn from(err: CargoError) -> Self {
Self::Cargo(err)
}
}
impl From<std::io::Error> for CrateConfigError {
fn from(err: std::io::Error) -> Self {
Self::Io(err)
}
}
impl From<toml::de::Error> for CrateConfigError {
fn from(err: toml::de::Error) -> Self {
Self::Toml(err)
}
}
impl From<LoadDioxusConfigError> for CrateConfigError {
fn from(err: LoadDioxusConfigError) -> Self {
Self::LoadDioxusConfig(err)
}
}
impl From<krates::Error> for CrateConfigError {
fn from(err: krates::Error) -> Self {
Self::Krates(err)
}
}
impl Display for CrateConfigError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Wrapped errors defer to their own Display impls; the remaining
        // variants render their context inline.
        match self {
            Self::Cargo(inner) => Display::fmt(inner, f),
            Self::Io(inner) => Display::fmt(inner, f),
            Self::Toml(inner) => Display::fmt(inner, f),
            Self::LoadDioxusConfig(inner) => Display::fmt(inner, f),
            Self::TargetNotFound(target) => {
                write!(f, "Failed to find target with name: {}", target)
            }
            Self::Krates(inner) => Display::fmt(inner, f),
            Self::PackageNotFound(package) => write!(f, "Package not found: {}", package),
            Self::CurrentPackageNotFound => f.write_str("Failed to find current package"),
        }
    }
}

impl std::error::Error for CrateConfigError {}

View file

@ -0,0 +1,2 @@
// The file has been placed there by the build script.
include!(concat!(env!("OUT_DIR"), "/built.rs"));

View file

@ -1,5 +1,7 @@
use thiserror::Error as ThisError; use thiserror::Error as ThisError;
use crate::{metadata::CargoError, CrateConfigError, LoadDioxusConfigError};
pub type Result<T, E = Error> = std::result::Result<T, E>; pub type Result<T, E = Error> = std::result::Result<T, E>;
#[derive(ThisError, Debug)] #[derive(ThisError, Debug)]
@ -72,20 +74,20 @@ impl From<hyper::Error> for Error {
} }
} }
impl From<dioxus_cli_config::LoadDioxusConfigError> for Error { impl From<LoadDioxusConfigError> for Error {
fn from(e: dioxus_cli_config::LoadDioxusConfigError) -> Self { fn from(e: LoadDioxusConfigError) -> Self {
Self::RuntimeError(e.to_string()) Self::RuntimeError(e.to_string())
} }
} }
impl From<dioxus_cli_config::CargoError> for Error { impl From<CargoError> for Error {
fn from(e: dioxus_cli_config::CargoError) -> Self { fn from(e: CargoError) -> Self {
Self::CargoError(e.to_string()) Self::CargoError(e.to_string())
} }
} }
impl From<dioxus_cli_config::CrateConfigError> for Error { impl From<CrateConfigError> for Error {
fn from(e: dioxus_cli_config::CrateConfigError) -> Self { fn from(e: CrateConfigError) -> Self {
Self::RuntimeError(e.to_string()) Self::RuntimeError(e.to_string())
} }
} }

View file

@ -3,17 +3,22 @@
#![doc(html_favicon_url = "https://avatars.githubusercontent.com/u/79236386")] #![doc(html_favicon_url = "https://avatars.githubusercontent.com/u/79236386")]
pub mod assets; pub mod assets;
pub mod builder; pub mod dx_build_info;
pub mod server; pub mod serve;
pub mod tools; pub mod tools;
pub use builder::*;
pub mod cli; pub mod cli;
pub use cli::*; pub use cli::*;
pub mod error; pub mod error;
pub use error::*; pub use error::*;
#[cfg(feature = "plugin")] pub(crate) mod builder;
pub mod plugin;
mod dioxus_crate;
pub use dioxus_crate::*;
mod settings;
pub(crate) use settings::*;
pub(crate) mod metadata;

View file

@ -1,6 +1,5 @@
use dioxus_cli_config::DioxusConfig; use std::env;
use std::{env, path::PathBuf}; use tracing_subscriber::{prelude::*, EnvFilter, Layer};
use tracing_subscriber::EnvFilter;
use anyhow::Context; use anyhow::Context;
use clap::Parser; use clap::Parser;
@ -10,16 +9,11 @@ use Commands::*;
const LOG_ENV: &str = "DIOXUS_LOG"; const LOG_ENV: &str = "DIOXUS_LOG";
fn main() -> anyhow::Result<()> { #[tokio::main]
async fn main() -> anyhow::Result<()> {
let args = Cli::parse(); let args = Cli::parse();
// If {LOG_ENV} is set, default to env, otherwise filter to cli build_tracing();
// and manganis warnings and errors from other crates
let mut filter = EnvFilter::new("error,dx=info,dioxus-cli=info,manganis-cli-support=info");
if env::var(LOG_ENV).is_ok() {
filter = EnvFilter::from_env(LOG_ENV);
}
tracing_subscriber::fmt().with_env_filter(filter).init();
match args.action { match args.action {
Translate(opts) => opts Translate(opts) => opts
@ -32,89 +26,65 @@ fn main() -> anyhow::Result<()> {
Init(opts) => opts Init(opts) => opts
.init() .init()
.context(error_wrapper("Initialising a new project failed")), .context(error_wrapper("Initializing a new project failed")),
Config(opts) => opts Config(opts) => opts
.config() .config()
.context(error_wrapper("Configuring new project failed")), .context(error_wrapper("Configuring new project failed")),
#[cfg(feature = "plugin")]
Plugin(opts) => opts.plugin().context(error_wrapper("Error with plugin")),
Autoformat(opts) => opts Autoformat(opts) => opts
.autoformat() .autoformat()
.context(error_wrapper("Error autoformatting RSX")), .context(error_wrapper("Error autoformatting RSX")),
Check(opts) => opts.check().context(error_wrapper("Error checking RSX")), Check(opts) => opts
.check()
.await
.context(error_wrapper("Error checking RSX")),
Link(opts) => opts Link(opts) => opts
.link() .link()
.context(error_wrapper("Error with linker passthrough")), .context(error_wrapper("Error with linker passthrough")),
action => { Build(mut opts) => opts
let bin = get_bin(args.bin)?; .run()
let _dioxus_config = DioxusConfig::load(Some(bin.clone())) .await
.context("Failed to load Dioxus config because")? .context(error_wrapper("Building project failed")),
.unwrap_or_else(|| {
tracing::info!("You appear to be creating a Dioxus project from scratch; we will use the default config");
DioxusConfig::default()
});
#[cfg(feature = "plugin")] Clean(opts) => opts
use dioxus_cli::plugin::PluginManager; .clean()
.context(error_wrapper("Cleaning project failed")),
#[cfg(feature = "plugin")] Serve(opts) => opts
PluginManager::init(_dioxus_config.plugin) .serve()
.context(error_wrapper("Plugin system initialization failed"))?; .await
.context(error_wrapper("Serving project failed")),
match action { Bundle(opts) => opts
Build(opts) => opts .bundle()
.build(Some(bin.clone()), None, None) .await
.context(error_wrapper("Building project failed")), .context(error_wrapper("Bundling project failed")),
Clean(opts) => opts
.clean(Some(bin.clone()))
.context(error_wrapper("Cleaning project failed")),
Serve(opts) => opts
.serve(Some(bin.clone()))
.context(error_wrapper("Serving project failed")),
Bundle(opts) => opts
.bundle(Some(bin.clone()))
.context(error_wrapper("Bundling project failed")),
_ => unreachable!(),
}
}
} }
} }
fn get_bin(bin: Option<String>) -> Result<PathBuf> {
let metadata = cargo_metadata::MetadataCommand::new()
.exec()
.map_err(Error::CargoMetadata)?;
let package = if let Some(bin) = bin {
metadata
.workspace_packages()
.into_iter()
.find(|p| p.name == bin)
.ok_or(Error::CargoError(format!("no such package: {}", bin)))?
} else {
metadata
.root_package()
.ok_or(Error::CargoError("no root package?".to_string()))?
};
let crate_dir = package
.manifest_path
.parent()
.ok_or(Error::CargoError("couldn't take parent dir".to_string()))?;
Ok(crate_dir.into())
}
/// Simplifies error messages that use the same pattern. /// Simplifies error messages that use the same pattern.
fn error_wrapper(message: &str) -> String { fn error_wrapper(message: &str) -> String {
format!("🚫 {message}:") format!("🚫 {message}:")
} }
fn build_tracing() {
// If {LOG_ENV} is set, default to env, otherwise filter to cli
// and manganis warnings and errors from other crates
let mut filter = EnvFilter::new("error,dx=info,dioxus-cli=info,manganis-cli-support=info");
if env::var(LOG_ENV).is_ok() {
filter = EnvFilter::from_env(LOG_ENV);
}
let sub =
tracing_subscriber::registry().with(tracing_subscriber::fmt::layer().with_filter(filter));
#[cfg(feature = "tokio-console")]
sub.with(console_subscriber::spawn()).init();
#[cfg(not(feature = "tokio-console"))]
sub.init();
}

View file

@ -0,0 +1,64 @@
//! Utilities for working with cargo and rust files
use std::error::Error;
use std::{
env,
fmt::{Display, Formatter},
fs,
path::{Path, PathBuf},
};
/// An error raised while interrogating cargo project layout on disk.
#[derive(Debug, Clone)]
pub struct CargoError {
    msg: String,
}

impl CargoError {
    /// Wrap a human-readable message in a `CargoError`.
    pub fn new(msg: String) -> Self {
        CargoError { msg }
    }
}

impl Display for CargoError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Rendered as "CargoError: <msg>", matching the original format.
        f.write_str("CargoError: ")?;
        f.write_str(&self.msg)
    }
}

impl Error for CargoError {}
/// How many parent folders are searched for a `Cargo.toml`
const MAX_ANCESTORS: u32 = 10;
/// Returns the root of the crate that the command is run from
///
/// If the command is run from the workspace root, this will return the top-level Cargo.toml
pub(crate) fn crate_root() -> Result<PathBuf, CargoError> {
// From the current directory we work our way up, looking for `Cargo.toml`
env::current_dir()
.ok()
.and_then(|mut wd| {
for _ in 0..MAX_ANCESTORS {
if contains_manifest(&wd) {
return Some(wd);
}
if !wd.pop() {
break;
}
}
None
})
.ok_or_else(|| {
CargoError::new("Failed to find directory containing Cargo.toml".to_string())
})
}
/// Checks if the directory contains `Cargo.toml`
fn contains_manifest(path: &Path) -> bool {
    // An unreadable or missing directory counts as "no manifest".
    let Ok(entries) = fs::read_dir(path) else {
        return false;
    };
    // Unreadable individual entries are skipped by `flatten`.
    for entry in entries.flatten() {
        if entry.file_name().as_os_str() == "Cargo.toml" {
            return true;
        }
    }
    false
}

View file

@ -1,65 +0,0 @@
use std::process::{Command, Stdio};
use mlua::{FromLua, UserData};
/// Stdio mode parsed from a Lua string: "inherit" | "piped" | "null".
#[derive(Debug, Clone, Copy)]
enum StdioFromString {
    Inherit,
    Piped,
    Null,
}
impl<'lua> FromLua<'lua> for StdioFromString {
    fn from_lua(lua_value: mlua::Value<'lua>, _lua: &'lua mlua::Lua) -> mlua::Result<Self> {
        if let mlua::Value::String(v) = lua_value {
            // NOTE(review): to_str().unwrap() panics on non-UTF-8 Lua strings.
            let v = v.to_str().unwrap();
            // Case-insensitive match; unknown strings fall back to Inherit.
            return Ok(match v.to_lowercase().as_str() {
                "inherit" => Self::Inherit,
                "piped" => Self::Piped,
                "null" => Self::Null,
                _ => Self::Inherit,
            });
        }
        // Non-string Lua values also default to Inherit rather than erroring.
        Ok(Self::Inherit)
    }
}
impl StdioFromString {
    /// Convert the parsed mode into a `std::process::Stdio`.
    pub fn to_stdio(self) -> Stdio {
        match self {
            StdioFromString::Inherit => Stdio::inherit(),
            StdioFromString::Piped => Stdio::piped(),
            StdioFromString::Null => Stdio::null(),
        }
    }
}
/// Lua API surface for running external commands from plugins.
pub struct PluginCommander;
impl UserData for PluginCommander {
    fn add_methods<'lua, M: mlua::UserDataMethods<'lua, Self>>(methods: &mut M) {
        // exec(cmd: string[], stdout: "inherit"|"piped"|"null", stderr: ...)
        //
        // Runs cmd[1] with the remaining entries as arguments, blocking until
        // the process exits. An empty command list is a silent no-op.
        methods.add_function(
            "exec",
            |_, (cmd, stdout, stderr): (Vec<String>, StdioFromString, StdioFromString)| {
                // Improvement: the original skipped the program name with an
                // enumerate/filter/map/collect pipeline that cloned every
                // argument; `split_first` borrows the same data in O(1).
                let Some((program, args)) = cmd.split_first() else {
                    return Ok(());
                };
                Command::new(program)
                    .args(args)
                    .stdout(stdout.to_stdio())
                    .stderr(stderr.to_stdio())
                    .output()?;
                Ok(())
            },
        );
    }

    fn add_fields<'lua, F: mlua::UserDataFields<'lua, Self>>(_fields: &mut F) {}
}

View file

@ -1,13 +0,0 @@
use mlua::UserData;
use crate::tools::app_path;
/// Lua API surface for well-known CLI directories.
pub struct PluginDirs;
impl UserData for PluginDirs {
    fn add_methods<'lua, M: mlua::UserDataMethods<'lua, Self>>(methods: &mut M) {
        // plugins_dir() -> string: absolute path of the CLI's plugin folder.
        methods.add_function("plugins_dir", |_, ()| {
            Ok(app_path().join("plugins").to_str().unwrap().to_string())
        });
    }
}

View file

@ -1,85 +0,0 @@
use std::{
fs::{create_dir, create_dir_all, remove_dir_all, File},
io::{Read, Write},
path::PathBuf,
};
use crate::tools::extract_zip;
use flate2::read::GzDecoder;
use mlua::UserData;
use tar::Archive;
/// Lua API surface for filesystem operations available to plugins.
///
/// Every function reports failure as a `false` return value rather than a
/// Lua error (except `file_get_content`, which propagates I/O errors).
pub struct PluginFileSystem;
impl UserData for PluginFileSystem {
    fn add_methods<'lua, M: mlua::UserDataMethods<'lua, Self>>(methods: &mut M) {
        // create_dir(path, recursive) -> bool
        // mkdir / mkdir -p; true when the directory already exists or was created.
        methods.add_function("create_dir", |_, (path, recursive): (String, bool)| {
            let path = PathBuf::from(path);
            if path.exists() {
                return Ok(true);
            }
            let created = if recursive {
                create_dir_all(path)
            } else {
                create_dir(path)
            };
            Ok(created.is_ok())
        });

        // remove_dir(path) -> bool: recursively delete a directory tree.
        methods.add_function("remove_dir", |_, path: String| {
            Ok(remove_dir_all(PathBuf::from(path)).is_ok())
        });

        // file_get_content(path) -> string: read an entire file as UTF-8.
        methods.add_function("file_get_content", |_, path: String| {
            let mut buffer = String::new();
            std::fs::File::open(PathBuf::from(path))?.read_to_string(&mut buffer)?;
            Ok(buffer)
        });

        // file_set_content(path, content) -> bool: create/overwrite a file.
        methods.add_function("file_set_content", |_, (path, content): (String, String)| {
            let Ok(mut file) = std::fs::File::create(PathBuf::from(path)) else {
                return Ok(false);
            };
            Ok(file.write_all(content.as_bytes()).is_ok())
        });

        // unzip_file(file, target) -> bool: extract a .zip archive.
        methods.add_function("unzip_file", |_, (file, target): (String, String)| {
            let file = PathBuf::from(file);
            let target = PathBuf::from(target);
            Ok(extract_zip(&file, &target).is_ok())
        });

        // untar_gz_file(file, target) -> bool: extract a .tar.gz archive.
        methods.add_function("untar_gz_file", |_, (file, target): (String, String)| {
            let Ok(tar_gz) = File::open(PathBuf::from(file)) else {
                return Ok(false);
            };
            let mut archive = Archive::new(GzDecoder::new(tar_gz));
            Ok(archive.unpack(PathBuf::from(target)).is_ok())
        });
    }
}

View file

@ -1,27 +0,0 @@
use mlua::UserData;
pub struct PluginLogger;
impl UserData for PluginLogger {
fn add_methods<'lua, M: mlua::UserDataMethods<'lua, Self>>(methods: &mut M) {
methods.add_function("trace", |_, info: String| {
tracing::trace!("{}", info);
Ok(())
});
methods.add_function("info", |_, info: String| {
tracing::info!("{}", info);
Ok(())
});
methods.add_function("debug", |_, info: String| {
tracing::debug!("{}", info);
Ok(())
});
methods.add_function("warn", |_, info: String| {
tracing::warn!("{}", info);
Ok(())
});
methods.add_function("error", |_, info: String| {
tracing::error!("{}", info);
Ok(())
});
}
}

View file

@ -1,233 +0,0 @@
use mlua::{FromLua, Function, ToLua};
pub mod command;
pub mod dirs;
pub mod fs;
pub mod log;
pub mod network;
pub mod os;
pub mod path;
/// Metadata and lifecycle hooks describing a single loaded plugin, as
/// declared by its `init.lua`.
#[derive(Debug, Clone)]
pub struct PluginInfo<'lua> {
    pub name: String,
    pub repository: String,
    pub author: String,
    pub version: String,

    pub inner: PluginInner,

    pub on_init: Option<Function<'lua>>,
    pub build: PluginBuildInfo<'lua>,
    pub serve: PluginServeInfo<'lua>,
}

impl<'lua> FromLua<'lua> for PluginInfo<'lua> {
    /// Build from a Lua table, overlaying whichever fields it provides onto
    /// defaults; missing or mistyped fields are silently skipped.
    fn from_lua(lua_value: mlua::Value<'lua>, _lua: &'lua mlua::Lua) -> mlua::Result<Self> {
        let mut info = Self {
            name: String::default(),
            repository: String::default(),
            author: String::default(),
            // Plugins that omit a version report "0.1.0".
            version: String::from("0.1.0"),
            inner: Default::default(),
            on_init: None,
            build: Default::default(),
            serve: Default::default(),
        };

        if let mlua::Value::Table(table) = lua_value {
            if let Ok(name) = table.get::<_, String>("name") {
                info.name = name;
            }
            if let Ok(repository) = table.get::<_, String>("repository") {
                info.repository = repository;
            }
            if let Ok(author) = table.get::<_, String>("author") {
                info.author = author;
            }
            if let Ok(version) = table.get::<_, String>("version") {
                info.version = version;
            }
            if let Ok(inner) = table.get::<_, PluginInner>("inner") {
                info.inner = inner;
            }
            if let Ok(on_init) = table.get::<_, Function>("on_init") {
                info.on_init = Some(on_init);
            }
            if let Ok(build) = table.get::<_, PluginBuildInfo>("build") {
                info.build = build;
            }
            if let Ok(serve) = table.get::<_, PluginServeInfo>("serve") {
                info.serve = serve;
            }
        }

        Ok(info)
    }
}

impl<'lua> ToLua<'lua> for PluginInfo<'lua> {
    /// Mirror of `from_lua`: serialize into a Lua table, omitting `on_init`
    /// when absent.
    fn to_lua(self, lua: &'lua mlua::Lua) -> mlua::Result<mlua::Value<'lua>> {
        let table = lua.create_table()?;

        table.set("name", self.name)?;
        table.set("repository", self.repository)?;
        table.set("author", self.author)?;
        table.set("version", self.version)?;
        table.set("inner", self.inner)?;
        if let Some(on_init) = self.on_init {
            table.set("on_init", on_init)?;
        }
        table.set("build", self.build)?;
        table.set("serve", self.serve)?;

        Ok(mlua::Value::Table(table))
    }
}
/// Bookkeeping the loader attaches to each plugin: where it lives on disk
/// and whether it came from a configured loader path.
#[derive(Debug, Clone, Default)]
pub struct PluginInner {
    pub plugin_dir: String,
    pub from_loader: bool,
}

impl<'lua> FromLua<'lua> for PluginInner {
    fn from_lua(lua_value: mlua::Value<'lua>, _lua: &'lua mlua::Lua) -> mlua::Result<Self> {
        // Defaults (empty dir, not from loader) when fields are absent or the
        // value isn't a table at all.
        let mut inner = Self::default();

        if let mlua::Value::Table(table) = lua_value {
            if let Ok(plugin_dir) = table.get::<_, String>("plugin_dir") {
                inner.plugin_dir = plugin_dir;
            }
            if let Ok(from_loader) = table.get::<_, bool>("from_loader") {
                inner.from_loader = from_loader;
            }
        }

        Ok(inner)
    }
}

impl<'lua> ToLua<'lua> for PluginInner {
    fn to_lua(self, lua: &'lua mlua::Lua) -> mlua::Result<mlua::Value<'lua>> {
        let table = lua.create_table()?;
        table.set("plugin_dir", self.plugin_dir)?;
        table.set("from_loader", self.from_loader)?;
        Ok(mlua::Value::Table(table))
    }
}
/// Optional hooks a plugin can register around the build lifecycle.
#[derive(Debug, Clone, Default)]
pub struct PluginBuildInfo<'lua> {
    pub on_start: Option<Function<'lua>>,
    pub on_finish: Option<Function<'lua>>,
}

impl<'lua> FromLua<'lua> for PluginBuildInfo<'lua> {
    fn from_lua(lua_value: mlua::Value<'lua>, _lua: &'lua mlua::Lua) -> mlua::Result<Self> {
        let mut build = Self::default();

        if let mlua::Value::Table(table) = lua_value {
            if let Ok(on_start) = table.get::<_, Function>("on_start") {
                build.on_start = Some(on_start);
            }
            if let Ok(on_finish) = table.get::<_, Function>("on_finish") {
                build.on_finish = Some(on_finish);
            }
        }

        Ok(build)
    }
}

impl<'lua> ToLua<'lua> for PluginBuildInfo<'lua> {
    fn to_lua(self, lua: &'lua mlua::Lua) -> mlua::Result<mlua::Value<'lua>> {
        let table = lua.create_table()?;
        if let Some(on_start) = self.on_start {
            table.set("on_start", on_start)?;
        }
        if let Some(on_finish) = self.on_finish {
            table.set("on_finish", on_finish)?;
        }
        Ok(mlua::Value::Table(table))
    }
}
/// Optional hooks a plugin can register around the dev-server lifecycle,
/// plus the polling interval for `on_interval`.
#[derive(Debug, Clone, Default)]
pub struct PluginServeInfo<'lua> {
    pub interval: i32,

    pub on_start: Option<Function<'lua>>,
    pub on_interval: Option<Function<'lua>>,
    pub on_rebuild: Option<Function<'lua>>,
    pub on_shutdown: Option<Function<'lua>>,
}

impl<'lua> FromLua<'lua> for PluginServeInfo<'lua> {
    fn from_lua(lua_value: mlua::Value<'lua>, _lua: &'lua mlua::Lua) -> mlua::Result<Self> {
        let mut serve = Self::default();

        if let mlua::Value::Table(table) = lua_value {
            if let Ok(interval) = table.get::<_, i32>("interval") {
                serve.interval = interval;
            }
            if let Ok(on_start) = table.get::<_, Function>("on_start") {
                serve.on_start = Some(on_start);
            }
            if let Ok(on_interval) = table.get::<_, Function>("on_interval") {
                serve.on_interval = Some(on_interval);
            }
            if let Ok(on_rebuild) = table.get::<_, Function>("on_rebuild") {
                serve.on_rebuild = Some(on_rebuild);
            }
            if let Ok(on_shutdown) = table.get::<_, Function>("on_shutdown") {
                serve.on_shutdown = Some(on_shutdown);
            }
        }

        Ok(serve)
    }
}

impl<'lua> ToLua<'lua> for PluginServeInfo<'lua> {
    fn to_lua(self, lua: &'lua mlua::Lua) -> mlua::Result<mlua::Value<'lua>> {
        let table = lua.create_table()?;

        table.set("interval", self.interval)?;
        if let Some(on_start) = self.on_start {
            table.set("on_start", on_start)?;
        }
        if let Some(on_interval) = self.on_interval {
            table.set("on_interval", on_interval)?;
        }
        if let Some(on_rebuild) = self.on_rebuild {
            table.set("on_rebuild", on_rebuild)?;
        }
        if let Some(on_shutdown) = self.on_shutdown {
            table.set("on_shutdown", on_shutdown)?;
        }

        Ok(mlua::Value::Table(table))
    }
}

View file

@ -1,27 +0,0 @@
use std::{io::Cursor, path::PathBuf};
use mlua::UserData;
/// Lua API surface for network operations available to plugins.
pub struct PluginNetwork;
impl UserData for PluginNetwork {
    fn add_methods<'lua, M: mlua::UserDataMethods<'lua, Self>>(methods: &mut M) {
        // download_file(url, path) -> bool
        // Blocking GET of `url`, writing the body to `path`. Returns false on
        // any network or filesystem failure.
        methods.add_function("download_file", |_, (url, path): (String, String)| {
            let Ok(resp) = reqwest::blocking::get(url) else {
                return Ok(false);
            };
            // Fixed: the original `unwrap`ed the body read, which could panic
            // the whole CLI on a dropped connection; treat it as a failed
            // download instead.
            let Ok(bytes) = resp.bytes() else {
                return Ok(false);
            };
            let Ok(mut file) = std::fs::File::create(PathBuf::from(path)) else {
                return Ok(false);
            };
            let mut content = Cursor::new(bytes);
            Ok(std::io::copy(&mut content, &mut file).is_ok())
        });
    }
}

View file

@ -1,18 +0,0 @@
use mlua::UserData;
/// Lua API surface for querying the host operating system.
pub struct PluginOS;
impl UserData for PluginOS {
    fn add_methods<'lua, M: mlua::UserDataMethods<'lua, Self>>(methods: &mut M) {
        // current_platform() -> "windows" | "macos" | "linux"
        // Panics on any other OS: the plugin system defines no behavior there.
        methods.add_function("current_platform", |_, ()| {
            if cfg!(target_os = "windows") {
                Ok("windows")
            } else if cfg!(target_os = "macos") {
                Ok("macos")
            } else if cfg!(target_os = "linux") {
                Ok("linux")
            } else {
                // Fixed typo in the panic message ("platformm" -> "platform").
                panic!("unsupported platform");
            }
        });
    }
}

View file

@ -1,41 +0,0 @@
use std::path::PathBuf;
use mlua::{UserData, Variadic};
/// Lua API surface for path manipulation helpers.
pub struct PluginPath;
impl UserData for PluginPath {
    fn add_methods<'lua, M: mlua::UserDataMethods<'lua, Self>>(methods: &mut M) {
        // join(...segments) -> string: concatenate path segments with the
        // platform separator.
        methods.add_function("join", |_, segments: Variadic<String>| {
            let joined = segments
                .iter()
                .fold(PathBuf::new(), |acc, segment| acc.join(segment));
            Ok(joined.to_str().unwrap().to_string())
        });

        // parent(path) -> string: the parent directory, or the input
        // unchanged when there is none (e.g. the root).
        methods.add_function("parent", |_, path: String| {
            let buf = PathBuf::from(&path);
            match buf.parent() {
                Some(parent) => Ok(parent.to_str().unwrap().to_string()),
                None => Ok(path),
            }
        });

        // exists / is_dir / is_file: thin wrappers over std::path queries.
        methods.add_function("exists", |_, path: String| Ok(PathBuf::from(path).exists()));
        methods.add_function("is_dir", |_, path: String| Ok(PathBuf::from(path).is_dir()));
        methods.add_function("is_file", |_, path: String| Ok(PathBuf::from(path).is_file()));
    }
}

View file

@ -1,328 +0,0 @@
use std::{
io::{Read, Write},
path::PathBuf,
sync::Mutex,
};
use crate::tools::{app_path, clone_repo};
use dioxus_cli_config::CrateConfig;
use mlua::{Lua, Table};
use serde_json::json;
use self::{
interface::{
command::PluginCommander, dirs::PluginDirs, fs::PluginFileSystem, log::PluginLogger,
network::PluginNetwork, os::PluginOS, path::PluginPath, PluginInfo,
},
types::PluginConfig,
};
pub mod interface;
mod types;
lazy_static::lazy_static! {
    // One process-wide Lua interpreter shared by every plugin operation;
    // guarded by a Mutex because hooks fire from several CLI code paths.
    static ref LUA: Mutex<Lua> = Mutex::new(Lua::new());
}
/// Zero-sized entry point for the CLI's Lua plugin system; all state lives
/// in the shared `LUA` interpreter's globals (`manager`, `name_index`, ...).
pub struct PluginManager;
impl PluginManager {
pub fn init(config: toml::Value) -> anyhow::Result<()> {
let config = PluginConfig::from_toml_value(config);
if !config.available {
return Ok(());
}
let lua = LUA.lock().unwrap();
let manager = lua.create_table().unwrap();
let name_index = lua.create_table().unwrap();
let plugin_dir = Self::init_plugin_dir();
let api = lua.create_table().unwrap();
api.set("log", PluginLogger).unwrap();
api.set("command", PluginCommander).unwrap();
api.set("network", PluginNetwork).unwrap();
api.set("dirs", PluginDirs).unwrap();
api.set("fs", PluginFileSystem).unwrap();
api.set("path", PluginPath).unwrap();
api.set("os", PluginOS).unwrap();
lua.globals().set("plugin_lib", api).unwrap();
lua.globals()
.set("library_dir", plugin_dir.to_str().unwrap())
.unwrap();
lua.globals().set("config_info", config.clone())?;
let mut index: u32 = 1;
let dirs = std::fs::read_dir(&plugin_dir)?;
let mut path_list = dirs
.filter(|v| v.is_ok())
.map(|v| (v.unwrap().path(), false))
.collect::<Vec<(PathBuf, bool)>>();
for i in &config.loader {
let path = PathBuf::from(i);
if !path.is_dir() {
// for loader dir, we need check first, because we need give a error log.
tracing::error!("Plugin loader: {:?} path is not a exists directory.", path);
}
path_list.push((path, true));
}
for entry in path_list {
let plugin_dir = entry.0.to_path_buf();
if plugin_dir.is_dir() {
let init_file = plugin_dir.join("init.lua");
if init_file.is_file() {
let mut file = std::fs::File::open(init_file).unwrap();
let mut buffer = String::new();
file.read_to_string(&mut buffer).unwrap();
let current_plugin_dir = plugin_dir.to_str().unwrap().to_string();
let from_loader = entry.1;
lua.globals()
.set("_temp_plugin_dir", current_plugin_dir.clone())?;
lua.globals().set("_temp_from_loader", from_loader)?;
let info = lua.load(&buffer).eval::<PluginInfo>();
match info {
Ok(mut info) => {
if name_index.contains_key(info.name.clone()).unwrap_or(false)
&& !from_loader
{
// found same name plugin, intercept load
tracing::warn!(
"Plugin {} has been intercepted. [mulit-load]",
info.name
);
continue;
}
info.inner.plugin_dir = current_plugin_dir;
info.inner.from_loader = from_loader;
// call `on_init` if file "dcp.json" not exists
let dcp_file = plugin_dir.join("dcp.json");
if !dcp_file.is_file() {
if let Some(func) = info.clone().on_init {
let result = func.call::<_, bool>(());
match result {
Ok(true) => {
// plugin init success, create `dcp.json` file.
let mut file = std::fs::File::create(dcp_file).unwrap();
let value = json!({
"name": info.name,
"author": info.author,
"repository": info.repository,
"version": info.version,
"generate_time": chrono::Local::now().timestamp(),
});
let buffer =
serde_json::to_string_pretty(&value).unwrap();
let buffer = buffer.as_bytes();
file.write_all(buffer).unwrap();
// insert plugin-info into plugin-manager
if let Ok(index) =
name_index.get::<_, u32>(info.name.clone())
{
let _ = manager.set(index, info.clone());
} else {
let _ = manager.set(index, info.clone());
index += 1;
let _ = name_index.set(info.name, index);
}
}
Ok(false) => {
tracing::warn!(
"Plugin init function result is `false`, init failed."
);
}
Err(e) => {
tracing::warn!("Plugin init failed: {e}");
}
}
}
} else if let Ok(index) = name_index.get::<_, u32>(info.name.clone()) {
let _ = manager.set(index, info.clone());
} else {
let _ = manager.set(index, info.clone());
index += 1;
let _ = name_index.set(info.name, index);
}
}
Err(_e) => {
let dir_name = plugin_dir.file_name().unwrap().to_str().unwrap();
tracing::error!("Plugin '{dir_name}' load failed.");
}
}
}
}
}
lua.globals().set("manager", manager).unwrap();
Ok(())
}
pub fn on_build_start(crate_config: &CrateConfig, platform: &str) -> anyhow::Result<()> {
let lua = LUA.lock().unwrap();
if !lua.globals().contains_key("manager")? {
return Ok(());
}
let manager = lua.globals().get::<_, Table>("manager")?;
let args = lua.create_table()?;
args.set("name", crate_config.dioxus_config.application.name.clone())?;
args.set("platform", platform)?;
args.set("out_dir", crate_config.out_dir().to_str().unwrap())?;
args.set("asset_dir", crate_config.asset_dir().to_str().unwrap())?;
for i in 1..(manager.len()? as i32 + 1) {
let info = manager.get::<i32, PluginInfo>(i)?;
if let Some(func) = info.build.on_start {
func.call::<Table, ()>(args.clone())?;
}
}
Ok(())
}
pub fn on_build_finish(crate_config: &CrateConfig, platform: &str) -> anyhow::Result<()> {
let lua = LUA.lock().unwrap();
if !lua.globals().contains_key("manager")? {
return Ok(());
}
let manager = lua.globals().get::<_, Table>("manager")?;
let args = lua.create_table()?;
args.set("name", crate_config.dioxus_config.application.name.clone())?;
args.set("platform", platform)?;
args.set("out_dir", crate_config.out_dir().to_str().unwrap())?;
args.set("asset_dir", crate_config.asset_dir().to_str().unwrap())?;
for i in 1..(manager.len()? as i32 + 1) {
let info = manager.get::<i32, PluginInfo>(i)?;
if let Some(func) = info.build.on_finish {
func.call::<Table, ()>(args.clone())?;
}
}
Ok(())
}
pub fn on_serve_start(crate_config: &CrateConfig) -> anyhow::Result<()> {
let lua = LUA.lock().unwrap();
if !lua.globals().contains_key("manager")? {
return Ok(());
}
let manager = lua.globals().get::<_, Table>("manager")?;
let args = lua.create_table()?;
args.set("name", crate_config.dioxus_config.application.name.clone())?;
for i in 1..(manager.len()? as i32 + 1) {
let info = manager.get::<i32, PluginInfo>(i)?;
if let Some(func) = info.serve.on_start {
func.call::<Table, ()>(args.clone())?;
}
}
Ok(())
}
pub fn on_serve_rebuild(timestamp: i64, files: Vec<PathBuf>) -> anyhow::Result<()> {
let lua = LUA.lock().unwrap();
let manager = lua.globals().get::<_, Table>("manager")?;
let args = lua.create_table()?;
args.set("timestamp", timestamp)?;
let files: Vec<String> = files
.iter()
.map(|v| v.to_str().unwrap().to_string())
.collect();
args.set("changed_files", files)?;
for i in 1..(manager.len()? as i32 + 1) {
let info = manager.get::<i32, PluginInfo>(i)?;
if let Some(func) = info.serve.on_rebuild {
func.call::<Table, ()>(args.clone())?;
}
}
Ok(())
}
pub fn on_serve_shutdown(crate_config: &CrateConfig) -> anyhow::Result<()> {
let lua = LUA.lock().unwrap();
if !lua.globals().contains_key("manager")? {
return Ok(());
}
let manager = lua.globals().get::<_, Table>("manager")?;
let args = lua.create_table()?;
args.set("name", crate_config.dioxus_config.application.name.clone())?;
for i in 1..(manager.len()? as i32 + 1) {
let info = manager.get::<i32, PluginInfo>(i)?;
if let Some(func) = info.serve.on_shutdown {
func.call::<Table, ()>(args.clone())?;
}
}
Ok(())
}
pub fn init_plugin_dir() -> PathBuf {
let app_path = app_path();
let plugin_path = app_path.join("plugins");
if !plugin_path.is_dir() {
tracing::info!("📖 Start to init plugin library ...");
let url = "https://github.com/DioxusLabs/cli-plugin-library";
if let Err(err) = clone_repo(&plugin_path, url) {
tracing::error!("Failed to init plugin dir, error caused by {}. ", err);
}
}
plugin_path
}
pub fn plugin_list() -> Vec<String> {
let mut res = vec![];
if let Ok(lua) = LUA.lock() {
let list = lua
.load(mlua::chunk!(
local list = {}
for key, value in ipairs(manager) do
table.insert(list, {name = value.name, loader = value.inner.from_loader})
end
return list
))
.eval::<Vec<Table>>()
.unwrap_or_default();
for i in list {
let name = i.get::<_, String>("name").unwrap();
let loader = i.get::<_, bool>("loader").unwrap();
let text = if loader {
format!("{name} [:loader]")
} else {
name
};
res.push(text);
}
}
res
}
}

View file

@ -1,138 +0,0 @@
use std::collections::HashMap;
use mlua::ToLua;
/// Plugin system configuration parsed from the `[plugin]` section of the
/// project's TOML config.
#[derive(Debug, Clone)]
pub struct PluginConfig {
    pub available: bool,
    pub loader: Vec<String>,
    pub config_info: HashMap<String, HashMap<String, Value>>,
}

impl<'lua> ToLua<'lua> for PluginConfig {
    fn to_lua(self, lua: &'lua mlua::Lua) -> mlua::Result<mlua::Value<'lua>> {
        let table = lua.create_table()?;
        table.set("available", self.available)?;
        table.set("loader", self.loader)?;

        // Nested per-plugin config sections become a nested Lua table.
        let config_info = lua.create_table()?;
        for (section_name, section) in self.config_info {
            config_info.set(section_name, section)?;
        }
        table.set("config_info", config_info)?;

        Ok(mlua::Value::Table(table))
    }
}

impl PluginConfig {
    /// Parse from a TOML value. A non-table value yields a disabled,
    /// empty configuration.
    pub fn from_toml_value(val: toml::Value) -> Self {
        let toml::Value::Table(tab) = val else {
            return Self {
                available: false,
                loader: vec![],
                config_info: HashMap::new(),
            };
        };

        // `available` defaults to true when absent or not a boolean.
        let available = tab
            .get("available")
            .and_then(toml::Value::as_bool)
            .unwrap_or(true);

        // `loader` is an optional array of extra plugin directories;
        // non-string entries become empty strings.
        let mut loader = vec![];
        if let Some(toml::Value::Array(entries)) = tab.get("loader") {
            for entry in entries {
                loader.push(entry.as_str().unwrap_or_default().to_string());
            }
        }

        // Every remaining table-valued key is a per-plugin config section.
        let mut config_info = HashMap::new();
        for (name, value) in tab {
            if matches!(name.as_str(), "available" | "loader") {
                continue;
            }
            if let toml::Value::Table(entries) = value {
                let section = entries
                    .into_iter()
                    .map(|(key, info)| (key, Value::from_toml(info)))
                    .collect();
                config_info.insert(name, section);
            }
        }

        Self {
            available,
            loader,
            config_info,
        }
    }
}
/// A TOML-shaped config value that can be handed to Lua.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub enum Value {
    String(String),
    Integer(i64),
    Float(f64),
    Boolean(bool),
    Array(Vec<Value>),
    Table(HashMap<String, Value>),
}

impl Value {
    /// Convert a `toml::Value`; datetimes are mapped to their string form.
    ///
    /// Consistency fix: the match arms now name `toml::Value` directly (the
    /// parameter's type) instead of the `cargo_toml::Value` re-export.
    pub fn from_toml(origin: toml::Value) -> Self {
        match origin {
            toml::Value::String(s) => Value::String(s),
            toml::Value::Integer(i) => Value::Integer(i),
            toml::Value::Float(f) => Value::Float(f),
            toml::Value::Boolean(b) => Value::Boolean(b),
            toml::Value::Datetime(d) => Value::String(d.to_string()),
            toml::Value::Array(a) => Value::Array(a.into_iter().map(Value::from_toml).collect()),
            toml::Value::Table(t) => {
                Value::Table(t.into_iter().map(|(k, v)| (k, Value::from_toml(v))).collect())
            }
        }
    }
}

impl<'lua> ToLua<'lua> for Value {
    fn to_lua(self, lua: &'lua mlua::Lua) -> mlua::Result<mlua::Value<'lua>> {
        Ok(match self {
            Value::String(s) => mlua::Value::String(lua.create_string(&s)?),
            Value::Integer(i) => mlua::Value::Integer(i),
            Value::Float(f) => mlua::Value::Number(f),
            Value::Boolean(b) => mlua::Value::Boolean(b),
            Value::Array(a) => {
                let table = lua.create_table()?;
                // Fixed: Lua sequences are 1-based. The previous code wrote
                // keys starting at 0, which breaks `ipairs` and `#` on the
                // Lua side. Also consume the vec instead of cloning items.
                for (i, v) in a.into_iter().enumerate() {
                    table.set(i + 1, v)?;
                }
                mlua::Value::Table(table)
            }
            Value::Table(t) => {
                let table = lua.create_table()?;
                for (key, v) in t {
                    table.set(key, v)?;
                }
                mlua::Value::Table(table)
            }
        })
    }
}

View file

@ -0,0 +1,167 @@
use crate::builder::BuildRequest;
use crate::builder::BuildResult;
use crate::builder::UpdateBuildProgress;
use crate::dioxus_crate::DioxusCrate;
use crate::serve::Serve;
use crate::Result;
use dioxus_cli_config::Platform;
use futures_channel::mpsc::UnboundedReceiver;
use futures_util::future::OptionFuture;
use futures_util::stream::select_all;
use futures_util::StreamExt;
use std::process::Stdio;
use tokio::{
process::{Child, Command},
task::JoinHandle,
};
/// A handle to ongoing builds and then the spawned tasks themselves
pub struct Builder {
    /// The results of the build
    /// (one joinable task collecting every platform's BuildResult; None when
    /// no build is in flight)
    build_results: Option<JoinHandle<Result<Vec<BuildResult>>>>,
    /// The progress of the builds
    /// (one receiver per platform, polled together in `wait`)
    build_progress: Vec<(Platform, UnboundedReceiver<UpdateBuildProgress>)>,
    /// The application we are building
    config: DioxusCrate,
    /// The arguments for the build
    serve: Serve,
    /// The children of the build process
    /// (spawned app processes, killed on shutdown/rebuild)
    pub children: Vec<(Platform, Child)>,
}
impl Builder {
/// Create a new builder
pub fn new(config: &DioxusCrate, serve: &Serve) -> Self {
let serve = serve.clone();
let config = config.clone();
Self {
build_results: None,
build_progress: Vec::new(),
config: config.clone(),
serve,
children: Vec::new(),
}
}
/// Start a new build - killing the current one if it exists
pub fn build(&mut self) {
self.shutdown();
let build_requests =
BuildRequest::create(true, &self.config, self.serve.build_arguments.clone());
let mut set = tokio::task::JoinSet::new();
for build_request in build_requests {
let (mut tx, rx) = futures_channel::mpsc::unbounded();
self.build_progress
.push((build_request.build_arguments.platform(), rx));
set.spawn(async move {
let res = build_request.build(tx.clone()).await;
if let Err(err) = &res {
let _ = tx.start_send(UpdateBuildProgress {
stage: crate::builder::Stage::Finished,
update: crate::builder::UpdateStage::Failed(err.to_string()),
});
}
res
});
}
self.build_results = Some(tokio::spawn(async move {
let mut all_results = Vec::new();
while let Some(result) = set.join_next().await {
let res = result.map_err(|err| {
crate::Error::Unique(format!("Panic while building project: {err:?}"))
})??;
all_results.push(res);
}
Ok(all_results)
}));
}
    /// Wait for any new updates to the builder - either it completed or gave us a message etc
    pub async fn wait(&mut self) -> Result<BuilderUpdate> {
        // Wait for build progress
        // Merge every platform's progress receiver into one stream, tagging
        // each update with its originating platform.
        let mut next = select_all(
            self.build_progress
                .iter_mut()
                .map(|(platform, rx)| rx.map(move |update| (*platform, update))),
        );
        // `None` when no build is in flight; OptionFuture disables that
        // select arm in that case.
        let results: OptionFuture<_> = self.build_results.as_mut().into();
        // Wait for the next build result
        tokio::select! {
            Some(build_results) = results => {
                // The collector task finished — clear the handle so we don't
                // poll a consumed JoinHandle again.
                self.build_results = None;
                // If we have a build result, bubble it up to the main loop
                let build_results = build_results.map_err(|_| crate::Error::Unique("Build join failed".to_string()))??;
                Ok(BuilderUpdate::Ready { results: build_results })
            }
            Some((platform, update)) = next.next() => {
                // If we have a build progress, send it to the screen
                Ok(BuilderUpdate::Progress { platform, update })
            }
            else => {
                // Neither a pending result nor any live progress channels:
                // park forever so an enclosing select! can service other arms.
                std::future::pending::<()>().await;
                unreachable!("Pending cannot resolve")
            },
        }
    }
    /// Shutdown the current build process
    ///
    /// Politely asks each child app to terminate (SIGTERM / taskkill), then
    /// force-kills it, aborts the in-flight build task, and drops all
    /// progress channels.
    pub(crate) fn shutdown(&mut self) {
        for (_, mut child) in self.children.drain(..) {
            // Gracefully shutdown the desktop app
            // It might have a receiver to do some cleanup stuff
            if let Some(pid) = child.id() {
                // on unix, we can send a signal to the process to shut down
                #[cfg(unix)]
                {
                    _ = Command::new("kill")
                        .args(["-s", "TERM", &pid.to_string()])
                        .stderr(Stdio::null())
                        .stdout(Stdio::null())
                        .spawn();
                }
                // on windows, use the `taskkill` command
                #[cfg(windows)]
                {
                    _ = Command::new("taskkill")
                        .args(["/F", "/PID", &pid.to_string()])
                        .stderr(Stdio::null())
                        .stdout(Stdio::null())
                        .spawn();
                }
            }
            // Todo: add a timeout here to kill the process if it doesn't shut down within a reasonable time
            _ = child.start_kill();
        }
        if let Some(tasks) = self.build_results.take() {
            tasks.abort();
        }
        self.build_progress.clear();
    }
}
/// Events produced by [`Builder::wait`].
pub enum BuilderUpdate {
    /// Incremental progress from one platform's in-flight build.
    Progress {
        platform: Platform,
        update: UpdateBuildProgress,
    },
    /// Every platform's build completed successfully.
    Ready {
        results: Vec<BuildResult>,
    },
}

View file

@ -1,44 +1,25 @@
use super::{ use dioxus_core::{internal::HotReloadLiteral, Template};
hot_reload_diff::{diff_rsx, DiffResult}, use dioxus_rsx::{
ChangedRsx, hot_reload::{diff_rsx, ChangedRsx},
}; CallBody, HotReloadedTemplate, HotReloadingContext,
use crate::{CallBody, HotReloadingContext};
use dioxus_core::{
prelude::{TemplateAttribute, TemplateNode},
Template,
}; };
use krates::cm::MetadataCommand; use krates::cm::MetadataCommand;
use krates::Cmd; use krates::Cmd;
pub use proc_macro2::TokenStream;
pub use std::collections::HashMap; pub use std::collections::HashMap;
pub use std::sync::Mutex; use std::{ffi::OsStr, path::PathBuf};
pub use std::time::SystemTime;
use std::{collections::HashSet, ffi::OsStr, marker::PhantomData, path::PathBuf};
pub use std::{fs, io, path::Path}; pub use std::{fs, io, path::Path};
pub use std::{fs::File, io::Read}; pub use std::{fs::File, io::Read};
use syn::spanned::Spanned; use syn::spanned::Spanned;
pub enum UpdateResult { pub struct FileMap {
UpdatedRsx(Vec<Template>), pub map: HashMap<PathBuf, CachedSynFile>,
NeedsRebuild,
}
/// The result of building a FileMap
pub struct FileMapBuildResult<Ctx: HotReloadingContext> {
/// The FileMap that was built
pub map: FileMap<Ctx>,
/// Any errors that occurred while building the FileMap that were not fatal /// Any errors that occurred while building the FileMap that were not fatal
pub errors: Vec<io::Error>, pub errors: Vec<io::Error>,
}
pub struct FileMap<Ctx: HotReloadingContext> { pub in_workspace: HashMap<PathBuf, Option<PathBuf>>,
pub map: HashMap<PathBuf, CachedSynFile>,
in_workspace: HashMap<PathBuf, Option<PathBuf>>, pub changed_lits: HashMap<String, HotReloadLiteral>,
phantom: PhantomData<Ctx>,
} }
/// A cached file that has been parsed /// A cached file that has been parsed
@ -46,17 +27,15 @@ pub struct FileMap<Ctx: HotReloadingContext> {
/// We store the templates found in this file /// We store the templates found in this file
pub struct CachedSynFile { pub struct CachedSynFile {
pub raw: String, pub raw: String,
pub path: PathBuf,
pub templates: HashMap<&'static str, Template>, pub templates: HashMap<&'static str, Template>,
pub tracked_assets: HashSet<PathBuf>,
} }
impl<Ctx: HotReloadingContext> FileMap<Ctx> { impl FileMap {
/// Create a new FileMap from a crate directory /// Create a new FileMap from a crate directory
/// ///
/// TODO: this should be created with a gitignore filter /// TODO: this should be created with a gitignore filter
pub fn create(path: PathBuf) -> io::Result<FileMapBuildResult<Ctx>> { pub fn create<Ctx: HotReloadingContext>(path: PathBuf) -> io::Result<FileMap> {
Self::create_with_filter(path, |p| { Self::create_with_filter::<Ctx>(path, |p| {
// skip some stuff we know is large by default // skip some stuff we know is large by default
p.file_name() == Some(OsStr::new("target")) p.file_name() == Some(OsStr::new("target"))
|| p.file_name() == Some(OsStr::new("node_modules")) || p.file_name() == Some(OsStr::new("node_modules"))
@ -64,42 +43,44 @@ impl<Ctx: HotReloadingContext> FileMap<Ctx> {
} }
/// Create a new FileMap from a crate directory /// Create a new FileMap from a crate directory
pub fn create_with_filter( ///
/// Takes a filter that when returns true, the file will be filtered out (ie not tracked)
/// Note that this is inverted from a typical .filter() method.
pub fn create_with_filter<Ctx: HotReloadingContext>(
crate_dir: PathBuf, crate_dir: PathBuf,
mut filter: impl FnMut(&Path) -> bool, mut filter: impl FnMut(&Path) -> bool,
) -> io::Result<FileMapBuildResult<Ctx>> { ) -> io::Result<FileMap> {
let FileMapSearchResult { map, errors } = find_rs_files(crate_dir.clone(), &mut filter); let FileMapSearchResult { map, errors } = find_rs_files(crate_dir.clone(), &mut filter);
let mut map = Self { let mut map = Self {
map, map,
errors,
in_workspace: HashMap::new(), in_workspace: HashMap::new(),
phantom: PhantomData, changed_lits: HashMap::new(),
}; };
map.load_assets(crate_dir.as_path()); map.load_assets::<Ctx>(crate_dir.as_path());
Ok(FileMapBuildResult { errors, map }) Ok(map)
} }
/// Start watching assets for changes /// Start watching assets for changes
/// ///
/// This just diffs every file against itself and populates the tracked assets as it goes /// This just diffs every file against itself and populates the tracked assets as it goes
pub fn load_assets(&mut self, crate_dir: &Path) { pub fn load_assets<Ctx: HotReloadingContext>(&mut self, crate_dir: &Path) {
let keys = self.map.keys().cloned().collect::<Vec<_>>(); let keys = self.map.keys().cloned().collect::<Vec<_>>();
for file in keys { for file in keys {
_ = self.update_rsx(file.as_path(), crate_dir); _ = self.update_rsx::<Ctx>(file.as_path(), crate_dir);
} }
} }
/// Try to update the rsx in a file /// Try to update the rsx in a file
pub fn update_rsx( pub fn update_rsx<Ctx: HotReloadingContext>(
&mut self, &mut self,
file_path: &Path, file_path: &Path,
crate_dir: &Path, crate_dir: &Path,
) -> Result<UpdateResult, HotreloadError> { ) -> Result<Vec<HotReloadedTemplate>, HotreloadError> {
let mut file = File::open(file_path)?; let src = std::fs::read_to_string(file_path)?;
let mut src = String::new();
file.read_to_string(&mut src)?;
// If we can't parse the contents we want to pass it off to the build system to tell the user that there's a syntax error // If we can't parse the contents we want to pass it off to the build system to tell the user that there's a syntax error
let syntax = syn::parse_file(&src).map_err(|_err| HotreloadError::Parse)?; let syntax = syn::parse_file(&src).map_err(|_err| HotreloadError::Parse)?;
@ -109,17 +90,16 @@ impl<Ctx: HotReloadingContext> FileMap<Ctx> {
// Get the cached file if it exists, otherwise try to create it // Get the cached file if it exists, otherwise try to create it
let Some(old_cached) = self.map.get_mut(file_path) else { let Some(old_cached) = self.map.get_mut(file_path) else {
// if this is a new file, rebuild the project // if this is a new file, rebuild the project
let FileMapBuildResult { map, mut errors } = let mut map = FileMap::create::<Ctx>(crate_dir.to_path_buf())?;
FileMap::<Ctx>::create(crate_dir.to_path_buf())?;
if let Some(err) = errors.pop() { if let Some(err) = map.errors.pop() {
return Err(HotreloadError::Failure(err)); return Err(HotreloadError::Failure(err));
} }
// merge the new map into the old map // merge the new map into the old map
self.map.extend(map.map); self.map.extend(map.map);
return Ok(UpdateResult::NeedsRebuild); return Err(HotreloadError::Notreloadable);
}; };
// If the cached file is not a valid rsx file, rebuild the project, forcing errors // If the cached file is not a valid rsx file, rebuild the project, forcing errors
@ -131,26 +111,22 @@ impl<Ctx: HotReloadingContext> FileMap<Ctx> {
// If the changes were just some rsx, we can just update the template // If the changes were just some rsx, we can just update the template
// //
// However... if the changes involved code in the rsx itself, this should actually be a CodeChanged // However... if the changes involved code in the rsx itself, this should actually be a CodeChanged
DiffResult::RsxChanged { Some(rsx_calls) => rsx_calls,
rsx_calls: instances,
} => instances,
// If the changes were some code, we should insert the file into the map and rebuild // If the changes were some code, we should insert the file into the map and rebuild
// todo: not sure we even need to put the cached file into the map, but whatever // todo: not sure we even need to put the cached file into the map, but whatever
DiffResult::CodeChanged(_) => { None => {
let cached_file = CachedSynFile { let cached_file = CachedSynFile {
raw: src.clone(), raw: src.clone(),
path: file_path.to_path_buf(),
templates: HashMap::new(), templates: HashMap::new(),
tracked_assets: HashSet::new(),
}; };
self.map.insert(file_path.to_path_buf(), cached_file); self.map.insert(file_path.to_path_buf(), cached_file);
return Ok(UpdateResult::NeedsRebuild); return Err(HotreloadError::Notreloadable);
} }
}; };
let mut messages: Vec<Template> = Vec::new(); let mut out_templates = vec![];
for calls in instances.into_iter() { for calls in instances.into_iter() {
let ChangedRsx { old, new } = calls; let ChangedRsx { old, new } = calls;
@ -158,7 +134,7 @@ impl<Ctx: HotReloadingContext> FileMap<Ctx> {
let old_start = old.span().start(); let old_start = old.span().start();
let old_parsed = syn::parse2::<CallBody>(old.tokens); let old_parsed = syn::parse2::<CallBody>(old.tokens);
let new_parsed = syn::parse2::<CallBody>(new); let new_parsed = syn::parse2::<CallBody>(new.tokens);
let (Ok(old_call_body), Ok(new_call_body)) = (old_parsed, new_parsed) else { let (Ok(old_call_body), Ok(new_call_body)) = (old_parsed, new_parsed) else {
continue; continue;
}; };
@ -179,105 +155,45 @@ impl<Ctx: HotReloadingContext> FileMap<Ctx> {
// TODO: we could consider arena allocating the templates and dropping them when the connection is closed // TODO: we could consider arena allocating the templates and dropping them when the connection is closed
let leaked_location = Box::leak(template_location(old_start, file).into_boxed_str()); let leaked_location = Box::leak(template_location(old_start, file).into_boxed_str());
// Retuns Some(template) if the template is hotreloadable // Returns a list of templates that are hotreloadable
// dynamic changes are not hot reloadable and force a rebuild let hotreload_result = dioxus_rsx::hotreload::HotReloadedTemplate::new::<Ctx>(
let hotreloadable_template = &old_call_body,
new_call_body.update_template::<Ctx>(Some(old_call_body), leaked_location); &new_call_body,
leaked_location,
self.changed_lits.clone(),
);
// if the template is not hotreloadable, we need to do a full rebuild // if the template is not hotreloadable, we need to do a full rebuild
let Some(template) = hotreloadable_template else { let Some(mut results) = hotreload_result else {
return Ok(UpdateResult::NeedsRebuild); return Err(HotreloadError::Notreloadable);
}; };
// dioxus cannot handle empty templates... // self.changed_lits
// todo: I think it can? or we just skip them nowa // .extend(std::mem::take(&mut results.changed_lits));
if template.roots.is_empty() {
continue;
}
// if the template is the same, don't send it // Be careful to not send the bad templates
if let Some(old_template) = old_cached.templates.get(template.name) { results.templates.retain(|template| {
if old_template == &template { // dioxus cannot handle empty templates...
continue; if template.roots.is_empty() {
} return false;
};
// update the cached file
old_cached.templates.insert(template.name, template);
// Track any new assets
old_cached
.tracked_assets
.extend(Self::populate_assets(template));
messages.push(template);
}
Ok(UpdateResult::UpdatedRsx(messages))
}
fn populate_assets(template: Template) -> HashSet<PathBuf> {
fn collect_assetlike_attrs(node: &TemplateNode, asset_urls: &mut HashSet<PathBuf>) {
if let TemplateNode::Element {
attrs, children, ..
} = node
{
for attr in attrs.iter() {
if let TemplateAttribute::Static { name, value, .. } = attr {
if *name == "src" || *name == "href" {
asset_urls.insert(PathBuf::from(*value));
}
}
} }
for child in children.iter() { // if the template is the same, don't send it
collect_assetlike_attrs(child, asset_urls); if old_cached.templates.get(template.name) == Some(template) {
} return false;
} };
// Update the most recent idea of the template
// This lets us know if the template has changed so we don't need to send it
old_cached.templates.insert(template.name, *template);
true
});
out_templates.push(results);
} }
let mut asset_urls = HashSet::new(); Ok(out_templates)
for node in template.roots {
collect_assetlike_attrs(node, &mut asset_urls);
}
asset_urls
}
/// add the template to an existing file in the filemap if it exists
/// create a new file if it doesn't exist
pub fn insert(&mut self, path: PathBuf, template: Template) {
let tracked_assets = Self::populate_assets(template);
if self.map.contains_key(&path) {
let entry = self.map.get_mut(&path).unwrap();
entry.tracked_assets.extend(tracked_assets);
entry.templates.insert(template.name, template);
} else {
self.map.insert(
path.clone(),
CachedSynFile {
raw: String::new(),
path,
tracked_assets,
templates: HashMap::from([(template.name, template)]),
},
);
}
}
pub fn tracked_assets(&self) -> HashSet<PathBuf> {
self.map
.values()
.flat_map(|file| file.tracked_assets.iter().cloned())
.collect()
}
pub fn is_tracking_asset(&self, path: &PathBuf) -> Option<&CachedSynFile> {
self.map
.values()
.find(|file| file.tracked_assets.contains(path))
} }
fn child_in_workspace(&mut self, crate_dir: &Path) -> io::Result<Option<PathBuf>> { fn child_in_workspace(&mut self, crate_dir: &Path) -> io::Result<Option<PathBuf>> {
@ -314,9 +230,9 @@ pub fn template_location(old_start: proc_macro2::LineColumn, file: &Path) -> Str
path path
+ ":" + ":"
+ &line.to_string() + line.to_string().as_str()
+ ":" + ":"
+ &column.to_string() + column.to_string().as_str()
// the byte index doesn't matter, but dioxus needs it // the byte index doesn't matter, but dioxus needs it
+ ":0" + ":0"
} }
@ -357,9 +273,7 @@ fn find_rs_files(root: PathBuf, filter: &mut impl FnMut(&Path) -> bool) -> FileM
Ok(_) => { Ok(_) => {
let cached_file = CachedSynFile { let cached_file = CachedSynFile {
raw: src.clone(), raw: src.clone(),
path: root.clone(),
templates: HashMap::new(), templates: HashMap::new(),
tracked_assets: HashSet::new(),
}; };
// track assets while we're here // track assets while we're here
@ -379,7 +293,7 @@ fn find_rs_files(root: PathBuf, filter: &mut impl FnMut(&Path) -> bool) -> FileM
pub enum HotreloadError { pub enum HotreloadError {
Failure(io::Error), Failure(io::Error),
Parse, Parse,
NoPreviousBuild, Notreloadable,
} }
impl std::fmt::Display for HotreloadError { impl std::fmt::Display for HotreloadError {
@ -387,7 +301,7 @@ impl std::fmt::Display for HotreloadError {
match self { match self {
Self::Failure(err) => write!(f, "Failed to parse file: {}", err), Self::Failure(err) => write!(f, "Failed to parse file: {}", err),
Self::Parse => write!(f, "Failed to parse file"), Self::Parse => write!(f, "Failed to parse file"),
Self::NoPreviousBuild => write!(f, "No previous build found"), Self::Notreloadable => write!(f, "Template is not hotreloadable"),
} }
} }
} }

View file

@ -0,0 +1 @@

View file

@ -0,0 +1,157 @@
use crate::cli::serve::Serve;
use crate::dioxus_crate::DioxusCrate;
use crate::Result;
use dioxus_cli_config::Platform;
use tokio::task::yield_now;
mod builder;
mod hot_reloading_file_map;
mod logs_tab;
mod output;
mod proxy;
mod server;
mod watcher;
use builder::*;
use output::*;
use server::*;
use watcher::*;
/// For *all* builds the CLI spins up a dedicated webserver, file watcher, and build infrastructure to serve the project.
///
/// This includes web, desktop, mobile, fullstack, etc.
///
/// Platform specifics:
/// - Web: we need to attach a filesystem server to our devtools webserver to serve the project. We
///   want to emulate GithubPages here since most folks are deploying there and expect things like
///   basepath to match.
/// - Fullstack: We spin up the same dev server but in this case the fullstack server itself needs to
///   proxy all dev requests to our dev server
/// - Desktop: We spin up the dev server but without a filesystem server.
/// - Mobile: Basically the same as desktop.
///
/// Notes:
/// - All filesystem changes are tracked here
/// - We send all updates to connected websocket connections. Even desktop connects via the websocket
/// - Right now desktop compiles tokio-tungstenite to do the connection but we could in theory reuse
///   the websocket logic from the webview for thinner builds.
///
/// Todos(Jon):
/// - I'd love to be able to configure the CLI while it's running so we can change settings on the fly.
///   This would require some light refactoring and potentially pulling in something like ratatui.
/// - Build a custom subscriber for logs by tools within this
/// - Handle logs from the build engine separately?
/// - Consume logs from the wasm for web/fullstack
/// - I want us to be able to detect a `server_fn` in the project and then upgrade from a static server
///   to a dynamic one on the fly.
pub async fn serve_all(serve: Serve, dioxus_crate: DioxusCrate) -> Result<()> {
    // Spin up the four cooperating subsystems: dev server, fs watcher, TUI, build engine.
    let mut server = Server::start(&serve, &dioxus_crate);
    let mut watcher = Watcher::start(&dioxus_crate);
    let mut screen = Output::start(&serve)
        .await
        .expect("Failed to open terminal logger");
    let mut builder = Builder::new(&dioxus_crate, &serve);

    // Start the first build
    builder.build();

    loop {
        // Make sure we don't hog the CPU: these loop { select! {} } blocks can starve the executor
        yield_now().await;

        // Draw the state of the server to the screen
        screen.render(&serve, &dioxus_crate, &builder, &server, &watcher);

        // And then wait for any updates before redrawing
        tokio::select! {
            // rebuild the project or hotreload it
            _ = watcher.wait() => {
                if !watcher.pending_changes() {
                    continue;
                }

                let changed_files = watcher.dequeue_changed_files(&dioxus_crate);

                // if change is hotreloadable, hotreload it
                // and then send that update to all connected clients
                if let Some(hr) = watcher.attempt_hot_reload(&dioxus_crate, changed_files) {
                    // Only send a hotreload message for templates and assets - otherwise we'll just get a full rebuild
                    if hr.templates.is_empty() && hr.assets.is_empty() {
                        continue
                    }

                    server.send_hotreload(hr).await;
                } else {
                    // If the change is not binary patchable, rebuild the project
                    // We're going to kick off a new build, interrupting the current build if it's ongoing
                    builder.build();

                    // Tell the server to show a loading page for any new requests
                    server.start_build().await;
                }
            }

            // reload the page
            msg = server.wait() => {
                // Run the server in the background
                // Waiting for updates here lets us tap into when clients are added/removed
                if let Some(msg) = msg {
                    screen.new_ws_message(Platform::Web, msg);
                }
            }

            // Handle updates from the build engine
            application = builder.wait() => {
                // Wait for logs from the build engine
                // These will cause us to update the screen
                // We also can check the status of the builds here in case we have multiple ongoing builds
                match application {
                    Ok(BuilderUpdate::Progress { platform, update }) => {
                        screen.new_build_logs(platform, update);
                        server.update_build_status(screen.build_progress.progress()).await;
                    }
                    Ok(BuilderUpdate::Ready { results }) => {
                        // A fresh set of results replaces any previously-running children
                        if !results.is_empty() {
                            builder.children.clear();
                        }

                        // If we have a build result, open it
                        for build_result in results.iter() {
                            let child = build_result.open(&serve.server_arguments, server.fullstack_address());
                            match child {
                                Ok(Some(child_proc)) => builder.children.push((build_result.platform,child_proc)),
                                // NOTE(review): a failed open aborts launching the *remaining* results — confirm intended
                                Err(_e) => break,
                                _ => {}
                            }
                        }

                        // Make sure we immediately capture the stdout/stderr of the executable -
                        // otherwise it'll clobber our terminal output
                        screen.new_ready_app(&mut builder, results);

                        // And then finally tell the server to reload
                        server.send_reload().await;
                    },
                    Err(err) => {
                        server.send_build_error(err).await;
                    }
                }
            }

            // Handle input from the user using our settings
            res = screen.wait() => {
                // An Err here is the user quitting (e.g. ctrl-c) — fall through to cleanup
                if res.is_err() {
                    break;
                }
            }
        }
    }

    // Run our cleanup logic here - maybe printing as we go?
    // todo: more printing, logging, error handling in this phase
    _ = screen.shutdown();
    _ = server.shutdown().await;
    builder.shutdown();

    Ok(())
}

View file

@ -0,0 +1,841 @@
use crate::{
builder::{BuildMessage, MessageType, Stage, UpdateBuildProgress},
dioxus_crate::DioxusCrate,
};
use crate::{
builder::{BuildResult, UpdateStage},
serve::Serve,
};
use core::panic;
use crossterm::{
event::{Event, EventStream, KeyCode, KeyModifiers, MouseEventKind},
terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
tty::IsTty,
ExecutableCommand,
};
use dioxus_cli_config::Platform;
use dioxus_hot_reload::ClientMsg;
use futures_util::{future::select_all, Future, StreamExt};
use ratatui::{prelude::*, widgets::*, TerminalOptions, Viewport};
use std::{
cell::RefCell,
collections::{HashMap, HashSet},
io::{self, stdout},
pin::Pin,
rc::Rc,
time::{Duration, Instant},
};
use tokio::{
io::{AsyncBufReadExt, BufReader, Lines},
process::{ChildStderr, ChildStdout},
};
use tracing::Level;
use super::{Builder, Server, Watcher};
/// Aggregated build state for every platform currently being built.
#[derive(Default)]
pub struct BuildProgress {
    // One ActiveBuild per platform, keyed by the platform being built
    build_logs: HashMap<Platform, ActiveBuild>,
}
impl BuildProgress {
    /// Overall progress in `0.0..=1.0`, taken from the *least* advanced build
    /// so the header bar never runs ahead of a platform that is still compiling.
    pub fn progress(&self) -> f64 {
        let least_advanced = self
            .build_logs
            .values()
            .min_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));

        match least_advanced {
            // No builds registered yet
            None => 0.0,
            Some(build) => match build.stage {
                // Setup stages report no progress
                Stage::Initializing | Stage::InstallingWasmTooling => 0.0,
                // Compilation carries the fractional progress
                Stage::Compiling => build.progress,
                // Post-compile stages count as done
                Stage::OptimizingWasm | Stage::OptimizingAssets | Stage::Finished => 1.0,
            },
        }
    }
}
/// State for the CLI's TUI renderer: terminal handle, input stream, build/run
/// bookkeeping, and viewport state for the scrollable log console.
pub struct Output {
    // Rc<RefCell<..>> so render()'s draw closure can borrow the terminal while `self` is mutated
    term: Rc<RefCell<Option<TerminalBackend>>>,

    // optional since when there's no tty there's no eventstream to read from - just stdin
    events: Option<EventStream>,

    _rustc_version: String,
    _rustc_nightly: bool,
    _dx_version: String,

    // false when stdout is not a tty (or the user opted out); then we just print logs
    interactive: bool,
    pub(crate) build_progress: BuildProgress,
    running_apps: HashMap<Platform, RunningApp>,
    is_cli_release: bool,
    platform: Platform,

    // Viewport bookkeeping used to clamp/track scrolling of the log paragraph
    num_lines_with_wrapping: u16,
    term_height: u16,
    scroll: u16,

    fly_modal_open: bool,
    // Start time used to derive the (currently unused) animation frame counter
    anim_start: Instant,
    tab: Tab,
}
/// Which log pane is shown in the console area.
#[derive(PartialEq, Eq, Clone, Copy)]
enum Tab {
    // App stdout/stderr and websocket client logs
    Console,
    // Messages from the build engine
    BuildLog,
}

// Crossterm-backed ratatui terminal writing to stdout
type TerminalBackend = Terminal<CrosstermBackend<io::Stdout>>;
impl Output {
/// Put the terminal into raw mode + the alternate screen (when interactive)
/// and build the initial TUI state.
pub async fn start(cfg: &Serve) -> io::Result<Self> {
    // Only run the full TUI when attached to a real tty and the user didn't opt out
    let interactive = std::io::stdout().is_tty() && cfg.interactive.unwrap_or(true);

    let mut events = None;
    if interactive {
        enable_raw_mode()?;
        stdout().execute(EnterAlternateScreen)?;
        // workaround for ci where the terminal is not fully initialized
        // this stupid bug
        // https://github.com/crossterm-rs/crossterm/issues/659
        events = Some(EventStream::new());
    };

    // set the panic hook to fix the terminal
    set_fix_term_hook();

    // `ok()` here means a failed terminal leaves `term` as None;
    // NOTE(review): render() unwraps this — confirm it can't be None while interactive
    let term: Option<TerminalBackend> = Terminal::with_options(
        CrosstermBackend::new(stdout()),
        TerminalOptions {
            viewport: Viewport::Fullscreen,
        },
    )
    .ok();

    // todo: re-enable rustc version
    // let rustc_version = rustc_version().await;
    // let rustc_nightly = rustc_version.contains("nightly") || cfg.target_args.nightly;
    let _rustc_version = String::from("1.0.0");
    let _rustc_nightly = false;

    let mut dx_version = String::new();

    dx_version.push_str(env!("CARGO_PKG_VERSION"));

    // For non-release CLI builds, append a short git hash so logs identify the exact build
    let is_cli_release = crate::dx_build_info::PROFILE == "release";

    if !is_cli_release {
        if let Some(hash) = crate::dx_build_info::GIT_COMMIT_HASH_SHORT {
            let hash = &hash.trim_start_matches('g')[..4];
            dx_version.push('-');
            dx_version.push_str(hash);
        }
    }

    let platform = cfg.build_arguments.platform.expect("To be resolved by now");

    Ok(Self {
        term: Rc::new(RefCell::new(term)),
        events,
        _rustc_version,
        _rustc_nightly,
        _dx_version: dx_version,
        interactive,
        is_cli_release,
        platform,
        fly_modal_open: false,
        build_progress: Default::default(),
        running_apps: HashMap::new(),
        scroll: 0,
        term_height: 0,
        num_lines_with_wrapping: 0,
        anim_start: Instant::now(),
        tab: Tab::BuildLog,
    })
}
/// Wait for either the ctrl_c handler or the next event
///
/// Why is the ctrl_c handler here?
///
/// Also tick animations every few ms
///
/// Races three sources: terminal input, stdout/stderr lines from running apps,
/// and a 300ms animation timeout that guarantees a periodic redraw.
pub async fn wait(&mut self) -> io::Result<()> {
    // sorry lord
    // Box+pin both arms so interactive (event stream) and non-interactive
    // (pending forever) cases have the same future type
    let user_input = match self.events.as_mut() {
        Some(events) => {
            let pinned: Pin<Box<dyn Future<Output = Option<Result<Event, _>>>>> =
                Box::pin(events.next());
            pinned
        }
        None => Box::pin(futures_util::future::pending()) as Pin<Box<dyn Future<Output = _>>>,
    };

    let has_running_apps = !self.running_apps.is_empty();

    // One future per running app, resolving to (platform, stdout line, stderr line)
    let next_stdout = self.running_apps.values_mut().map(|app| {
        let future = async move {
            let (stdout, stderr) = match &mut app.stdout {
                Some(stdout) => (stdout.stdout.next_line(), stdout.stderr.next_line()),
                // App with no captured output never resolves
                None => return futures_util::future::pending().await,
            };

            tokio::select! {
                Ok(Some(line)) = stdout => (app.result.platform, Some(line), None),
                Ok(Some(line)) = stderr => (app.result.platform, None, Some(line)),
                else => futures_util::future::pending().await,
            }
        };
        Box::pin(future)
    });

    // select_all panics on an empty iterator, hence the has_running_apps guard
    let next_stdout = async {
        if has_running_apps {
            select_all(next_stdout).await.0
        } else {
            futures_util::future::pending().await
        }
    };

    let animation_timeout = tokio::time::sleep(Duration::from_millis(300));

    tokio::select! {
        (platform, stdout, stderr) = next_stdout => {
            if let Some(stdout) = stdout {
                self.running_apps.get_mut(&platform).unwrap().stdout.as_mut().unwrap().stdout_line.push_str(&stdout);
                self.push_log(platform, BuildMessage {
                    level: Level::INFO,
                    message: MessageType::Text(stdout),
                    source: Some("app".to_string()),
                })
            }
            if let Some(stderr) = stderr {
                self.running_apps.get_mut(&platform).unwrap().stdout.as_mut().unwrap().stderr_line.push_str(&stderr);
                self.push_log(platform, BuildMessage {
                    level: Level::ERROR,
                    message: MessageType::Text(stderr),
                    source: Some("app".to_string()),
                })
            }
        },

        event = user_input => {
            // NOTE(review): double unwrap panics if the event stream ends or errors — confirm acceptable
            self.handle_events(event.unwrap().unwrap()).await?;
            // self.handle_input(event.unwrap().unwrap())?;
        }

        // Timed out waiting — fall through so the caller redraws (ticks animations)
        _ = animation_timeout => {}
    }

    Ok(())
}
/// Restore the terminal (leave raw mode + alternate screen) and replay the
/// buffered build logs to plain stdout so they aren't lost with the TUI.
pub fn shutdown(&mut self) -> io::Result<()> {
    // if we're a tty then we need to disable the raw mode
    if self.interactive {
        disable_raw_mode()?;
        stdout().execute(LeaveAlternateScreen)?;
        self.drain_print_logs();
    }
    Ok(())
}
/// Emit the build logs as println! statements such that the terminal has the same output as cargo
///
/// This is used when the terminal is shutdown and we want the build logs in the terminal. Old
/// versions of the cli would just eat build logs making debugging issues harder than they needed
/// to be.
fn drain_print_logs(&mut self) {
    // todo: print the build info here for the most recent build, and then the logs of the most recent build
    let with_messages = self
        .build_progress
        .build_logs
        .iter()
        .filter(|(_, build)| !build.messages.is_empty());

    for (platform, build) in with_messages {
        for message in &build.messages {
            match &message.message {
                // Cargo diagnostics carry their own pre-rendered text
                MessageType::Cargo(diagnostic) => println!(
                    "{platform}: {}",
                    diagnostic.rendered.as_deref().unwrap_or_default()
                ),
                MessageType::Text(t) => println!("{platform}: {t}"),
            }
        }
    }
}
/// Translate one terminal event into a TUI action.
///
/// Returns `Err(Interrupted)` on ctrl-c, which the serve loop treats as "quit".
pub fn handle_input(&mut self, input: Event) -> io::Result<()> {
    // handle ctrlc
    if let Event::Key(key) = input {
        if let KeyCode::Char('c') = key.code {
            if key.modifiers.contains(KeyModifiers::CONTROL) {
                return Err(io::Error::new(io::ErrorKind::Interrupted, "Ctrl-C"));
            }
        }
    }

    // `/` toggles the help/fly modal
    if let Event::Key(key) = input {
        if let KeyCode::Char('/') = key.code {
            self.fly_modal_open = !self.fly_modal_open;
        }
    }

    match input {
        // Mouse wheel and arrow keys both scroll the log pane by one line
        Event::Mouse(mouse) if mouse.kind == MouseEventKind::ScrollUp => {
            self.scroll = self.scroll.saturating_sub(1);
        }
        Event::Mouse(mouse) if mouse.kind == MouseEventKind::ScrollDown => {
            self.scroll += 1;
        }
        Event::Key(key) if key.code == KeyCode::Up => {
            self.scroll = self.scroll.saturating_sub(1);
        }
        Event::Key(key) if key.code == KeyCode::Down => {
            self.scroll += 1;
        }
        // todo: reload is not implemented yet
        Event::Key(key) if key.code == KeyCode::Char('r') => {}
        Event::Key(key) if key.code == KeyCode::Char('o') => {
            // todo: open the app
        }
        Event::Key(key) if key.code == KeyCode::Char('c') => {
            // clear
        }
        // Number keys switch tabs and reset the scroll position
        Event::Key(key) if key.code == KeyCode::Char('0') => {
            self.tab = Tab::Console;
            self.scroll = 0;
        }
        Event::Key(key) if key.code == KeyCode::Char('1') => {
            self.tab = Tab::BuildLog;
            self.scroll = 0;
        }
        Event::Resize(_width, _height) => {
            // nothing, it should take care of itself
        }
        _ => {}
    }

    // Clamp scroll so we can't run past the bottom of the wrapped output
    if self.scroll
        > self
            .num_lines_with_wrapping
            .saturating_sub(self.term_height + 1)
    {
        self.scroll = self
            .num_lines_with_wrapping
            .saturating_sub(self.term_height + 1);
    }

    Ok(())
}
/// Ingest a websocket message from a connected client and turn it into a log
/// entry; parse failures are themselves logged as errors. Non-text frames are ignored.
pub fn new_ws_message(&mut self, platform: Platform, message: axum::extract::ws::Message) {
    if let axum::extract::ws::Message::Text(text) = message {
        let msg = serde_json::from_str::<ClientMsg>(text.as_str());
        match msg {
            Ok(ClientMsg::Log { level, messages }) => {
                self.push_log(
                    platform,
                    BuildMessage {
                        // Map the client's string level onto tracing levels, defaulting to INFO
                        level: match level.as_str() {
                            "info" => Level::INFO,
                            "warn" => Level::WARN,
                            "error" => Level::ERROR,
                            "debug" => Level::DEBUG,
                            _ => Level::INFO,
                        },
                        message: MessageType::Text(
                            // todo: the js console is giving us a list of params, not formatted text
                            // we need to translate its styling into our own
                            messages.first().unwrap_or(&String::new()).clone(),
                        ),
                        source: Some("app".to_string()),
                    },
                );
            }
            Err(err) => {
                self.push_log(
                    platform,
                    BuildMessage {
                        level: Level::ERROR,
                        source: Some("app".to_string()),
                        message: MessageType::Text(format!("Error parsing message: {err}")),
                    },
                );
            }
        }
    }
}
// todo: re-enable
// Whether the view is "snapped" to the bottom of the log (always true for now,
// so new log lines always auto-scroll; the real check is commented out below).
#[allow(unused)]
fn is_snapped(&self, _platform: Platform) -> bool {
    true
    // let prev_scrol = self
    //     .num_lines_with_wrapping
    //     .saturating_sub(self.term_height);
    // prev_scrol == self.scroll
}
/// Jump the viewport so the newest output sits at the bottom of the console.
pub fn scroll_to_bottom(&mut self) {
    let max_scroll = self.num_lines_with_wrapping.saturating_sub(self.term_height);
    self.scroll = max_scroll;
}
/// Append an app log line for `platform`, auto-scrolling if the view was
/// already at the bottom. Messages for unknown platforms are dropped.
pub fn push_log(&mut self, platform: Platform, message: BuildMessage) {
    // Capture snap state *before* pushing so the new line doesn't skew it
    let snapped = self.is_snapped(platform);

    if let Some(build) = self.build_progress.build_logs.get_mut(&platform) {
        build.stdout_logs.push(message);
    }

    if snapped {
        self.scroll_to_bottom();
    }
}
/// Fold a progress update from the build engine into the per-platform build
/// state, creating the entry on first sight of a platform.
pub fn new_build_logs(&mut self, platform: Platform, update: UpdateBuildProgress) {
    let snapped = self.is_snapped(platform);

    // when the build is finished, switch to the console
    if update.stage == Stage::Finished {
        self.tab = Tab::Console;
    }

    self.build_progress
        .build_logs
        .entry(platform)
        .or_default()
        .update(update);

    if snapped {
        self.scroll_to_bottom();
    }
}
/// Register freshly-launched apps: steal each child's stdout/stderr pipes for
/// line-by-line capture and mark the matching build as finished.
pub fn new_ready_app(&mut self, build_engine: &mut Builder, results: Vec<BuildResult>) {
    for result in results {
        // Find the spawned child for this platform and take its output pipes.
        // NOTE(review): `take().unwrap()` assumes the child was spawned with piped
        // stdout/stderr and that this runs only once per child — confirm.
        let out = build_engine
            .children
            .iter_mut()
            .find_map(|(platform, child)| {
                if platform == &result.platform {
                    let stdout = child.stdout.take().unwrap();
                    let stderr = child.stderr.take().unwrap();
                    Some((stdout, stderr))
                } else {
                    None
                }
            });

        let platform = result.platform;
        let stdout = out.map(|(stdout, stderr)| RunningAppOutput {
            stdout: BufReader::new(stdout).lines(),
            stderr: BufReader::new(stderr).lines(),
            stdout_line: String::new(),
            stderr_line: String::new(),
        });

        let app = RunningApp { result, stdout };
        self.running_apps.insert(platform, app);

        // Finish the build progress for the platform that just finished building
        if let Some(build) = self.build_progress.build_logs.get_mut(&platform) {
            build.stage = Stage::Finished;
        }
    }
}
/// Draw one frame of the TUI: a header line with status/progress, the
/// scrollable log console, the key-hint sidebar, and (optionally) the fly modal.
///
/// When not interactive (no tty) this just prints the buffered build logs.
pub fn render(
    &mut self,
    _opts: &Serve,
    _config: &DioxusCrate,
    _build_engine: &Builder,
    server: &Server,
    _watcher: &Watcher,
) {
    // just drain the build logs
    if !self.interactive {
        self.drain_print_logs();
        return;
    }

    // Keep the animation track in terms of 100ms frames - the frame should be a number between 0 and 10
    // todo: we want to use this somehow to animate things...
    let elapsed = self.anim_start.elapsed().as_millis() as f32;
    let num_frames = elapsed / 100.0;
    let _frame_step = (num_frames % 10.0) as usize;

    // NOTE(review): `.unwrap()` assumes the terminal was created in `start` —
    // confirm `term` can't be None while `interactive` is true.
    _ = self
        .term
        .clone()
        .borrow_mut()
        .as_mut()
        .unwrap()
        .draw(|frame| {
            // a layout that has a title with stats about the program and then the actual console itself
            let body = Layout::default()
                .direction(Direction::Vertical)
                .constraints(
                    [
                        // Title
                        Constraint::Length(1),
                        // Body
                        Constraint::Min(0),
                    ]
                    .as_ref(),
                )
                .split(frame.size());

            // Split the body into a left and a right
            let console = Layout::default()
                .direction(Direction::Horizontal)
                .constraints([Constraint::Fill(1), Constraint::Length(14)].as_ref())
                .split(body[1]);

            // Only reserve header space for the "listening at" URL when it fits
            let listening_len = "listening at http://127.0.0.1:8080".len() + 3;
            let listening_len = if listening_len > body[0].width as usize {
                0
            } else {
                listening_len
            };

            let header = Layout::default()
                .direction(Direction::Horizontal)
                .constraints(
                    [
                        Constraint::Fill(1),
                        Constraint::Length(listening_len as u16),
                    ]
                    .as_ref(),
                )
                .split(body[0]);

            // // Render a border for the header
            // frame.render_widget(Block::default().borders(Borders::BOTTOM), body[0]);

            // Render the metadata
            let mut spans = vec![
                Span::from(if self.is_cli_release { "dx" } else { "dx-dev" }).green(),
                Span::from(" ").green(),
                Span::from("serve").green(),
                Span::from(" | ").white(),
                Span::from(self.platform.to_string()).green(),
                Span::from(" | ").white(),
            ];

            // If there is build progress, display that next to the platform
            if !self.build_progress.build_logs.is_empty() {
                if self
                    .build_progress
                    .build_logs
                    .values()
                    .any(|b| b.failed.is_some())
                {
                    spans.push(Span::from("build failed ❌").red());
                } else {
                    spans.push(Span::from("status: ").green());
                    let build = self
                        .build_progress
                        .build_logs
                        .values()
                        .min_by(|a, b| a.partial_cmp(b).unwrap())
                        .unwrap();
                    spans.extend_from_slice(&build.spans(Rect::new(
                        0,
                        0,
                        build.max_layout_size(),
                        1,
                    )));
                }
            }

            frame.render_widget(Paragraph::new(Line::from(spans)).left_aligned(), header[0]);

            // Split apart the body into a center and a right side
            // We only want to show the sidebar if there's enough space
            if listening_len > 0 {
                frame.render_widget(
                    Paragraph::new(Line::from(vec![
                        Span::from("listening at ").dark_gray(),
                        Span::from(format!("http://{}", server.ip).as_str()).gray(),
                    ])),
                    header[1],
                );
            }

            // Draw the tabs in the right region of the console
            // First draw the left border
            frame.render_widget(
                Paragraph::new(vec![
                    {
                        let mut line = Line::from(" [0] console").dark_gray();
                        if self.tab == Tab::Console {
                            line.style = Style::default().fg(Color::LightYellow);
                        }
                        line
                    },
                    {
                        let mut line = Line::from(" [1] build").dark_gray();
                        if self.tab == Tab::BuildLog {
                            line.style = Style::default().fg(Color::LightYellow);
                        }
                        line
                    },
                    Line::from(" ").gray(),
                    Line::from(" [/] more").gray(),
                    Line::from(" [r] reload").gray(),
                    // Fixed: clear is bound to `c` in `handle_input`; this hint
                    // previously read " [r] clear", duplicating the reload key.
                    Line::from(" [c] clear").gray(),
                    Line::from(" [o] open").gray(),
                    Line::from(" [h] hide").gray(),
                ])
                .left_aligned()
                .block(
                    Block::default()
                        .borders(Borders::LEFT | Borders::TOP)
                        .border_set(symbols::border::Set {
                            top_left: symbols::line::NORMAL.horizontal_down,
                            ..symbols::border::PLAIN
                        }),
                ),
                console[1],
            );

            // We're going to assemble a text buffer directly and then let the paragraph widgets
            // handle the wrapping and scrolling
            let mut paragraph_text: Text<'_> = Text::default();

            for platform in self.build_progress.build_logs.keys() {
                let build = self.build_progress.build_logs.get(platform).unwrap();

                let msgs = match self.tab {
                    Tab::Console => &build.stdout_logs,
                    Tab::BuildLog => &build.messages,
                };

                for span in msgs.iter() {
                    use ansi_to_tui::IntoText;
                    match &span.message {
                        MessageType::Text(line) => {
                            for line in line.lines() {
                                // Convert ANSI escapes from the app into styled spans,
                                // prefixing each rendered line with an "[app]" tag
                                let text = line.into_text().unwrap_or_default();
                                for line in text.lines {
                                    let mut out_line = vec![Span::from("[app] ").dark_gray()];
                                    for span in line.spans {
                                        out_line.push(span);
                                    }
                                    let newline = Line::from(out_line);
                                    paragraph_text.push_line(newline);
                                }
                            }
                        }
                        MessageType::Cargo(diagnostic) => {
                            let diagnostic = diagnostic.rendered.as_deref().unwrap_or_default();
                            for line in diagnostic.lines() {
                                paragraph_text.extend(line.into_text().unwrap_or_default());
                            }
                        }
                    };
                }
            }

            let paragraph = Paragraph::new(paragraph_text)
                .left_aligned()
                .wrap(Wrap { trim: false });

            // Remember viewport geometry so input handling can clamp scrolling
            self.term_height = console[0].height;
            self.num_lines_with_wrapping = paragraph.line_count(console[0].width) as u16;

            let scrollbar = Scrollbar::new(ScrollbarOrientation::VerticalRight)
                .begin_symbol(None)
                .end_symbol(None)
                .track_symbol(None)
                .thumb_symbol("");

            let mut scrollbar_state = ScrollbarState::new(
                self.num_lines_with_wrapping
                    .saturating_sub(self.term_height) as usize,
            )
            .position(self.scroll as usize);

            let paragraph = paragraph.scroll((self.scroll, 0));
            paragraph
                .block(Block::new().borders(Borders::TOP))
                .render(console[0], frame.buffer_mut());

            // and the scrollbar, those are separate widgets
            frame.render_stateful_widget(
                scrollbar,
                console[0].inner(Margin {
                    // todo: dont use margin - just push down the body based on its top border
                    // using an inner vertical margin of 1 unit makes the scrollbar inside the block
                    vertical: 1,
                    horizontal: 0,
                }),
                &mut scrollbar_state,
            );

            // render the fly modal
            self.render_fly_modal(frame, console[0]);
        });
}
/// Batch together all terminal events arriving within ~1ms of `event`, then
/// handle each distinct event once (debounces duplicates inside one frame).
async fn handle_events(&mut self, event: Event) -> io::Result<()> {
    let mut events = vec![event];

    // Collect all the events within the next 10ms in one stream
    // NOTE(review): only called from `wait` when `events` is Some, so the
    // unwrap on `self.events` should be unreachable otherwise — confirm.
    loop {
        let next = self.events.as_mut().unwrap().next();
        tokio::select! {
            msg = next => events.push(msg.unwrap().unwrap()),
            _ = tokio::time::sleep(Duration::from_millis(1)) => break
        }
    }

    // Debounce events within the same frame
    let mut handled = HashSet::new();
    for event in events {
        if !handled.contains(&event) {
            self.handle_input(event.clone())?;
            handled.insert(event);
        }
    }

    Ok(())
}
/// Draw the placeholder "fly" modal over the console area when toggled open
/// (via the `/` key); currently just renders dummy text in a bordered panel.
fn render_fly_modal(&mut self, frame: &mut Frame, area: Rect) {
    if !self.fly_modal_open {
        return;
    }

    // Create a frame slightly smaller than the area
    let panel = Layout::default()
        .direction(Direction::Vertical)
        .constraints([Constraint::Fill(1)].as_ref())
        .split(area)[0];

    // Wipe the panel
    frame.render_widget(Clear, panel);
    frame.render_widget(Block::default().borders(Borders::ALL), panel);

    let modal = Paragraph::new(
        "Hello world!\nHello world!\nHello world!\nHello world!\nHello world!\n",
    )
    .alignment(Alignment::Center);
    frame.render_widget(modal, panel);
}
}
/// The live state of one platform's build: current stage, accumulated logs,
/// in-stage progress, and an optional failure message.
#[derive(Default, Debug, PartialEq)]
pub struct ActiveBuild {
    stage: Stage,
    // Messages from the build engine (shown on the Build tab)
    messages: Vec<BuildMessage>,
    // App stdout/stderr + client logs (shown on the Console tab)
    stdout_logs: Vec<BuildMessage>,
    // Progress within the current stage, 0.0..=1.0
    progress: f64,
    failed: Option<String>,
}
impl ActiveBuild {
    /// Fold one progress update from the build engine into this build's state.
    fn update(&mut self, update: UpdateBuildProgress) {
        match update.update {
            UpdateStage::Start => {
                self.stage = update.stage;
                self.progress = 0.0;
                // Entering a new stage clears any previous failure
                self.failed = None;
            }
            UpdateStage::AddMessage(message) => {
                self.messages.push(message);
            }
            UpdateStage::SetProgress(progress) => {
                self.progress = progress;
            }
            UpdateStage::Failed(failed) => {
                self.stage = Stage::Finished;
                // `failed` is owned here — the previous `failed.clone()` was a
                // redundant allocation (clippy::redundant_clone).
                self.failed = Some(failed);
            }
        }
    }

    /// Render this build's status as header spans; falls back to just the
    /// percentage when `area` is too narrow for the stage message.
    fn spans(&self, area: Rect) -> Vec<Span> {
        let mut spans = Vec::new();

        let message = match self.stage {
            Stage::Initializing => "initializing... ",
            Stage::InstallingWasmTooling => "installing wasm tools... ",
            Stage::Compiling => "compiling... ",
            Stage::OptimizingWasm => "optimizing wasm... ",
            Stage::OptimizingAssets => "optimizing assets... ",
            Stage::Finished => "finished! 🎉 ",
        };
        let progress = format!("{}%", (self.progress * 100.0) as u8);

        if area.width >= self.max_layout_size() {
            spans.push(Span::from(message).light_yellow());

            if self.stage != Stage::Finished {
                spans.push(Span::from(progress).white());
            }
        } else {
            spans.push(Span::from(progress).white());
        }

        spans
    }

    /// Widest rendering `spans` can produce: stage text + "NNN%" + padding.
    fn max_layout_size(&self) -> u16 {
        let progress_size = 4;
        let stage_size = self.stage.to_string().len() as u16;
        let brace_size = 2;

        progress_size + stage_size + brace_size
    }
}
impl PartialOrd for ActiveBuild {
    /// Order builds by pipeline stage first, then by progress within the stage.
    ///
    /// Always returns `Some(..)`: stages have a total order, and a NaN progress
    /// (which `f64::partial_cmp` cannot order) is treated as equal instead of
    /// panicking as the previous `unwrap()` did. `then_with` also defers the
    /// float comparison until the stages actually tie.
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        let by_stage = self.stage.cmp(&other.stage);
        Some(by_stage.then_with(|| {
            self.progress
                .partial_cmp(&other.progress)
                .unwrap_or(std::cmp::Ordering::Equal)
        }))
    }
}
/// Install a panic hook that restores the terminal before printing the panic.
///
/// The TUI runs in raw mode on the alternate screen; without this hook a
/// panic would leave the user's shell in an unusable state.
fn set_fix_term_hook() {
    let previous = std::panic::take_hook();
    std::panic::set_hook(Box::new(move |info| {
        // Best-effort terminal restoration; failures are ignored since we
        // are already unwinding.
        _ = disable_raw_mode();
        _ = stdout().execute(LeaveAlternateScreen);
        previous(info);
    }));
}
// todo: re-enable
#[allow(unused)]
async fn rustc_version() -> String {
tokio::process::Command::new("rustc")
.arg("--version")
.output()
.await
.ok()
.map(|o| o.stdout)
.and_then(|o| {
let out = String::from_utf8(o).unwrap();
out.split_ascii_whitespace().nth(1).map(|v| v.to_string())
})
.unwrap_or_else(|| "<unknown>".to_string())
}
/// A launched application process plus handles to its captured output.
pub struct RunningApp {
    // The build that produced this app (see `BuildResult` for details).
    result: BuildResult,
    // Output capture state; presumably `None` when the child's stdio is not
    // piped — TODO confirm with the spawn site.
    stdout: Option<RunningAppOutput>,
}
/// Buffered line readers over a child process's stdout/stderr, plus
/// accumulators for lines that are still being assembled.
struct RunningAppOutput {
    // Line stream over the child's piped stdout.
    stdout: Lines<BufReader<ChildStdout>>,
    // Line stream over the child's piped stderr.
    stderr: Lines<BufReader<ChildStderr>>,
    // Partially-read stdout line — assumed to be carried across polls;
    // confirm against the reader loop.
    stdout_line: String,
    // Partially-read stderr line (same assumption as above).
    stderr_line: String,
}

View file

@ -1,16 +1,20 @@
use crate::Result; use crate::{Error, Result};
use dioxus_cli_config::WebProxyConfig; use dioxus_cli_config::WebProxyConfig;
use anyhow::{anyhow, Context}; use anyhow::{anyhow, Context};
use axum::{http::StatusCode, routing::any, Router}; use axum::body::Body as MyBody;
use axum::body::Body;
use axum::{
http::StatusCode,
routing::{any, MethodRouter},
Router,
};
use hyper::{Request, Response, Uri}; use hyper::{Request, Response, Uri};
use hyper_util::{ use hyper_util::{
client::legacy::{self, connect::HttpConnector}, client::legacy::{self, connect::HttpConnector},
rt::TokioExecutor, rt::TokioExecutor,
}; };
use axum::body::Body as MyBody;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
struct ProxyClient { struct ProxyClient {
inner: legacy::Client<hyper_rustls::HttpsConnector<HttpConnector>, MyBody>, inner: legacy::Client<hyper_rustls::HttpsConnector<HttpConnector>, MyBody>,
@ -19,6 +23,7 @@ struct ProxyClient {
impl ProxyClient { impl ProxyClient {
fn new(url: Uri) -> Self { fn new(url: Uri) -> Self {
let _ = rustls::crypto::aws_lc_rs::default_provider().install_default();
let https = hyper_rustls::HttpsConnectorBuilder::new() let https = hyper_rustls::HttpsConnectorBuilder::new()
.with_native_roots() .with_native_roots()
.unwrap() .unwrap()
@ -63,28 +68,7 @@ pub fn add_proxy(mut router: Router, proxy: &WebProxyConfig) -> Result<Router> {
))); )));
} }
let client = ProxyClient::new(url); let method_router = proxy_to(url, false, handle_proxy_error);
let method_router = any(move |mut req: Request<MyBody>| async move {
// Prevent request loops
if req.headers().get("x-proxied-by-dioxus").is_some() {
return Err((
StatusCode::NOT_FOUND,
"API is sharing a loopback with the dev server. Try setting a different port on the API config."
.to_string(),
));
}
req.headers_mut().insert(
"x-proxied-by-dioxus",
"true".parse().expect("header value is valid"),
);
client
.send(req)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))
});
// api/*path // api/*path
router = router.route( router = router.route(
@ -107,6 +91,48 @@ pub fn add_proxy(mut router: Router, proxy: &WebProxyConfig) -> Result<Router> {
Ok(router) Ok(router)
} }
pub(crate) fn proxy_to(
url: Uri,
nocache: bool,
handle_error: fn(Error) -> Response<Body>,
) -> MethodRouter {
let client = ProxyClient::new(url);
any(move |mut req: Request<MyBody>| async move {
// Prevent request loops
if req.headers().get("x-proxied-by-dioxus").is_some() {
return Err(Response::builder()
.status(StatusCode::NOT_FOUND)
.body(Body::from(
"API is sharing a loopback with the dev server. Try setting a different port on the API config.",
))
.unwrap());
}
req.headers_mut().insert(
"x-proxied-by-dioxus",
"true".parse().expect("header value is valid"),
);
if nocache {
crate::serve::insert_no_cache_headers(req.headers_mut());
}
client.send(req).await.map_err(handle_error)
})
}
fn handle_proxy_error(e: Error) -> axum::http::Response<axum::body::Body> {
tracing::error!("Proxy error: {}", e);
axum::http::Response::builder()
.status(axum::http::StatusCode::INTERNAL_SERVER_ERROR)
.body(axum::body::Body::from(format!(
"Proxy connection failed: {:#?}",
e
)))
.unwrap()
}
#[cfg(test)] #[cfg(test)]
mod test { mod test {

View file

@ -0,0 +1,587 @@
use crate::dioxus_crate::DioxusCrate;
use crate::serve::Serve;
use crate::{Error, Result};
use axum::extract::{Request, State};
use axum::middleware::{self, Next};
use axum::{
body::Body,
extract::{
ws::{Message, WebSocket},
Extension, WebSocketUpgrade,
},
http::{
header::{HeaderName, HeaderValue, CACHE_CONTROL, EXPIRES, PRAGMA},
Method, Response, StatusCode,
},
response::IntoResponse,
routing::{get, get_service},
Router,
};
use axum_server::tls_rustls::RustlsConfig;
use dioxus_cli_config::{Platform, WebHttpsConfig};
use dioxus_hot_reload::{DevserverMsg, HotReloadMsg};
use futures_channel::mpsc::{UnboundedReceiver, UnboundedSender};
use futures_util::{stream::FuturesUnordered, StreamExt};
use hyper::header::ACCEPT;
use hyper::HeaderMap;
use serde::{Deserialize, Serialize};
use std::net::TcpListener;
use std::path::Path;
use std::sync::Arc;
use std::sync::RwLock;
use std::{
convert::Infallible,
fs, io,
net::{IpAddr, SocketAddr},
process::Command,
};
use tokio::task::JoinHandle;
use tower::ServiceBuilder;
use tower_http::{
cors::{Any, CorsLayer},
services::fs::{ServeDir, ServeFileSystemResponseBody},
ServiceBuilderExt,
};
/// Build status broadcast to connected clients; serialized as
/// `{"type": ..., "data": ...}` via the adjacently-tagged serde repr.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(tag = "type", content = "data")]
enum Status {
    /// A build is in progress; `progress` is a fraction of completion.
    Building { progress: f64 },
    /// The last build failed; `error` holds the (HTML-converted) error text.
    BuildError { error: String },
    /// The app is built and being served.
    Ready,
}
/// A cheaply-cloneable, thread-safe handle to the current build [`Status`].
#[derive(Debug, Clone)]
struct SharedStatus(Arc<RwLock<Status>>);
impl SharedStatus {
fn new(status: Status) -> Self {
Self(Arc::new(RwLock::new(status)))
}
fn set(&self, status: Status) {
*self.0.write().unwrap() = status;
}
fn get(&self) -> Status {
self.0.read().unwrap().clone()
}
}
/// Devserver state: the connected websocket clients plus the axum server task.
pub(crate) struct Server {
    /// Clients connected to the hot-reload websocket endpoint (`/_dioxus/`).
    pub hot_reload_sockets: Vec<WebSocket>,
    /// Clients connected to the build-status websocket endpoint.
    pub build_status_sockets: Vec<WebSocket>,
    /// The address the devserver is bound to.
    pub ip: SocketAddr,
    /// Incoming hot-reload websocket connections, forwarded by the router.
    pub new_hot_reload_sockets: UnboundedReceiver<WebSocket>,
    /// Incoming build-status websocket connections, forwarded by the router.
    pub new_build_status_sockets: UnboundedReceiver<WebSocket>,
    // Handle to the spawned axum server task; held so the result isn't lost.
    _server_task: JoinHandle<Result<()>>,
    /// We proxy (not hot reloading) fullstack requests to this port
    pub fullstack_port: Option<u16>,
    /// The current build status, shared with the router middleware.
    build_status: SharedStatus,
}
impl Server {
    /// Boot the devserver: build the router, launch the (optionally TLS) axum
    /// server on a background task, and optionally open the user's browser.
    pub fn start(serve: &Serve, cfg: &DioxusCrate) -> Self {
        let (hot_reload_sockets_tx, hot_reload_sockets_rx) = futures_channel::mpsc::unbounded();
        let (build_status_sockets_tx, build_status_sockets_rx) = futures_channel::mpsc::unbounded();
        let build_status = SharedStatus::new(Status::Building { progress: 0.0 });
        let addr = serve.server_arguments.address.address();
        let start_browser = serve.server_arguments.open.unwrap_or_default();
        // If we're serving a fullstack app, we need to find a port to proxy to
        let fullstack_port = if matches!(
            serve.build_arguments.platform(),
            Platform::Fullstack | Platform::StaticGeneration
        ) {
            get_available_port(addr.ip())
        } else {
            None
        };
        let fullstack_address = fullstack_port.map(|port| SocketAddr::new(addr.ip(), port));
        let router = setup_router(
            serve,
            cfg,
            hot_reload_sockets_tx,
            build_status_sockets_tx,
            fullstack_address,
            build_status.clone(),
        )
        .unwrap();
        // Actually just start the server, cloning in a few bits of config
        let web_config = cfg.dioxus_config.web.https.clone();
        let base_path = cfg.dioxus_config.web.app.base_path.clone();
        let _server_task = tokio::spawn(async move {
            let web_config = web_config.clone();
            // HTTPS
            // Before console info so it can stop if mkcert isn't installed or fails
            // todo: this is the only async thing here - might be nice to
            let rustls: Option<RustlsConfig> = get_rustls(&web_config).await.unwrap();
            // Open the browser
            if start_browser {
                open_browser(base_path, addr, rustls.is_some());
            }
            // Start the server with or without rustls
            if let Some(rustls) = rustls {
                axum_server::bind_rustls(addr, rustls)
                    .serve(router.into_make_service())
                    .await?
            } else {
                // Create a TCP listener bound to the address
                axum::serve(
                    tokio::net::TcpListener::bind(&addr).await?,
                    router.into_make_service(),
                )
                .await?
            }
            Ok(())
        });
        Self {
            hot_reload_sockets: Default::default(),
            build_status_sockets: Default::default(),
            new_hot_reload_sockets: hot_reload_sockets_rx,
            new_build_status_sockets: build_status_sockets_rx,
            _server_task,
            ip: addr,
            fullstack_port,
            build_status,
        }
    }
    /// Push the current build status to every connected build-status client,
    /// dropping any socket whose send fails (client disconnected).
    async fn send_build_status(&mut self) {
        let mut i = 0;
        // Remove-while-iterating: only advance the index when the socket is kept.
        while i < self.build_status_sockets.len() {
            let socket = &mut self.build_status_sockets[i];
            if send_build_status_to(&self.build_status, socket)
                .await
                .is_err()
            {
                self.build_status_sockets.remove(i);
            } else {
                i += 1;
            }
        }
    }
    /// Mark the start of a new build (progress 0) and notify clients.
    pub async fn start_build(&mut self) {
        self.build_status.set(Status::Building { progress: 0.0 });
        self.send_build_status().await;
    }
    /// Update the build progress and notify clients. A no-op unless a build is
    /// currently in progress (e.g. after an error or after completion).
    pub async fn update_build_status(&mut self, progress: f64) {
        if !matches!(self.build_status.get(), Status::Building { .. }) {
            return;
        }
        self.build_status.set(Status::Building { progress });
        self.send_build_status().await;
    }
    /// Serialize a hot-reload message and send it to every connected client,
    /// dropping sockets whose send fails.
    pub async fn send_hotreload(&mut self, reload: HotReloadMsg) {
        let msg = DevserverMsg::HotReload(reload);
        let msg = serde_json::to_string(&msg).unwrap();
        // Send the changes to any connected clients
        let mut i = 0;
        while i < self.hot_reload_sockets.len() {
            let socket = &mut self.hot_reload_sockets[i];
            if socket.send(Message::Text(msg.clone())).await.is_err() {
                self.hot_reload_sockets.remove(i);
            } else {
                i += 1;
            }
        }
    }
    /// Wait for new clients to be connected and then save them
    ///
    /// Resolves with `Some(message)` when an already-connected hot-reload
    /// client sends us something, and with `None` after accepting a new
    /// connection (or dropping a dead one).
    pub async fn wait(&mut self) -> Option<Message> {
        let mut new_hot_reload_socket = self.new_hot_reload_sockets.next();
        let mut new_build_status_socket = self.new_build_status_sockets.next();
        let mut new_message = self
            .hot_reload_sockets
            .iter_mut()
            .enumerate()
            .map(|(idx, socket)| async move { (idx, socket.next().await) })
            .collect::<FuturesUnordered<_>>();
        tokio::select! {
            new_hot_reload_socket = &mut new_hot_reload_socket => {
                if let Some(new_socket) = new_hot_reload_socket {
                    // `new_message` holds mutable borrows of the socket list,
                    // so it must be dropped before we push onto the list.
                    drop(new_message);
                    self.hot_reload_sockets.push(new_socket);
                    return None;
                } else {
                    panic!("Could not receive a socket - the devtools could not boot - the port is likely already in use");
                }
            }
            new_build_status_socket = &mut new_build_status_socket => {
                if let Some(mut new_socket) = new_build_status_socket {
                    drop(new_message);
                    // Update the socket with the current status
                    if send_build_status_to(&self.build_status, &mut new_socket).await.is_ok() {
                        self.build_status_sockets.push(new_socket);
                    }
                    return None;
                } else {
                    panic!("Could not receive a socket - the devtools could not boot - the port is likely already in use");
                }
            }
            Some((idx, message)) = new_message.next() => {
                match message {
                    Some(Ok(message)) => return Some(message),
                    // Closed or errored socket - drop it from the pool.
                    _ => {
                        drop(new_message);
                        _ = self.hot_reload_sockets.remove(idx);
                    }
                }
            }
        }
        None
    }
    /// Publish a build failure, converting ANSI escapes in the error text to
    /// HTML (falling back to the raw text if conversion fails).
    pub async fn send_build_error(&mut self, error: Error) {
        let error = error.to_string();
        self.build_status.set(Status::BuildError {
            error: ansi_to_html::convert(&error).unwrap_or(error),
        });
        self.send_build_status().await;
    }
    /// Mark the build as ready and ask every hot-reload client for a full
    /// page reload.
    pub async fn send_reload(&mut self) {
        self.build_status.set(Status::Ready);
        self.send_build_status().await;
        for socket in self.hot_reload_sockets.iter_mut() {
            _ = socket
                .send(Message::Text(
                    serde_json::to_string(&DevserverMsg::FullReload).unwrap(),
                ))
                .await;
        }
    }
    /// Send a shutdown message to all connected clients
    pub async fn send_shutdown(&mut self) {
        for socket in self.hot_reload_sockets.iter_mut() {
            _ = socket
                .send(Message::Text(
                    serde_json::to_string(&DevserverMsg::Shutdown).unwrap(),
                ))
                .await;
        }
    }
    /// Notify clients of shutdown, then close every hot-reload socket.
    pub async fn shutdown(&mut self) {
        self.send_shutdown().await;
        for socket in self.hot_reload_sockets.drain(..) {
            _ = socket.close().await;
        }
    }
    /// Get the address the fullstack server should run on if we're serving a fullstack app
    pub fn fullstack_address(&self) -> Option<SocketAddr> {
        self.fullstack_port
            .map(|port| SocketAddr::new(self.ip.ip(), port))
    }
}
/// Sets up and returns a router
///
/// Steps include:
/// - Setting up cors
/// - Setting up the proxy to the endpoint specified in the config
/// - Setting up the file serve service
/// - Setting up the websocket endpoint for devtools
///
/// Note: layer order matters - the build-status middleware and CORS layers are
/// applied after the routes so they wrap everything registered above them.
fn setup_router(
    serve: &Serve,
    config: &DioxusCrate,
    hot_reload_sockets: UnboundedSender<WebSocket>,
    build_status_sockets: UnboundedSender<WebSocket>,
    fullstack_address: Option<SocketAddr>,
    build_status: SharedStatus,
) -> Result<Router> {
    let mut router = Router::new();
    let platform = serve.build_arguments.platform();
    // Setup proxy for the endpoint specified in the config
    for proxy_config in config.dioxus_config.web.proxy.iter() {
        router = super::proxy::add_proxy(router, proxy_config)?;
    }
    // Setup base path redirection
    if let Some(base_path) = config.dioxus_config.web.app.base_path.clone() {
        let base_path = format!("/{}", base_path.trim_matches('/'));
        router = Router::new()
            .nest(&base_path, router)
            .fallback(get(move || async move {
                format!("Outside of the base path: {}", base_path)
            }));
    }
    // Serve the output dir if it's web, otherwise let the fullstack server itself handle it
    match platform {
        Platform::Web => {
            // Route file service to output the .wasm and assets if this is a web build
            router = router.fallback(build_serve_dir(serve, config));
        }
        Platform::Fullstack | Platform::StaticGeneration => {
            // For fullstack and static generation, forward all requests to the server
            let address = fullstack_address.unwrap();
            router = router.fallback(super::proxy::proxy_to(
                format!("http://{address}").parse().unwrap(),
                true,
                |error| {
                    Response::builder()
                        .status(StatusCode::INTERNAL_SERVER_ERROR)
                        .body(Body::from(format!(
                            "Backend connection failed. The backend is likely still starting up. Please try again in a few seconds. Error: {:#?}",
                            error
                        )))
                        .unwrap()
                },
            ));
        }
        _ => {}
    }
    // Setup middleware to intercept html requests if the build status is "Building"
    router = router.layer(middleware::from_fn_with_state(
        build_status,
        build_status_middleware,
    ));
    // Setup websocket endpoint - and pass in the extension layer immediately after
    router = router.nest(
        "/_dioxus",
        Router::new()
            .route(
                "/",
                get(
                    |ws: WebSocketUpgrade, ext: Extension<UnboundedSender<WebSocket>>| async move {
                        ws.on_upgrade(move |socket| async move { _ = ext.0.unbounded_send(socket) })
                    },
                ),
            )
            .layer(Extension(hot_reload_sockets))
            .route(
                "/build_status",
                get(
                    |ws: WebSocketUpgrade, ext: Extension<UnboundedSender<WebSocket>>| async move {
                        ws.on_upgrade(move |socket| async move { _ = ext.0.unbounded_send(socket) })
                    },
                ),
            )
            .layer(Extension(build_status_sockets)),
    );
    // Setup cors
    router = router.layer(
        CorsLayer::new()
            // allow `GET` and `POST` when accessing the resource
            .allow_methods([Method::GET, Method::POST])
            // allow requests from any origin
            .allow_origin(Any)
            .allow_headers(Any),
    );
    Ok(router)
}
/// Build the static-file service for a web build's output directory.
///
/// Applies the configured cross-origin isolation headers (COEP/COOP), routes
/// every response through `no_cache` (which also handles the index-on-404
/// rewrite), and serves files out of the crate's output dir.
fn build_serve_dir(serve: &Serve, cfg: &DioxusCrate) -> axum::routing::MethodRouter {
    // Header pairs are (cross-origin-embedder-policy, cross-origin-opener-policy).
    static CORS_UNSAFE: (HeaderValue, HeaderValue) = (
        HeaderValue::from_static("unsafe-none"),
        HeaderValue::from_static("unsafe-none"),
    );
    static CORS_REQUIRE: (HeaderValue, HeaderValue) = (
        HeaderValue::from_static("require-corp"),
        HeaderValue::from_static("same-origin"),
    );
    let (coep, coop) = match serve.server_arguments.cross_origin_policy {
        true => CORS_REQUIRE.clone(),
        false => CORS_UNSAFE.clone(),
    };
    let out_dir = cfg.out_dir();
    let index_on_404 = cfg.dioxus_config.web.watcher.index_on_404;
    get_service(
        ServiceBuilder::new()
            .override_response_header(
                HeaderName::from_static("cross-origin-embedder-policy"),
                coep,
            )
            .override_response_header(HeaderName::from_static("cross-origin-opener-policy"), coop)
            .and_then({
                let out_dir = out_dir.clone();
                move |response| async move { Ok(no_cache(index_on_404, &out_dir, response)) }
            })
            .service(ServeDir::new(out_dir)),
    )
    // ServeDir's error type is Infallible, so this handler exists only to
    // satisfy the type system.
    .handle_error(|error: Infallible| async move {
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Unhandled internal error: {}", error),
        )
    })
}
fn no_cache(
index_on_404: bool,
out_dir: &Path,
response: Response<ServeFileSystemResponseBody>,
) -> Response<Body> {
// By default we just decompose into the response
let mut response = response.into_response();
// If there's a 404 and we're supposed to index on 404, upgrade that failed request to the index.html
// We migth want to isnert a header here saying we *did* that but oh well
if response.status() == StatusCode::NOT_FOUND && index_on_404 {
let body = Body::from(std::fs::read_to_string(out_dir.join("index.html")).unwrap());
response = Response::builder()
.status(StatusCode::OK)
.body(body)
.unwrap();
};
insert_no_cache_headers(response.headers_mut());
response
}
/// Stamp the `Cache-Control`/`Pragma`/`Expires` headers that disable client
/// caching, so the browser always re-fetches freshly-built assets.
pub fn insert_no_cache_headers(headers: &mut HeaderMap) {
    let no_cache = [
        (CACHE_CONTROL, "no-cache"),
        (PRAGMA, "no-cache"),
        (EXPIRES, "0"),
    ];
    for (name, value) in no_cache {
        headers.insert(name, HeaderValue::from_static(value));
    }
}
/// Returns the rustls config for the devserver, if HTTPS is enabled.
///
/// `Ok(None)` means HTTPS is disabled. When `mkcert` is requested, the
/// cert/key pair is generated on the fly; otherwise the configured paths
/// are used as-is.
pub async fn get_rustls(web_config: &WebHttpsConfig) -> Result<Option<RustlsConfig>> {
    if web_config.enabled != Some(true) {
        return Ok(None);
    }
    let (cert_path, key_path) = if web_config.mkcert == Some(true) {
        get_rustls_with_mkcert(web_config)?
    } else {
        get_rustls_without_mkcert(web_config)?
    };
    let config = RustlsConfig::from_pem_file(cert_path, key_path).await?;
    Ok(Some(config))
}
pub fn get_rustls_with_mkcert(web_config: &WebHttpsConfig) -> Result<(String, String)> {
const DEFAULT_KEY_PATH: &str = "ssl/key.pem";
const DEFAULT_CERT_PATH: &str = "ssl/cert.pem";
// Get paths to store certs, otherwise use ssl/item.pem
let key_path = web_config
.key_path
.clone()
.unwrap_or(DEFAULT_KEY_PATH.to_string());
let cert_path = web_config
.cert_path
.clone()
.unwrap_or(DEFAULT_CERT_PATH.to_string());
// Create ssl directory if using defaults
if key_path == DEFAULT_KEY_PATH && cert_path == DEFAULT_CERT_PATH {
_ = fs::create_dir("ssl");
}
let cmd = Command::new("mkcert")
.args([
"-install",
"-key-file",
&key_path,
"-cert-file",
&cert_path,
"localhost",
"::1",
"127.0.0.1",
])
.spawn();
match cmd {
Err(e) => {
match e.kind() {
io::ErrorKind::NotFound => tracing::error!("mkcert is not installed. See https://github.com/FiloSottile/mkcert#installation for installation instructions."),
e => tracing::error!("an error occurred while generating mkcert certificates: {}", e.to_string()),
};
return Err("failed to generate mkcert certificates".into());
}
Ok(mut cmd) => {
cmd.wait()?;
}
}
Ok((cert_path, key_path))
}
/// Pull the TLS cert/key paths straight from the config.
///
/// Returns `(cert_path, key_path)`; errors if either path is unset.
pub fn get_rustls_without_mkcert(web_config: &WebHttpsConfig) -> Result<(String, String)> {
    match (web_config.cert_path.clone(), web_config.key_path.clone()) {
        (Some(cert), Some(key)) => Ok((cert, key)),
        _ => Err("https is enabled but cert or key path is missing".into()),
    }
}
/// Open the browser to the address
///
/// Best-effort: failures to launch a browser are silently ignored.
pub(crate) fn open_browser(base_path: Option<String>, address: SocketAddr, https: bool) {
    let protocol = if https { "https" } else { "http" };
    let base_path = base_path
        .as_deref()
        .map(|p| format!("/{}", p.trim_matches('/')))
        .unwrap_or_default();
    _ = open::that(format!("{protocol}://{address}{base_path}"));
}
/// Find the first port in 8000..9000 that can be bound on `address`, or
/// `None` if they're all taken.
fn get_available_port(address: IpAddr) -> Option<u16> {
    for port in 8000..9000 {
        // Binding succeeds only if the port is free; the probe listener is
        // dropped (and the port released) immediately.
        if TcpListener::bind((address, port)).is_ok() {
            return Some(port);
        }
    }
    None
}
/// Middleware that intercepts html requests if the status is "Building" and returns a loading page instead
async fn build_status_middleware(
    state: State<SharedStatus>,
    request: Request,
    next: Next,
) -> axum::response::Response {
    // If the request is for html, and the status is "Building", return the loading page instead of the contents of the response
    let accepts = request.headers().get(ACCEPT);
    let accepts_html = accepts
        .and_then(|v| v.to_str().ok())
        .map(|v| v.contains("text/html"));
    if let Some(true) = accepts_html {
        let status = state.get();
        // Anything other than Ready (still building, or errored) gets the
        // bundled loading page instead of the real response.
        if status != Status::Ready {
            let html = include_str!("../../assets/loading.html");
            return axum::response::Response::builder()
                .status(StatusCode::OK)
                .body(Body::from(html))
                .unwrap();
        }
    }
    next.run(request).await
}
/// Snapshot the shared build status, serialize it, and push it down one
/// build-status websocket. An `Err` means the client has gone away.
async fn send_build_status_to(
    build_status: &SharedStatus,
    socket: &mut WebSocket,
) -> Result<(), axum::Error> {
    let payload = serde_json::to_string(&build_status.get()).unwrap();
    socket.send(Message::Text(payload)).await
}

View file

@ -0,0 +1,283 @@
use std::path::PathBuf;
use crate::dioxus_crate::DioxusCrate;
use crate::serve::hot_reloading_file_map::FileMap;
use dioxus_hot_reload::HotReloadMsg;
use dioxus_html::HtmlCtx;
use futures_channel::mpsc::{UnboundedReceiver, UnboundedSender};
use futures_util::StreamExt;
use ignore::gitignore::Gitignore;
use notify::{event::ModifyKind, EventKind, RecommendedWatcher};
/// This struct stores the file watcher and the filemap for the project.
///
/// This is where we do workspace discovery and recursively listen for changes in Rust files and asset
/// directories.
pub struct Watcher {
    // Kept alive so the event channel never closes from the sender side.
    _tx: UnboundedSender<notify::Event>,
    // Raw filesystem events forwarded from the notify callback.
    rx: UnboundedReceiver<notify::Event>,
    _last_update_time: i64,
    // The OS watcher itself; dropping it stops the watch.
    _watcher: RecommendedWatcher,
    // Events received but not yet consumed by `dequeue_changed_files`.
    queued_events: Vec<notify::Event>,
    // Parsed rsx! locations used for hot-reload diffing.
    file_map: FileMap,
    // .gitignore rules plus built-in exclusions, used to filter both the
    // watched paths and the incoming events.
    ignore: Gitignore,
}
impl Watcher {
    /// Discover the project's watchable paths, start the OS file watcher, and
    /// build the rsx file map used for hot-reload diffing.
    pub fn start(config: &DioxusCrate) -> Self {
        let (tx, rx) = futures_channel::mpsc::unbounded();
        // Extend the watch path to include:
        // - the assets directory - this is so we can hotreload CSS and other assets by default
        // - the Cargo.toml file - this is so we can hotreload the project if the user changes dependencies
        // - the Dioxus.toml file - this is so we can hotreload the project if the user changes the Dioxus config
        let mut allow_watch_path = config.dioxus_config.web.watcher.watch_path.clone();
        allow_watch_path.push(config.dioxus_config.application.asset_dir.clone());
        allow_watch_path.push("Cargo.toml".to_string().into());
        allow_watch_path.push("Dioxus.toml".to_string().into());
        allow_watch_path.dedup();
        let crate_dir = config.crate_dir();
        let gitignore_file_path = crate_dir.join(".gitignore");
        let mut builder = ignore::gitignore::GitignoreBuilder::new(gitignore_file_path);
        // Always ignore VCS/editor/build directories on top of the gitignore rules.
        let excluded_paths = vec![
            ".git",
            ".github",
            ".vscode",
            "target",
            "node_modules",
            "dist",
            ".dioxus",
        ];
        for path in excluded_paths {
            builder.add(path);
        }
        let ignore = builder.build().unwrap();
        // Create the file watcher
        let mut watcher = notify::recommended_watcher({
            let tx = tx.clone();
            move |info: notify::Result<notify::Event>| {
                if let Ok(e) = info {
                    match e.kind {
                        // Forward only content changes, creations, and removals;
                        // metadata-only events are dropped here.
                        EventKind::Modify(ModifyKind::Data(_)) |
                        EventKind::Create(_) |
                        EventKind::Remove(_) => {
                            _ = tx.unbounded_send(e);
                        },
                        _ => {}
                    }
                }
            }
        })
        .expect("Failed to create file watcher.\nEnsure you have the required permissions to watch the specified directories.");
        // Watch the specified paths
        // todo: make sure we don't double-watch paths if they're nested
        for sub_path in allow_watch_path {
            let path = &config.crate_dir().join(sub_path);
            // If the path is ignored, don't watch it
            if ignore.matched(path, path.is_dir()).is_ignore() {
                continue;
            }
            let mode = notify::RecursiveMode::Recursive;
            use notify::Watcher;
            if let Err(err) = watcher.watch(path, mode) {
                tracing::warn!("Failed to watch path: {}", err);
            }
        }
        // Probe the entire project looking for our rsx calls
        // Whenever we get an update from the file watcher, we'll try to hotreload against this file map
        let file_map = FileMap::create_with_filter::<HtmlCtx>(config.crate_dir(), |path| {
            ignore.matched(path, path.is_dir()).is_ignore()
        })
        .unwrap();
        Self {
            _tx: tx,
            rx,
            _watcher: watcher,
            file_map,
            ignore,
            queued_events: Vec::new(),
            _last_update_time: chrono::Local::now().timestamp(),
        }
    }
    /// A cancel safe handle to the file watcher
    ///
    /// todo: this should be simpler logic?
    pub async fn wait(&mut self) {
        // Pull off any queued events in succession
        while let Ok(Some(event)) = self.rx.try_next() {
            self.queued_events.push(event);
        }
        if !self.queued_events.is_empty() {
            return;
        }
        // If there are no queued events, wait for the next event
        if let Some(event) = self.rx.next().await {
            self.queued_events.push(event);
        }
    }
    /// Dequeues changed files from the event queue, doing the proper intelligent filtering
    pub fn dequeue_changed_files(&mut self, config: &DioxusCrate) -> Vec<PathBuf> {
        let mut all_mods: Vec<PathBuf> = vec![];
        // Decompose the events into a list of all the files that have changed
        for event in self.queued_events.drain(..) {
            // We only care about modify/create/delete events
            match event.kind {
                EventKind::Modify(ModifyKind::Data(_)) => {}
                EventKind::Modify(ModifyKind::Name(_)) => {}
                EventKind::Create(_) => {}
                EventKind::Remove(_) => {}
                _ => continue,
            }
            for path in event.paths {
                all_mods.push(path.clone());
            }
        }
        let mut modified_files = vec![];
        // For the non-rust files, we want to check if it's an asset file
        // This would mean the asset lives somewhere under the /assets directory or is referenced by manganis in the linker
        // todo: mg integration here
        let _asset_dir = config
            .dioxus_config
            .application
            .asset_dir
            .canonicalize()
            .ok();
        for path in all_mods.iter() {
            // Files with no extension (directories, lockfiles, etc.) are skipped.
            if path.extension().is_none() {
                continue;
            }
            // Workaround for notify and vscode-like editor:
            // when edit & save a file in vscode, there will be two notifications,
            // the first one is a file with empty content.
            // filter the empty file notification to avoid false rebuild during hot-reload
            if let Ok(metadata) = std::fs::metadata(path) {
                if metadata.len() == 0 {
                    continue;
                }
            }
            // If the file is a backup file, or a hidden file, ignore it completely (no rebuilds)
            if is_backup_file(path.to_path_buf()) {
                tracing::trace!("Ignoring backup file: {:?}", path);
                continue;
            }
            // If the path is ignored, don't watch it
            if self.ignore.matched(path, path.is_dir()).is_ignore() {
                continue;
            }
            modified_files.push(path.clone());
        }
        modified_files
    }
    /// Try to produce a hot-reload message for the given changed files by
    /// re-parsing their rsx! against the file map. Returns `None` when any
    /// rust file fails to diff (the caller should fall back to a rebuild).
    pub fn attempt_hot_reload(
        &mut self,
        config: &DioxusCrate,
        modified_files: Vec<PathBuf>,
    ) -> Option<HotReloadMsg> {
        // If we have any changes to the rust files, we need to update the file map
        let crate_dir = config.crate_dir();
        let mut templates = vec![];
        // Prepare the hotreload message we need to send
        let mut edited_rust_files = Vec::new();
        let mut assets = Vec::new();
        let mut unknown_files = Vec::new();
        for path in modified_files {
            // for various assets that might be linked in, we just try to hotreload them forcefully
            // That is, unless they appear in an include! macro, in which case we need to do a full rebuild....
            let Some(ext) = path.extension().and_then(|v| v.to_str()) else {
                continue;
            };
            match ext {
                "rs" => edited_rust_files.push(path),
                _ if path.starts_with("assets") => assets.push(path),
                _ => unknown_files.push(path),
            }
        }
        for rust_file in edited_rust_files {
            let hotreloaded_templates = self
                .file_map
                .update_rsx::<HtmlCtx>(&rust_file, &crate_dir)
                .ok()?;
            templates.extend(hotreloaded_templates);
        }
        Some(HotReloadMsg {
            templates,
            assets,
            unknown_files,
        })
    }
    /// Ensure the changes we've received from the queue are actually legit changes to either assets or
    /// rust code. We don't care about changes otherwise, unless we get a signal elsewhere to do a full rebuild
    pub fn pending_changes(&mut self) -> bool {
        !self.queued_events.is_empty()
    }
}
/// Returns true for files that editors produce as artifacts: tilde-suffixed
/// backup files (`foo.rs~`) and hidden dotfiles (`.foo`).
fn is_backup_file(path: PathBuf) -> bool {
    path.file_name()
        .and_then(|name| name.to_str())
        // No file name (or non-UTF-8 name) -> not treated as a backup.
        .map_or(false, |name| name.ends_with('~') || name.starts_with('.'))
}
#[test]
fn test_is_backup_file() {
    // Editor backup files (trailing tilde) and hidden dotfiles are flagged...
    assert!(is_backup_file(PathBuf::from("examples/test.rs~")));
    assert!(is_backup_file(PathBuf::from("examples/.back")));
    assert!(is_backup_file(PathBuf::from("test.rs~")));
    assert!(is_backup_file(PathBuf::from(".back")));
    // ...while normal source files, including absolute paths, are not.
    assert!(!is_backup_file(PathBuf::from("val.rs")));
    assert!(!is_backup_file(PathBuf::from(
        "/Users/jonkelley/Development/Tinkering/basic_05_example/src/lib.rs"
    )));
    assert!(!is_backup_file(PathBuf::from("exmaples/val.rs")));
}

View file

@ -1,373 +0,0 @@
use crate::server::SharedFileMap;
use crate::{
cfg::ConfigOptsServe,
server::{
output::{print_console_info, PrettierOptions},
setup_file_watcher, Platform,
},
BuildResult, Result,
};
use dioxus_cli_config::CrateConfig;
use dioxus_hot_reload::HotReloadMsg;
use dioxus_html::HtmlCtx;
use dioxus_rsx::hot_reload::*;
use interprocess::local_socket::LocalSocketListener;
use std::{
fs::create_dir_all,
process::{Child, Command},
sync::{Arc, RwLock},
};
#[cfg(feature = "plugin")]
use crate::plugin::PluginManager;
use super::HotReloadState;
/// Start the dev server targeting the desktop platform.
pub fn startup(config: CrateConfig, serve: &ConfigOptsServe) -> Result<()> {
    startup_with_platform::<DesktopPlatform>(config, serve)
}
/// Boot the dev server for the given platform: install ctrl-c handling and,
/// when hot reload is enabled, build the rsx file map before serving.
pub(crate) fn startup_with_platform<P: Platform + Send + 'static>(
    config: CrateConfig,
    serve_cfg: &ConfigOptsServe,
) -> Result<()> {
    set_ctrl_c(&config);
    // Build the rsx file map up front so hot reload has a baseline to diff against.
    let file_map = match config.hot_reload {
        true => {
            let FileMapBuildResult { map, errors } =
                FileMap::<HtmlCtx>::create(config.crate_dir.clone()).unwrap();
            // Parse errors are logged but don't stop the server from starting.
            for err in errors {
                tracing::error!("{}", err);
            }
            let file_map = Arc::new(Mutex::new(map));
            Some(file_map.clone())
        }
        false => None,
    };
    let hot_reload_state = HotReloadState {
        receiver: Default::default(),
        file_map,
    };
    serve::<P>(config, serve_cfg, hot_reload_state)?;
    Ok(())
}
/// Install a ctrl-c handler that exits the process.
///
/// With the `plugin` feature enabled, plugins get a chance to run their
/// serve-shutdown hooks before the exit.
fn set_ctrl_c(config: &CrateConfig) {
    let _shutdown_config = config.clone();
    let _ = ctrlc::set_handler(move || {
        #[cfg(feature = "plugin")]
        let _ = PluginManager::on_serve_shutdown(&_shutdown_config);
        std::process::exit(0);
    });
}
/// Start the server without hot reload
///
/// Builds/launches the platform app synchronously, then runs the hot-reload
/// socket task and file watcher on a fresh tokio runtime until completion.
fn serve<P: Platform + Send + 'static>(
    config: CrateConfig,
    serve: &ConfigOptsServe,
    hot_reload_state: HotReloadState,
) -> Result<()> {
    // Boot the target platform before entering async land; the RwLock lets the
    // watcher's rebuild closure mutate it later.
    let platform = RwLock::new(P::start(&config, serve, Vec::new())?);
    let rt = tokio::runtime::Runtime::new().unwrap();
    rt.block_on(async move {
        let hot_reload: tokio::task::JoinHandle<Result<()>> = tokio::spawn({
            let hot_reload_state = hot_reload_state.clone();
            async move {
                match hot_reload_state.file_map.clone() {
                    Some(file_map) => {
                        // The open interprocess sockets
                        start_desktop_hot_reload(hot_reload_state, file_map).await?;
                    }
                    None => {
                        // Hot reload disabled - park this task forever.
                        std::future::pending::<()>().await;
                    }
                }
                Ok(())
            }
        });
        tracing::info!("🚀 Starting development server...");
        // We got to own watcher so that it exists for the duration of serve
        // Otherwise full reload won't work.
        let _watcher = setup_file_watcher(
            {
                let config = config.clone();
                let serve = serve.clone();
                move || {
                    platform
                        .write()
                        .unwrap()
                        .rebuild(&config, &serve, Vec::new())
                }
            },
            &config,
            None,
            hot_reload_state,
        )
        .await?;
        hot_reload.await.unwrap()?;
        Ok(())
    })
}
/// Serve hot-reload messages to running desktop apps over a local socket.
///
/// Binds an interprocess socket (a named pipe on windows, a file socket in the
/// target dir elsewhere), accepts app connections on a dedicated blocking
/// thread (replaying already-changed templates to late joiners), and forwards
/// every hot-reload message to all connected apps.
async fn start_desktop_hot_reload(
    hot_reload_state: HotReloadState,
    file_map: SharedFileMap,
) -> Result<()> {
    let metadata = cargo_metadata::MetadataCommand::new()
        .no_deps()
        .exec()
        .unwrap();
    let target_dir = metadata.target_directory.as_std_path();
    let _ = create_dir_all(target_dir); // `_all` is for good measure and future-proofness.
    let path = target_dir.join("dioxusin");
    clear_paths(&path);
    let listener = if cfg!(windows) {
        LocalSocketListener::bind("@dioxusin")
    } else {
        LocalSocketListener::bind(path)
    };
    match listener {
        Ok(local_socket_stream) => {
            let aborted = Arc::new(Mutex::new(false));
            // States
            // The open interprocess sockets
            let channels = Arc::new(Mutex::new(Vec::new()));
            // listen for connections
            std::thread::spawn({
                let channels = channels.clone();
                let aborted = aborted.clone();
                move || {
                    loop {
                        //accept() will block the thread when local_socket_stream is in blocking mode (default)
                        match local_socket_stream.accept() {
                            Ok(mut connection) => {
                                // send any templates that have changed before the socket connected
                                let templates: Vec<_> = {
                                    file_map
                                        .lock()
                                        .unwrap()
                                        .map
                                        .values()
                                        .flat_map(|v| v.templates.values().copied())
                                        .collect()
                                };
                                for template in templates {
                                    if !send_msg(
                                        HotReloadMsg::UpdateTemplate(template),
                                        &mut connection,
                                    ) {
                                        continue;
                                    }
                                }
                                channels.lock().unwrap().push(connection);
                                println!("Connected to hot reloading 🚀");
                            }
                            Err(err) => {
                                let error_string = err.to_string();
                                // Filter out any error messages about a operation that may block and an error message that triggers on some operating systems that says "Waiting for a process to open the other end of the pipe" without WouldBlock being set
                                let display_error = err.kind() != std::io::ErrorKind::WouldBlock
                                    && !error_string.contains("Waiting for a process");
                                if display_error {
                                    println!("Error connecting to hot reloading: {} (Hot reloading is a feature of the dioxus-cli. If you are not using the CLI, this error can be ignored)", err);
                                }
                            }
                        }
                        if *aborted.lock().unwrap() {
                            break;
                        }
                    }
                }
            });
            let mut hot_reload_rx = hot_reload_state.receiver.subscribe();
            // Fan each hot-reload message out to every connected app, dropping
            // channels whose writes fail (app exited).
            while let Ok(msg) = hot_reload_rx.recv().await {
                let channels = &mut *channels.lock().unwrap();
                let mut i = 0;
                while i < channels.len() {
                    let channel = &mut channels[i];
                    if send_msg(msg.clone(), channel) {
                        i += 1;
                    } else {
                        channels.remove(i);
                    }
                }
            }
        }
        Err(error) => println!("failed to connect to hot reloading\n{error}"),
    }
    Ok(())
}
/// Delete a stale hot-reload file socket left over from a previous session.
///
/// On unix, force-quitting the application can leave the socket file on disk,
/// which would make the next `LocalSocketListener::bind` fail. Removal is
/// best-effort; errors are ignored. No-op on non-unix targets.
fn clear_paths(file_socket_path: &std::path::Path) {
    if !cfg!(unix) {
        return;
    }
    if file_socket_path.exists() {
        let _ = std::fs::remove_file(file_socket_path);
    }
}
/// Serialize a hot-reload message as JSON and write it, newline-terminated,
/// to `channel`.
///
/// Returns `false` when serialization or either write fails so the caller can
/// treat the connection as dead and drop it.
fn send_msg(msg: HotReloadMsg, channel: &mut impl std::io::Write) -> bool {
    let Ok(payload) = serde_json::to_string(&msg) else {
        return false;
    };
    // Short-circuits: the newline is only written if the payload write succeeded.
    channel.write_all(payload.as_bytes()).is_ok() && channel.write_all(b"\n").is_ok()
}
/// Launch a freshly-built desktop executable.
///
/// Sets `DIOXUS_ACTIVE=true` plus any extra `env` vars so the child knows it is
/// being managed by the CLI, and wraps the child in [`RAIIChild`] so it is
/// terminated when dropped (e.g. on rebuild or CLI shutdown).
///
/// Returns the running child together with the `BuildResult` that produced it.
///
/// Note: takes `&[String]` rather than `&Vec<String>` (clippy `ptr_arg`);
/// existing `&Vec<String>` call sites coerce automatically.
fn run_desktop(
    args: &[String],
    env: Vec<(String, String)>,
    result: BuildResult,
) -> Result<(RAIIChild, BuildResult)> {
    // Marker env var so the app can detect it was launched by the CLI.
    let active = "DIOXUS_ACTIVE";
    let executable = result
        .executable
        .clone()
        // Lazy construction: the error is only built on the failure path.
        .ok_or_else(|| anyhow::anyhow!("No executable found after desktop build"))?;
    let child = RAIIChild(
        Command::new(executable)
            .args(args)
            .env(active, "true")
            .envs(env)
            .spawn()?,
    );
    Ok((child, result))
}
/// The desktop renderer's implementation of the `Platform` trait.
///
/// Owns the currently-running desktop process so it can be torn down and
/// relaunched on rebuild.
pub(crate) struct DesktopPlatform {
    /// Arguments forwarded to the desktop executable on every (re)launch.
    args: Vec<String>,
    /// The live child process; killed when this struct is dropped (see `RAIIChild`).
    currently_running_child: RAIIChild,
    /// Whether asset processing is skipped for this serve session.
    skip_assets: bool,
}
impl DesktopPlatform {
    /// Launch an already-built desktop binary, print the serve banner, and
    /// return the platform handle that owns the running process.
    ///
    /// Split out from `Platform::start()` so fullstack can reuse it with a
    /// server build that was produced with custom rust flags.
    pub fn start_with_options(
        build_result: BuildResult,
        config: &CrateConfig,
        serve: &ConfigOptsServe,
        env: Vec<(String, String)>,
    ) -> Result<Self> {
        let (child, first_build_result) = run_desktop(&serve.args, env, build_result)?;
        tracing::info!("🚀 Starting development server...");
        // Print serve info
        print_console_info(
            config,
            PrettierOptions {
                changed: vec![],
                warnings: first_build_result.warnings,
                elapsed_time: first_build_result.elapsed_time,
            },
            None,
        );
        Ok(Self {
            args: serve.args.clone(),
            currently_running_child: child,
            skip_assets: serve.skip_assets,
        })
    }

    /// Tear down the running app, rebuild the binary, and relaunch it.
    ///
    /// `rust_flags` argument is added because it is used by the
    /// `DesktopPlatform`'s implementation of the `Platform::rebuild()`.
    pub fn rebuild_with_options(
        &mut self,
        config: &CrateConfig,
        rust_flags: Option<String>,
        env: Vec<(String, String)>,
    ) -> Result<BuildResult> {
        // Gracefully shutdown the desktop app
        // It might have a receiver to do some cleanup stuff
        let pid = self.currently_running_child.0.id();
        // on unix, we can send a signal to the process to shut down
        #[cfg(unix)]
        {
            _ = Command::new("kill")
                .args(["-s", "TERM", &pid.to_string()])
                .spawn();
        }
        // on windows, use the `taskkill` command
        #[cfg(windows)]
        {
            _ = Command::new("taskkill")
                .args(["/F", "/PID", &pid.to_string()])
                .spawn();
        }
        // Todo: add a timeout here to kill the process if it doesn't shut down within a reasonable time
        self.currently_running_child.0.wait()?;
        let build_result =
            crate::builder::build_desktop(config, true, self.skip_assets, rust_flags)?;
        // Replacing the old child drops it, which also force-kills any remnant.
        let (child, result) = run_desktop(&self.args, env, build_result)?;
        self.currently_running_child = child;
        Ok(result)
    }
}
impl Platform for DesktopPlatform {
    /// Build the desktop target and launch it for the first time.
    fn start(
        config: &CrateConfig,
        serve: &ConfigOptsServe,
        env: Vec<(String, String)>,
    ) -> Result<Self> {
        let build_result = crate::builder::build_desktop(config, true, serve.skip_assets, None)?;
        Self::start_with_options(build_result, config, serve, env)
    }

    /// Rebuild and relaunch the app. Desktop builds don't use `rust_flags`,
    /// so `None` is forwarded to `rebuild_with_options`.
    fn rebuild(
        &mut self,
        config: &CrateConfig,
        _: &ConfigOptsServe,
        env: Vec<(String, String)>,
    ) -> Result<BuildResult> {
        self.rebuild_with_options(config, None, env)
    }
}
/// Guard that owns a spawned child process and terminates it on drop.
///
/// Ensures the desktop app doesn't outlive the CLI when `dx` exits or when the
/// child is replaced during a rebuild.
struct RAIIChild(Child);

impl Drop for RAIIChild {
    fn drop(&mut self) {
        // Best-effort: the process may already have exited.
        let _ = self.0.kill();
        // `kill` only delivers the signal; the process must also be waited on
        // or it lingers as a zombie on unix until the CLI itself exits.
        let _ = self.0.wait();
    }
}

View file

@ -1,172 +0,0 @@
use dioxus_cli_config::CrateConfig;
use crate::{
cfg::{ConfigOptsBuild, ConfigOptsServe},
BuildResult, Result,
};
use super::{
desktop::{self, DesktopPlatform},
Platform,
};
static CLIENT_RUST_FLAGS: &str = "-C debuginfo=none -C strip=debuginfo";
// The `opt-level=2` increases build times, but can noticeably decrease time
// between saving changes and being able to interact with an app. The "overall"
// time difference (between having and not having the optimization) can be
// almost imperceptible (~1 s) but also can be very noticeable (~6 s) — depends
// on setup (hardware, OS, browser, idle load).
static SERVER_RUST_FLAGS: &str = "-C opt-level=2";
static DEBUG_RUST_FLAG: &str = "-C debug-assertions";
/// Build the rustflags string for a dx build: `base_flags`, plus the
/// debug-assertions flag when this is not a release build.
fn rust_flags(build: &ConfigOptsBuild, base_flags: &str) -> String {
    let mut flags = base_flags.to_string();
    if !build.release {
        flags.push(' ');
        flags.push_str(DEBUG_RUST_FLAG);
    }
    flags
}
/// Rustflags for the web (client) build: strips debuginfo to speed up builds.
pub fn client_rust_flags(build: &ConfigOptsBuild) -> String {
    rust_flags(build, CLIENT_RUST_FLAGS)
}
/// Rustflags for the server build: applies `opt-level=2` (see the note on
/// `SERVER_RUST_FLAGS` above for the trade-off).
pub fn server_rust_flags(build: &ConfigOptsBuild) -> String {
    rust_flags(build, SERVER_RUST_FLAGS)
}
/// Entry point for serving a fullstack app: reuses the desktop serve loop with
/// `FullstackPlatform` as the platform implementation.
pub fn startup(config: CrateConfig, serve: &ConfigOptsServe) -> Result<()> {
    desktop::startup_with_platform::<FullstackPlatform>(config, serve)
}
/// Kick off the web (client) build on a background thread so it can overlap
/// with the server build.
///
/// The caller must `join()` the handle and propagate its `Result`.
fn start_web_build_thread(
    config: &CrateConfig,
    serve: &ConfigOptsServe,
) -> std::thread::JoinHandle<Result<()>> {
    let serve = serve.clone();
    let target_directory = config.client_target_dir();
    // Creating the directory here is a precondition for the build; failure is
    // unrecoverable, so panic with context instead of a bare unwrap.
    std::fs::create_dir_all(&target_directory)
        .expect("failed to create the client target directory for the web build");
    std::thread::spawn(move || build_web(serve, &target_directory))
}
/// Derive the server (desktop) build config from the base crate config.
///
/// Moves the server build into its own target dir when builds run in parallel
/// (so it doesn't contend with the web build), and enables the configured
/// server feature.
fn make_desktop_config(config: &CrateConfig, serve: &ConfigOptsServe) -> CrateConfig {
    let mut desktop_config = config.clone();

    if !serve.force_sequential {
        desktop_config.target_dir = config.server_target_dir();
    }

    // Enable the server feature, creating the feature list if there was none.
    desktop_config
        .features
        .get_or_insert_with(Vec::new)
        .push(serve.server_feature.clone());

    desktop_config
}
/// Forward the serve CLI options to the launched server process through a
/// private environment variable, serialized as JSON.
fn add_serve_options_to_env(serve: &ConfigOptsServe, env: &mut Vec<(String, String)>) {
    env.push((
        dioxus_cli_config::__private::SERVE_ENV.to_string(),
        serde_json::to_string(&serve.server_arguments).unwrap(),
    ));
}
/// Fullstack platform: a desktop (server) process paired with a web (client) build.
struct FullstackPlatform {
    /// Serve options captured at startup, reused on every rebuild.
    serve: ConfigOptsServe,
    /// The running server process, managed like a plain desktop app.
    desktop: desktop::DesktopPlatform,
    /// Rustflags used for the server build; kept so rebuilds match the first build.
    server_rust_flags: String,
}
impl Platform for FullstackPlatform {
fn start(
config: &CrateConfig,
serve: &ConfigOptsServe,
env: Vec<(String, String)>,
) -> Result<Self>
where
Self: Sized,
{
let thread_handle = start_web_build_thread(config, serve);
let desktop_config = make_desktop_config(config, serve);
let server_rust_flags = server_rust_flags(&serve.clone().into());
let mut desktop_env = env.clone();
add_serve_options_to_env(serve, &mut desktop_env);
let build_result = crate::builder::build_desktop(
&desktop_config,
true,
serve.skip_assets,
Some(server_rust_flags.clone()),
)?;
thread_handle
.join()
.map_err(|_| anyhow::anyhow!("Failed to join thread"))??;
// Only start the server after the web build is finished
let desktop =
DesktopPlatform::start_with_options(build_result, &desktop_config, serve, desktop_env)?;
if serve.open.unwrap_or_default() {
crate::server::web::open_browser(
config,
serve
.server_arguments
.addr
.unwrap_or(std::net::IpAddr::V4(std::net::Ipv4Addr::new(0, 0, 0, 0))),
serve.server_arguments.port,
false,
);
}
Ok(Self {
desktop,
serve: serve.clone(),
server_rust_flags,
})
}
fn rebuild(
&mut self,
crate_config: &CrateConfig,
serve: &ConfigOptsServe,
env: Vec<(String, String)>,
) -> Result<BuildResult> {
let thread_handle = start_web_build_thread(crate_config, &self.serve);
let desktop_config = make_desktop_config(crate_config, &self.serve);
let mut desktop_env = env.clone();
add_serve_options_to_env(serve, &mut desktop_env);
let result = self.desktop.rebuild_with_options(
&desktop_config,
Some(self.server_rust_flags.clone()),
desktop_env,
);
thread_handle
.join()
.map_err(|_| anyhow::anyhow!("Failed to join thread"))??;
result
}
}
/// Build the web (client) half of a fullstack app into `target_directory`.
///
/// Forces the platform to `Web`, enables the configured client feature, and
/// applies the client rustflags.
fn build_web(serve: ConfigOptsServe, target_directory: &std::path::Path) -> Result<()> {
    let mut web_config: ConfigOptsBuild = serve.into();

    // Enable the client feature, creating the feature list if there was none.
    let web_feature = web_config.client_feature.clone();
    web_config
        .features
        .get_or_insert_with(Vec::new)
        .push(web_feature);

    web_config.platform = Some(dioxus_cli_config::Platform::Web);

    crate::cli::build::Build {
        build: web_config.clone(),
    }
    .build(
        None,
        // Parallel builds write the client into its own target dir.
        (!web_config.force_sequential).then_some(target_directory),
        Some(client_rust_flags(&web_config)),
    )
}

View file

@ -1,361 +0,0 @@
use crate::{cfg::ConfigOptsServe, BuildResult, Result};
use dioxus_cli_config::CrateConfig;
use cargo_metadata::diagnostic::Diagnostic;
use dioxus_hot_reload::{HotReloadMsg, HotReloadReceiver};
use dioxus_html::HtmlCtx;
use dioxus_rsx::hot_reload::*;
use fs_extra::dir::CopyOptions;
use notify::{RecommendedWatcher, Watcher};
use std::{path::PathBuf, sync::Arc};
mod output;
use output::*;
pub mod desktop;
pub mod fullstack;
pub mod web;
/// Shared hot-reload state threaded through the file watcher and each platform server.
#[derive(Clone)]
pub struct HotReloadState {
    /// The receiver for hot reload messages
    pub receiver: HotReloadReceiver,
    /// The file map that tracks the state of the project.
    /// `None` when rsx hot reload is disabled, in which case changes trigger full rebuilds.
    pub file_map: Option<SharedFileMap>,
}
/// The rsx file map, shared between the watcher callback and the socket servers.
type SharedFileMap = Arc<Mutex<FileMap<HtmlCtx>>>;
/// Sets up a file watcher.
///
/// Will attempt to hotreload HTML, RSX (.rs), and CSS
async fn setup_file_watcher<F: Fn() -> Result<BuildResult> + Send + 'static>(
build_with: F,
config: &CrateConfig,
web_info: Option<WebServerInfo>,
hot_reload: HotReloadState,
) -> Result<RecommendedWatcher> {
let mut last_update_time = chrono::Local::now().timestamp();
// file watcher: check file change
let mut allow_watch_path = config.dioxus_config.web.watcher.watch_path.clone();
// Extend the watch path to include the assets directory - this is so we can hotreload CSS and other assets by default
allow_watch_path.push(config.dioxus_config.application.asset_dir.clone());
// Extend the watch path to include Cargo.toml and Dioxus.toml
allow_watch_path.push("Cargo.toml".to_string().into());
allow_watch_path.push("Dioxus.toml".to_string().into());
allow_watch_path.dedup();
// Create the file watcher
let mut watcher = notify::recommended_watcher({
let watcher_config = config.clone();
move |info: notify::Result<notify::Event>| {
let Ok(e) = info else {
return;
};
watch_event(
e,
&mut last_update_time,
&hot_reload,
&watcher_config,
&build_with,
&web_info,
);
}
})
.expect("Failed to create file watcher - please ensure you have the required permissions to watch the specified directories.");
// Watch the specified paths
for sub_path in allow_watch_path {
let path = &config.crate_dir.join(sub_path);
let mode = notify::RecursiveMode::Recursive;
if let Err(err) = watcher.watch(path, mode) {
tracing::warn!("Failed to watch path: {}", err);
}
}
Ok(watcher)
}
fn watch_event<F>(
event: notify::Event,
last_update_time: &mut i64,
hot_reload: &HotReloadState,
config: &CrateConfig,
build_with: &F,
web_info: &Option<WebServerInfo>,
) where
F: Fn() -> Result<BuildResult> + Send + 'static,
{
// Ensure that we're tracking only modifications
if !matches!(
event.kind,
notify::EventKind::Create(_) | notify::EventKind::Remove(_) | notify::EventKind::Modify(_)
) {
return;
}
// Ensure that we're not rebuilding too frequently
if chrono::Local::now().timestamp() <= *last_update_time {
return;
}
// By default we want to not do a full rebuild, and instead let the hot reload system invalidate it
let mut needs_full_rebuild = false;
if let Some(file_map) = &hot_reload.file_map {
hotreload_files(
hot_reload,
file_map,
&mut needs_full_rebuild,
&event,
config,
);
}
if needs_full_rebuild {
full_rebuild(build_with, last_update_time, config, event, web_info);
}
}
fn full_rebuild<F>(
build_with: &F,
last_update_time: &mut i64,
config: &CrateConfig,
event: notify::Event,
web_info: &Option<WebServerInfo>,
) where
F: Fn() -> Result<BuildResult> + Send + 'static,
{
match build_with() {
Ok(res) => {
*last_update_time = chrono::Local::now().timestamp();
#[allow(clippy::redundant_clone)]
print_console_info(
config,
PrettierOptions {
changed: event.paths.clone(),
warnings: res.warnings,
elapsed_time: res.elapsed_time,
},
web_info.clone(),
);
}
Err(e) => {
*last_update_time = chrono::Local::now().timestamp();
tracing::error!("{:?}", e);
}
}
}
fn hotreload_files(
hot_reload: &HotReloadState,
file_map: &SharedFileMap,
needs_full_rebuild: &mut bool,
event: &notify::Event,
config: &CrateConfig,
) {
// find changes to the rsx in the file
let mut rsx_file_map = file_map.lock().unwrap();
let mut messages: Vec<HotReloadMsg> = Vec::new();
for path in &event.paths {
// Attempt to hotreload this file
let is_potentially_reloadable = hotreload_file(
path,
config,
&rsx_file_map,
&mut messages,
needs_full_rebuild,
);
// If the file was not hotreloaded, continue
if is_potentially_reloadable.is_none() {
continue;
}
// If the file was hotreloaded, update the file map in place
match rsx_file_map.update_rsx(path, &config.crate_dir) {
Ok(UpdateResult::UpdatedRsx(msgs)) => {
messages.extend(msgs.into_iter().map(HotReloadMsg::UpdateTemplate));
}
// If the file was not updated, we need to do a full rebuild
Ok(UpdateResult::NeedsRebuild) => {
tracing::trace!("Needs full rebuild because file changed: {:?}", path);
*needs_full_rebuild = true;
}
// Not necessarily a fatal error, but we should log it
Err(err) => tracing::error!("{}", err),
}
}
// If full rebuild, extend the file map with the new file map
// This will wipe away any previous cached changed templates
if *needs_full_rebuild {
// Reset the file map to the new state of the project
let FileMapBuildResult {
map: new_file_map,
errors,
} = FileMap::<HtmlCtx>::create(config.crate_dir.clone()).unwrap();
for err in errors {
tracing::error!("{}", err);
}
*rsx_file_map = new_file_map;
return;
}
for msg in messages {
hot_reload.receiver.send_message(msg);
}
}
/// Decide how a single changed file should be handled by hot reload.
///
/// Returns `Some(())` when the file is a Rust source that the caller should run
/// through the rsx hot-reload pipeline, `None` otherwise. Sets
/// `needs_full_rebuild` for files that cannot be hot reloaded in place.
fn hotreload_file(
    path: &Path,
    config: &CrateConfig,
    rsx_file_map: &std::sync::MutexGuard<'_, FileMap<HtmlCtx>>,
    messages: &mut Vec<HotReloadMsg>,
    needs_full_rebuild: &mut bool,
) -> Option<()> {
    // for various assets that might be linked in, we just try to hotreload them forcefully
    // That is, unless they appear in an include! macro, in which case we need a full rebuild....
    let ext = path.extension().and_then(|v| v.to_str())?;

    // Workaround for notify and vscode-like editors:
    // when editing & saving a file in vscode there are two notifications,
    // the first one for a file with empty content.
    // Filter out the empty-file notification to avoid a false rebuild during hot-reload.
    if let Ok(metadata) = fs::metadata(path) {
        if metadata.len() == 0 {
            return None;
        }
    }

    // If the file is a backup file or a hidden file, ignore it completely (no rebuilds)
    if is_backup_file(path) {
        tracing::trace!("Ignoring backup file: {:?}", path);
        return None;
    }

    // Attempt to hotreload css in the asset directory
    // Currently no other assets are hotreloaded, but in theory we could hotreload pngs/jpegs, etc
    //
    // All potential hotreloadable mime types:
    // "bin" |"css" | "csv" | "html" | "ico" | "js" | "json" | "jsonld" | "mjs" | "rtf" | "svg" | "mp4"
    if ext == "css" {
        let asset_dir = config
            .crate_dir
            .join(&config.dioxus_config.application.asset_dir);

        // Only if the CSS is in the asset directory, and we're tracking it, do we hotreload it
        // Otherwise, we need to do a full rebuild since the user might be doing an include_str! on it
        if attempt_css_reload(path, asset_dir, rsx_file_map, config, messages).is_none() {
            *needs_full_rebuild = true;
        }

        return None;
    }

    // Any other non-Rust file can't be hotreloaded, so fall back to a full rebuild.
    // (The css case has already returned above, so only `ext != "rs"` matters here.)
    if ext != "rs" {
        *needs_full_rebuild = true;
        return None;
    }

    Some(())
}
fn attempt_css_reload(
path: &Path,
asset_dir: PathBuf,
rsx_file_map: &std::sync::MutexGuard<'_, FileMap<HtmlCtx>>,
config: &CrateConfig,
messages: &mut Vec<HotReloadMsg>,
) -> Option<()> {
// If the path is not in the asset directory, return
if !path.starts_with(asset_dir) {
return None;
}
// Get the local path of the asset (ie var.css or some_dir/var.css as long as the dir is under the asset dir)
let local_path = local_path_of_asset(path)?;
// Make sure we're actually tracking this asset...
_ = rsx_file_map.is_tracking_asset(&local_path)?;
// copy the asset over to the output directory
// todo this whole css hotreloading should be less hacky and more robust
_ = fs_extra::copy_items(
&[path],
config.out_dir(),
&CopyOptions::new().overwrite(true),
);
messages.push(HotReloadMsg::UpdateAsset(local_path));
Some(())
}
/// Strip an asset path down to just its file name
/// (e.g. `assets/app.css` -> `app.css`).
///
/// Returns `None` when the path has no final component or the name is not
/// valid UTF-8.
fn local_path_of_asset(path: &Path) -> Option<PathBuf> {
    let name = path.file_name()?.to_str()?;
    Some(PathBuf::from(name))
}
pub(crate) trait Platform {
fn start(
config: &CrateConfig,
serve: &ConfigOptsServe,
env: Vec<(String, String)>,
) -> Result<Self>
where
Self: Sized;
fn rebuild(
&mut self,
config: &CrateConfig,
serve: &ConfigOptsServe,
env: Vec<(String, String)>,
) -> Result<BuildResult>;
}
/// Heuristic for editor backup / hidden files that should never trigger a reload.
///
/// A file counts as a backup when its name ends with `~` (vim/emacs-style
/// backups) or starts with `.` (hidden files, editor swap/lock files).
/// Paths without a UTF-8 file name are never considered backups.
fn is_backup_file(path: &Path) -> bool {
    // Single extraction of the file name replaces the original's two
    // duplicated `file_name()`/`to_str()` chains.
    path.file_name()
        .and_then(|name| name.to_str())
        .is_some_and(|name| name.ends_with('~') || name.starts_with('.'))
}
/// Spot checks for `is_backup_file`: trailing `~` and leading `.` names are
/// backups; ordinary source paths (relative or absolute) are not.
#[test]
fn test_is_backup_file() {
    assert!(is_backup_file(&PathBuf::from("examples/test.rs~")));
    assert!(is_backup_file(&PathBuf::from("examples/.back")));
    assert!(is_backup_file(&PathBuf::from("test.rs~")));
    assert!(is_backup_file(&PathBuf::from(".back")));
    assert!(!is_backup_file(&PathBuf::from("val.rs")));
    assert!(!is_backup_file(&PathBuf::from(
        "/Users/jonkelley/Development/Tinkering/basic_05_example/src/lib.rs"
    )));
    assert!(!is_backup_file(&PathBuf::from("exmaples/val.rs")));
}

View file

@ -1,157 +0,0 @@
use crate::server::Diagnostic;
use colored::Colorize;
use dioxus_cli_config::{crate_root, CrateConfig};
use std::{net::IpAddr, path::PathBuf, process::Command};
#[derive(Debug, Default)]
pub struct PrettierOptions {
pub changed: Vec<PathBuf>,
pub warnings: Vec<Diagnostic>,
pub elapsed_time: u128,
}
#[derive(Debug, Clone)]
pub struct WebServerInfo {
pub ip: IpAddr,
pub port: u16,
}
pub fn print_console_info(
config: &CrateConfig,
options: PrettierOptions,
web_info: Option<WebServerInfo>,
) {
// Don't clear the screen if the user has set the DIOXUS_LOG environment variable to "trace" so that we can see the logs
if Some("trace") != std::env::var("DIOXUS_LOG").ok().as_deref() {
if let Ok(native_clearseq) = Command::new(if cfg!(target_os = "windows") {
"cls"
} else {
"clear"
})
.output()
{
print!("{}", String::from_utf8_lossy(&native_clearseq.stdout));
} else {
// Try ANSI-Escape characters
print!("\x1b[2J\x1b[H");
}
}
let mut profile = if config.release { "Release" } else { "Debug" }.to_string();
if config.custom_profile.is_some() {
profile = config.custom_profile.as_ref().unwrap().to_string();
}
let hot_reload = if config.hot_reload { "RSX" } else { "Normal" };
let crate_root = crate_root().unwrap();
let custom_html_file = if crate_root.join("index.html").is_file() {
"Custom [index.html]"
} else {
"None"
};
let url_rewrite = if config.dioxus_config.web.watcher.index_on_404 {
"True"
} else {
"False"
};
let proxies = &config.dioxus_config.web.proxy;
if options.changed.is_empty() {
println!(
"{} @ v{} [{}]",
"Dioxus".bold().green(),
clap::crate_version!(),
chrono::Local::now().format("%H:%M:%S").to_string().dimmed()
);
} else {
println!(
"Project Reloaded: {}\n",
format!(
"Changed {} files. [{}]",
options.changed.len(),
chrono::Local::now().format("%H:%M:%S").to_string().dimmed()
)
.purple()
.bold()
);
}
if let Some(WebServerInfo { ip, port }) = web_info {
let https = config.dioxus_config.web.https.enabled == Some(true);
let prefix = if https { "https://" } else { "http://" };
println!(
" > Local address: {}",
format!("{prefix}localhost:{}/", port).blue()
);
println!(
" > Network address: {}",
format!("{prefix}{}:{}/", ip, port).blue()
);
println!(
" > HTTPS: {}",
if https {
"Enabled".to_string().green()
} else {
"Disabled".to_string().red()
}
);
}
println!();
println!(" > Hot Reload Mode: {}", hot_reload.cyan());
println!(
" > Watching: [ {} ]",
config
.dioxus_config
.web
.watcher
.watch_path
.iter()
.cloned()
.chain(["Cargo.toml", "Dioxus.toml"].iter().map(PathBuf::from))
.map(|f| f.display().to_string())
.collect::<Vec<String>>()
.join(", ")
.cyan()
);
if !proxies.is_empty() {
println!(" > Proxies :");
for proxy in proxies {
println!(" - {}", proxy.backend.blue());
}
}
println!(" > Custom index.html: {}", custom_html_file.green());
println!(" > Serve index.html on 404: {}", url_rewrite.purple());
println!();
println!(
" > Build Features: [ {} ]",
config
.features
.clone()
.unwrap_or_default()
.join(", ")
.green()
);
println!(" > Build Profile: {}", profile.green());
println!(
" > Build took: {} millis",
options.elapsed_time.to_string().green().bold()
);
println!();
if options.warnings.is_empty() {
tracing::info!("{}\n", "A perfect compilation!".green().bold());
} else {
tracing::warn!(
"{}",
format!(
"There were {} warning messages during the build. Run `cargo check` to see them.",
options.warnings.len() - 1
)
.yellow()
.bold()
);
}
}

View file

@ -1,219 +0,0 @@
use crate::{
builder,
cfg::ConfigOptsServe,
serve::Serve,
server::{
output::{print_console_info, PrettierOptions, WebServerInfo},
setup_file_watcher,
},
BuildResult, Result,
};
use dioxus_cli_config::CrateConfig;
use dioxus_rsx::hot_reload::*;
use std::{
net::{IpAddr, SocketAddr, UdpSocket},
sync::Arc,
};
mod proxy;
mod server;
use server::*;
use super::HotReloadState;
pub fn startup(config: CrateConfig, serve_cfg: &ConfigOptsServe) -> Result<()> {
set_ctrlc_handler(&config);
let ip = serve_cfg
.server_arguments
.addr
.or_else(get_ip)
.unwrap_or(IpAddr::V4(std::net::Ipv4Addr::new(0, 0, 0, 0)));
let hot_reload_state = build_hotreload_filemap(&config);
serve(ip, config, hot_reload_state, serve_cfg)
}
/// Start the server without hot reload
pub fn serve(
ip: IpAddr,
config: CrateConfig,
hot_reload_state: HotReloadState,
opts: &ConfigOptsServe,
) -> Result<()> {
let skip_assets = opts.skip_assets;
let port = opts.server_arguments.port;
// Since web platform doesn't use `rust_flags`, this argument is explicitly
// set to `None`.
let first_build_result = crate::builder::build_web(&config, skip_assets, None)?;
// generate dev-index page
Serve::regen_dev_page(&config, first_build_result.assets.as_ref())?;
tracing::info!("🚀 Starting development server...");
let rt = tokio::runtime::Runtime::new().unwrap();
rt.block_on(async move {
// We got to own watcher so that it exists for the duration of serve
// Otherwise full reload won't work.
let _watcher = setup_file_watcher(
{
let config = config.clone();
let hot_reload_state = hot_reload_state.clone();
move || build(&config, &hot_reload_state, skip_assets)
},
&config,
Some(WebServerInfo { ip, port }),
hot_reload_state.clone(),
)
.await?;
// HTTPS
// Before console info so it can stop if mkcert isn't installed or fails
let rustls_config = get_rustls(&config).await?;
// Print serve info
print_console_info(
&config,
PrettierOptions {
changed: vec![],
warnings: first_build_result.warnings,
elapsed_time: first_build_result.elapsed_time,
},
Some(WebServerInfo { ip, port }),
);
// Router
let router = setup_router(config.clone(), hot_reload_state).await?;
// Start server
start_server(
ip,
port,
router,
opts.open.unwrap_or_default(),
rustls_config,
&config,
)
.await?;
Ok(())
})
}
/// Starts dx serve with no hot reload
async fn start_server(
ip: IpAddr,
port: u16,
router: axum::Router,
start_browser: bool,
rustls: Option<axum_server::tls_rustls::RustlsConfig>,
config: &CrateConfig,
) -> Result<()> {
// If plugins, call on_serve_start event
#[cfg(feature = "plugin")]
crate::plugin::PluginManager::on_serve_start(config)?;
let addr: SocketAddr = SocketAddr::from((ip, port));
// Open the browser
if start_browser {
open_browser(config, ip, port, rustls.is_some());
}
let svc = router.into_make_service();
// Start the server with or without rustls
match rustls {
Some(rustls) => axum_server::bind_rustls(addr, rustls).serve(svc).await?,
None => {
// Create a TCP listener bound to the address
let listener = tokio::net::TcpListener::bind(&addr).await?;
axum::serve(listener, svc).await?
}
}
Ok(())
}
/// Open the browser to the address
pub(crate) fn open_browser(config: &CrateConfig, ip: IpAddr, port: u16, https: bool) {
let protocol = if https { "https" } else { "http" };
let base_path = match config.dioxus_config.web.app.base_path.as_deref() {
Some(base_path) => format!("/{}", base_path.trim_matches('/')),
None => "".to_owned(),
};
_ = open::that(format!("{protocol}://{ip}:{port}{base_path}"));
}
/// Get the network ip
fn get_ip() -> Option<IpAddr> {
let socket = match UdpSocket::bind("0.0.0.0:0") {
Ok(s) => s,
Err(_) => return None,
};
match socket.connect("8.8.8.8:80") {
Ok(()) => (),
Err(_) => return None,
};
match socket.local_addr() {
Ok(addr) => Some(addr.ip()),
Err(_) => None,
}
}
fn build(
config: &CrateConfig,
hot_reload_state: &HotReloadState,
skip_assets: bool,
) -> Result<BuildResult> {
// Since web platform doesn't use `rust_flags`, this argument is explicitly
// set to `None`.
let result = std::panic::catch_unwind(|| builder::build_web(config, skip_assets, None))
.map_err(|e| anyhow::anyhow!("Build failed: {e:?}"))?;
// change the websocket reload state to true;
// the page will auto-reload.
if config.dioxus_config.web.watcher.reload_html {
if let Ok(assets) = result.as_ref().map(|x| x.assets.as_ref()) {
let _ = Serve::regen_dev_page(config, assets);
}
}
hot_reload_state.receiver.reload();
result
}
fn set_ctrlc_handler(config: &CrateConfig) {
// ctrl-c shutdown checker
let _crate_config = config.clone();
let _ = ctrlc::set_handler(move || {
#[cfg(feature = "plugin")]
let _ = crate::plugin::PluginManager::on_serve_shutdown(&_crate_config);
std::process::exit(0);
});
}
fn build_hotreload_filemap(config: &CrateConfig) -> HotReloadState {
HotReloadState {
file_map: config.hot_reload.then(|| {
let FileMapBuildResult { map, errors } =
FileMap::create(config.crate_dir.clone()).unwrap();
for err in errors {
tracing::error!("{}", err);
}
Arc::new(Mutex::new(map))
}),
receiver: Default::default(),
}
}

View file

@ -1,206 +0,0 @@
use crate::{server::HotReloadState, Result};
use axum::{
body::Body,
extract::Extension,
http::{
self,
header::{HeaderName, HeaderValue},
Method, Response, StatusCode,
},
response::IntoResponse,
routing::{get, get_service},
Router,
};
use axum_server::tls_rustls::RustlsConfig;
use dioxus_cli_config::{CrateConfig, WebHttpsConfig};
use dioxus_hot_reload::HotReloadRouterExt;
use std::{fs, io, process::Command};
use tower::ServiceBuilder;
use tower_http::{
cors::{Any, CorsLayer},
services::fs::{ServeDir, ServeFileSystemResponseBody},
ServiceBuilderExt,
};
/// Sets up and returns a router
pub async fn setup_router(config: CrateConfig, hot_reload: HotReloadState) -> Result<Router> {
// Setup cors
let cors = CorsLayer::new()
// allow `GET` and `POST` when accessing the resource
.allow_methods([Method::GET, Method::POST])
// allow requests from any origin
.allow_origin(Any)
.allow_headers(Any);
let (coep, coop) = if config.cross_origin_policy {
(
HeaderValue::from_static("require-corp"),
HeaderValue::from_static("same-origin"),
)
} else {
(
HeaderValue::from_static("unsafe-none"),
HeaderValue::from_static("unsafe-none"),
)
};
// Create file service
let file_service_config = config.clone();
let file_service = ServiceBuilder::new()
.override_response_header(
HeaderName::from_static("cross-origin-embedder-policy"),
coep,
)
.override_response_header(HeaderName::from_static("cross-origin-opener-policy"), coop)
.and_then(move |response| async move { Ok(no_cache(file_service_config, response)) })
.service(ServeDir::new(config.out_dir()));
// Setup router
let mut router = Router::new();
// Setup proxy
for proxy_config in config.dioxus_config.web.proxy {
router = super::proxy::add_proxy(router, &proxy_config)?;
}
// Route file service
router = router.fallback(get_service(file_service).handle_error(
|error: std::convert::Infallible| async move {
(
StatusCode::INTERNAL_SERVER_ERROR,
format!("Unhandled internal error: {}", error),
)
},
));
router = if let Some(base_path) = config.dioxus_config.web.app.base_path.clone() {
let base_path = format!("/{}", base_path.trim_matches('/'));
Router::new()
.nest(&base_path, router)
.fallback(get(move || {
let base_path = base_path.clone();
async move { format!("Outside of the base path: {}", base_path) }
}))
} else {
router
};
// Setup websocket
router = router.connect_hot_reload();
// Setup routes
router = router
.layer(cors)
.layer(Extension(hot_reload.receiver.clone()));
Ok(router)
}
/// Post-process a static-file response for dev serving.
///
/// When `index_on_404` is enabled and the file wasn't found, substitute the
/// built `index.html` (SPA-style routing). In all cases, stamp headers that
/// stop the browser from caching dev artifacts.
fn no_cache(
    file_service_config: CrateConfig,
    response: Response<ServeFileSystemResponseBody>,
) -> Response<Body> {
    let mut response = if file_service_config.dioxus_config.web.watcher.index_on_404
        && response.status() == StatusCode::NOT_FOUND
    {
        let body = Body::from(
            // TODO: Cache/memoize this.
            std::fs::read_to_string(file_service_config.out_dir().join("index.html"))
                // Replaces a redundant `.ok().unwrap()`: panic with context so a
                // missing build output is diagnosable instead of a bare unwrap.
                .expect("index_on_404 is enabled but no index.html was found in the output directory"),
        );
        Response::builder()
            .status(StatusCode::OK)
            .body(body)
            .unwrap()
    } else {
        response.into_response()
    };
    // Disable caching for both HTTP/1.0 (Pragma/Expires) and HTTP/1.1 clients.
    let headers = response.headers_mut();
    headers.insert(
        http::header::CACHE_CONTROL,
        HeaderValue::from_static("no-cache"),
    );
    headers.insert(http::header::PRAGMA, HeaderValue::from_static("no-cache"));
    headers.insert(http::header::EXPIRES, HeaderValue::from_static("0"));
    response
}
const DEFAULT_KEY_PATH: &str = "ssl/key.pem";
const DEFAULT_CERT_PATH: &str = "ssl/cert.pem";
/// Returns an enum of rustls config and a bool if mkcert isn't installed
pub async fn get_rustls(config: &CrateConfig) -> Result<Option<RustlsConfig>> {
let web_config = &config.dioxus_config.web.https;
if web_config.enabled != Some(true) {
return Ok(None);
}
let (cert_path, key_path) = if let Some(true) = web_config.mkcert {
// mkcert, use it
get_rustls_with_mkcert(web_config)?
} else {
// if mkcert not specified or false, don't use it
get_rustls_without_mkcert(web_config)?
};
Ok(Some(
RustlsConfig::from_pem_file(cert_path, key_path).await?,
))
}
pub fn get_rustls_with_mkcert(web_config: &WebHttpsConfig) -> Result<(String, String)> {
// Get paths to store certs, otherwise use ssl/item.pem
let key_path = web_config
.key_path
.clone()
.unwrap_or(DEFAULT_KEY_PATH.to_string());
let cert_path = web_config
.cert_path
.clone()
.unwrap_or(DEFAULT_CERT_PATH.to_string());
// Create ssl directory if using defaults
if key_path == DEFAULT_KEY_PATH && cert_path == DEFAULT_CERT_PATH {
_ = fs::create_dir("ssl");
}
let cmd = Command::new("mkcert")
.args([
"-install",
"-key-file",
&key_path,
"-cert-file",
&cert_path,
"localhost",
"::1",
"127.0.0.1",
])
.spawn();
match cmd {
Err(e) => {
match e.kind() {
io::ErrorKind::NotFound => tracing::error!("mkcert is not installed. See https://github.com/FiloSottile/mkcert#installation for installation instructions."),
e => tracing::error!("an error occurred while generating mkcert certificates: {}", e.to_string()),
};
return Err("failed to generate mkcert certificates".into());
}
Ok(mut cmd) => {
cmd.wait()?;
}
}
Ok((cert_path, key_path))
}
/// Resolve user-supplied certificate and key paths when mkcert is not used.
///
/// Both `key_path` and `cert_path` must be configured; returns `(cert, key)`.
pub fn get_rustls_without_mkcert(web_config: &WebHttpsConfig) -> Result<(String, String)> {
    match (web_config.key_path.clone(), web_config.cert_path.clone()) {
        (Some(key), Some(cert)) => Ok((cert, key)),
        // missing cert or key
        _ => Err("https is enabled but cert or key path is missing".into()),
    }
}

View file

@ -1,4 +1,3 @@
use crate::CrateConfigError;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::{ use std::{
fs, fs,
@ -7,6 +6,8 @@ use std::{
}; };
use tracing::{debug, error, warn}; use tracing::{debug, error, warn};
use crate::CrateConfigError;
const GLOBAL_SETTINGS_FILE_NAME: &str = "dioxus/settings.toml"; const GLOBAL_SETTINGS_FILE_NAME: &str = "dioxus/settings.toml";
/// Describes cli settings from project or global level. /// Describes cli settings from project or global level.
@ -25,6 +26,11 @@ pub struct CliSettings {
} }
impl CliSettings { impl CliSettings {
/// Load the settings from the local, global, or default config in that order
pub fn load() -> Self {
Self::from_global().unwrap_or_default()
}
/// Get the current settings structure from global. /// Get the current settings structure from global.
pub fn from_global() -> Option<Self> { pub fn from_global() -> Option<Self> {
let Some(path) = dirs::data_local_dir() else { let Some(path) = dirs::data_local_dir() else {
@ -84,4 +90,13 @@ impl CliSettings {
Some(path.join(GLOBAL_SETTINGS_FILE_NAME)) Some(path.join(GLOBAL_SETTINGS_FILE_NAME))
} }
/// Modify the settings toml file
pub fn modify_settings(with: impl FnOnce(&mut CliSettings)) -> Result<(), CrateConfigError> {
let mut settings = Self::load();
with(&mut settings);
settings.save()?;
Ok(())
}
} }

View file

@ -17,12 +17,13 @@ proc-macro2 = { version = "1.0" }
quote = { workspace = true } quote = { workspace = true }
syn = { workspace = true, features = ["full", "extra-traits", "visit"] } syn = { workspace = true, features = ["full", "extra-traits", "visit"] }
dioxus-rsx = { workspace = true } dioxus-rsx = { workspace = true }
convert_case = "^0.6.0" convert_case = { workspace = true }
prettyplease = "0.2.15" prettyplease = "0.2.15"
# testing # testing
[dev-dependencies] [dev-dependencies]
dioxus = { workspace = true } dioxus = { workspace = true }
dioxus-html = { workspace = true, features = ["serialize"]}
rustversion = "1.0" rustversion = "1.0"
tokio = { workspace = true, features = ["full", "time"] } tokio = { workspace = true, features = ["full", "time"] }
trybuild = "1.0" trybuild = "1.0"

View file

@ -1,5 +1,6 @@
error: missing trailing comma error: Attributes must be separated by commas
--> tests/rsx/trailing-comma-0.rs:9:20 = help: Did you forget a comma?
--> tests/rsx/trailing-comma-0.rs:9:13
| |
9 | class: "foo bar" 9 | class: "foo bar"
| ^^^^^^^^^ | ^^^^^

View file

@ -103,64 +103,17 @@ impl VirtualDom {
to: &mut impl WriteMutations, to: &mut impl WriteMutations,
mut template: Template, mut template: Template,
) { ) {
// In debug mode, we check the more complete hashmap by byte index if self.templates.contains_key(template.name) {
#[cfg(debug_assertions)]
{
let (path, byte_index) = template.name.rsplit_once(':').unwrap();
let byte_index = byte_index.parse::<usize>().unwrap();
let mut entry = self.templates.entry(path);
// If we've already seen this template, just return
if let std::collections::hash_map::Entry::Occupied(occupied) = &entry {
if occupied.get().contains_key(&byte_index) {
return;
}
}
// Otherwise, insert it and register it
entry.or_default().insert(byte_index, template);
}
// In release mode, everything is built into the &'static str
#[cfg(not(debug_assertions))]
if !self.templates.insert(template.name) {
return; return;
} }
_ = self.templates.insert(template.name, template);
// If it's all dynamic nodes, then we don't need to register it // If it's all dynamic nodes, then we don't need to register it
if !template.is_completely_dynamic() { if !template.is_completely_dynamic() {
to.register_template(template) to.register_template(template)
} }
} }
#[cfg(debug_assertions)]
/// Insert a new template into the VirtualDom's template registry
pub(crate) fn register_template_first_byte_index(&mut self, mut template: Template) {
// First, make sure we mark the template as seen, regardless if we process it
let (path, _) = template.name.rsplit_once(':').unwrap();
if let Some((_, old_template)) = self
.templates
.entry(path)
.or_default()
.iter_mut()
.min_by_key(|(byte_index, _)| **byte_index)
{
// the byte index of the hot reloaded template could be different
template.name = old_template.name;
*old_template = template;
} else {
// This is a template without any current instances
self.templates
.entry(path)
.or_default()
.insert(usize::MAX, template);
}
// If it's all dynamic nodes, then we don't need to register it
if !template.is_completely_dynamic() {
self.queued_templates.push(template);
}
}
} }
/// We can apply various optimizations to dynamic nodes that are the single child of their parent. /// We can apply various optimizations to dynamic nodes that are the single child of their parent.

View file

@ -24,17 +24,14 @@ impl VNode {
// If hot reloading is enabled, we need to make sure we're using the latest template // If hot reloading is enabled, we need to make sure we're using the latest template
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
{ {
let (path, byte_index) = new.template.get().name.rsplit_once(':').unwrap(); let name = new.template.get().name;
if let Some(map) = dom.templates.get(path) { if let Some(template) = dom.templates.get(name).cloned() {
let byte_index = byte_index.parse::<usize>().unwrap(); new.template.set(template);
if let Some(&template) = map.get(&byte_index) { if template != self.template.get() {
new.template.set(template); let mount_id = self.mount.get();
if template != self.template.get() { let parent = dom.mounts[mount_id.0].parent;
let mount_id = self.mount.get(); self.replace(std::slice::from_ref(new), parent, dom, to);
let parent = dom.mounts[mount_id.0].parent; return;
self.replace(std::slice::from_ref(new), parent, dom, to);
return;
}
} }
} }
} }
@ -576,12 +573,7 @@ impl VNode {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
{ {
let template = self.template.get(); let template = self.template.get();
let (path, byte_index) = template.name.rsplit_once(':').unwrap(); if let Some(new_template) = dom.templates.get(template.name) {
if let Some(new_template) = dom
.templates
.get(path)
.and_then(|map| map.get(&byte_index.parse().unwrap()))
{
self.template.set(*new_template); self.template.set(*new_template);
} }
}; };
@ -625,6 +617,7 @@ impl VNode {
// Walk the roots, creating nodes and assigning IDs // Walk the roots, creating nodes and assigning IDs
// nodes in an iterator of (dynamic_node_index, path) // nodes in an iterator of (dynamic_node_index, path)
let nodes_sorted = template.breadth_first_node_paths(); let nodes_sorted = template.breadth_first_node_paths();
let attrs_sorted = template.breadth_first_attribute_paths(); let attrs_sorted = template.breadth_first_attribute_paths();
@ -811,6 +804,7 @@ impl VNode {
dynamic_attrbiutes_iter.next_if(from_root_node) dynamic_attrbiutes_iter.next_if(from_root_node)
{ {
let attribute = &self.dynamic_attrs[attribute_idx]; let attribute = &self.dynamic_attrs[attribute_idx];
let id = match last_path { let id = match last_path {
// If the last path was exactly the same, we can reuse the id // If the last path was exactly the same, we can reuse the id
Some((path, id)) if path == attribute_path => id, Some((path, id)) if path == attribute_path => id,

View file

@ -0,0 +1,73 @@
/// A hot-reloaded literal value, pairing the name that identifies the literal
/// in the rsx body with its replacement value.
#[doc(hidden)]
#[cfg_attr(feature = "serialize", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "serialize", serde(bound(deserialize = "'de: 'static")))]
#[derive(Debug, PartialEq, Clone)]
pub struct HotreloadedLiteral {
    /// Which literal this value replaces.
    pub name: String,
    /// The new value for that literal.
    pub value: HotReloadLiteral,
}
/// The kinds of literal values that can be swapped in during hot reload:
/// formatted strings, floats, ints, and bools.
#[doc(hidden)]
#[cfg_attr(feature = "serialize", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "serialize", serde(bound(deserialize = "'de: 'static")))]
#[derive(Debug, PartialEq, Clone)]
pub enum HotReloadLiteral {
    /// A formatted string described by literal/dynamic segments.
    Fmted(FmtedSegments),
    /// A floating point literal.
    Float(f64),
    /// An integer literal.
    Int(i64),
    /// A boolean literal.
    Bool(bool),
}
/// A formatted string represented as an ordered list of segments:
/// static literal chunks interleaved with dynamic placeholders.
#[doc(hidden)]
#[cfg_attr(feature = "serialize", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "serialize", serde(bound(deserialize = "'de: 'static")))]
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct FmtedSegments {
    /// The segments, in render order.
    pub segments: Vec<FmtSegment>,
}
impl FmtedSegments {
    /// Wrap a list of segments into a formatted-string description.
    pub fn new(segments: Vec<FmtSegment>) -> Self {
        Self { segments }
    }

    /// Render the formatted string by stitching together the segments,
    /// substituting `dynamic_nodes[id]` for each dynamic placeholder.
    pub fn render_with(&self, dynamic_nodes: Vec<String>) -> String {
        self.segments
            .iter()
            .map(|segment| match segment {
                FmtSegment::Literal { value } => *value,
                FmtSegment::Dynamic { id } => dynamic_nodes[*id].as_str(),
            })
            .collect()
    }

    /// Replace the segment list with a new one.
    ///
    /// This changes how the formatted string renders from now on.
    pub fn update_segments(&mut self, new_segments: Vec<FmtSegment>) {
        self.segments = new_segments;
    }
}
#[cfg(feature = "serialize")]
use crate::nodes::deserialize_string_leaky;
/// One piece of a formatted string: either a static literal chunk or a
/// placeholder for a dynamic value identified by index.
#[doc(hidden)]
#[cfg_attr(feature = "serialize", derive(serde::Serialize, serde::Deserialize))]
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum FmtSegment {
    /// A static chunk of text copied verbatim into the output.
    Literal {
        #[cfg_attr(
            feature = "serialize",
            serde(deserialize_with = "deserialize_string_leaky")
        )]
        // presumably leaked to 'static on deserialize (see
        // `deserialize_string_leaky`) so it matches compiler-generated
        // template strings — TODO confirm against that helper
        value: &'static str,
    },
    /// A placeholder filled from the dynamic values by index at render time.
    Dynamic {
        id: usize,
    },
}

View file

@ -27,10 +27,16 @@ mod suspense;
mod tasks; mod tasks;
mod virtual_dom; mod virtual_dom;
mod hotreload_utils;
/// Items exported from this module are used in macros and should not be used directly. /// Items exported from this module are used in macros and should not be used directly.
#[doc(hidden)] #[doc(hidden)]
pub mod internal { pub mod internal {
pub use crate::properties::verify_component_called_as_component; pub use crate::properties::verify_component_called_as_component;
pub use crate::hotreload_utils::{
FmtSegment, FmtedSegments, HotReloadLiteral, HotreloadedLiteral,
};
} }
pub(crate) mod innerlude { pub(crate) mod innerlude {
@ -82,13 +88,13 @@ pub mod prelude {
provide_context, provide_root_context, queue_effect, remove_future, schedule_update, provide_context, provide_root_context, queue_effect, remove_future, schedule_update,
schedule_update_any, spawn, spawn_forever, spawn_isomorphic, suspend, throw_error, schedule_update_any, spawn, spawn_forever, spawn_isomorphic, suspend, throw_error,
try_consume_context, use_after_render, use_before_render, use_drop, use_error_boundary, try_consume_context, use_after_render, use_before_render, use_drop, use_error_boundary,
use_hook, use_hook_with_cleanup, with_owner, AnyValue, Attribute, Callback, CapturedError, use_hook, use_hook_with_cleanup, with_owner, AnyValue, Attribute, Callback, Component,
Component, ComponentFunction, Context, Element, ErrorBoundary, ErrorContext, Event, ComponentFunction, Context, Element, ErrorBoundary, ErrorContext, Event, EventHandler,
EventHandler, Fragment, HasAttributes, IntoAttributeValue, IntoDynNode, Fragment, HasAttributes, IntoAttributeValue, IntoDynNode, OptionStringFromMarker,
OptionStringFromMarker, Properties, ReactiveContext, RenderError, Runtime, RuntimeGuard, Properties, ReactiveContext, RenderError, RenderReturn, Runtime, RuntimeGuard, ScopeId,
ScopeId, ScopeState, SuperFrom, SuperInto, SuspendedFuture, SuspenseBoundary, ScopeState, SuperFrom, SuperInto, SuspendedFuture, SuspenseBoundary, SuspenseBoundaryProps,
SuspenseBoundaryProps, SuspenseContext, SuspenseExtension, Task, Template, SuspenseContext, SuspenseExtension, Task, Template, TemplateAttribute, TemplateNode, VNode,
TemplateAttribute, TemplateNode, VNode, VNodeInner, VirtualDom, VNodeInner, VirtualDom,
}; };
} }

View file

@ -385,7 +385,7 @@ pub struct Template {
} }
#[cfg(feature = "serialize")] #[cfg(feature = "serialize")]
fn deserialize_string_leaky<'a, 'de, D>(deserializer: D) -> Result<&'a str, D::Error> pub(crate) fn deserialize_string_leaky<'a, 'de, D>(deserializer: D) -> Result<&'a str, D::Error>
where where
D: serde::Deserializer<'de>, D: serde::Deserializer<'de>,
{ {
@ -1076,18 +1076,34 @@ impl IntoAttributeValue for String {
} }
} }
impl IntoAttributeValue for f32 {
fn into_value(self) -> AttributeValue {
AttributeValue::Float(self as _)
}
}
impl IntoAttributeValue for f64 { impl IntoAttributeValue for f64 {
fn into_value(self) -> AttributeValue { fn into_value(self) -> AttributeValue {
AttributeValue::Float(self) AttributeValue::Float(self)
} }
} }
impl IntoAttributeValue for i32 {
fn into_value(self) -> AttributeValue {
AttributeValue::Int(self as _)
}
}
impl IntoAttributeValue for i64 { impl IntoAttributeValue for i64 {
fn into_value(self) -> AttributeValue { fn into_value(self) -> AttributeValue {
AttributeValue::Int(self) AttributeValue::Int(self)
} }
} }
impl IntoAttributeValue for i128 {
fn into_value(self) -> AttributeValue {
AttributeValue::Int(self as _)
}
}
impl IntoAttributeValue for bool { impl IntoAttributeValue for bool {
fn into_value(self) -> AttributeValue { fn into_value(self) -> AttributeValue {
AttributeValue::Bool(self) AttributeValue::Bool(self)
@ -1130,7 +1146,7 @@ pub trait HasAttributes {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
pub(crate) fn sort_bfo(paths: &[&'static [u8]]) -> Vec<(usize, &'static [u8])> { pub(crate) fn sort_bfo(paths: &[&'static [u8]]) -> Vec<(usize, &'static [u8])> {
let mut with_indecies = paths.iter().copied().enumerate().collect::<Vec<_>>(); let mut with_indecies = paths.iter().copied().enumerate().collect::<Vec<_>>();
with_indecies.sort_unstable_by(|(_, a), (_, b)| { with_indecies.sort_by(|(_, a), (_, b)| {
let mut a = a.iter(); let mut a = a.iter();
let mut b = b.iter(); let mut b = b.iter();
loop { loop {

View file

@ -164,7 +164,7 @@ pub fn verify_component_called_as_component<C: ComponentFunction<P, M>, P, M>(co
diagnostic::on_unimplemented( diagnostic::on_unimplemented(
message = "`Component<{Props}>` is not implemented for `{Self}`", message = "`Component<{Props}>` is not implemented for `{Self}`",
label = "Component", label = "Component",
note = "Components are functions in the form `fn() -> Element`, `fn(props: Properties) -> Element`, or `#[component] fn(partial_eq1: u32, partial_eq2: u32) -> Element`.", note = "Components are functions in the form `fn() -> Element`, `fn(props: Properties) -> Element`, or `#[component] fn(partial_eq1: u32, partial_eq2: u32) -> Element`.",
note = "You may have forgotten to add `#[component]` to your function to automatically implement the `ComponentFunction` trait." note = "You may have forgotten to add `#[component]` to your function to automatically implement the `ComponentFunction` trait."
) )
)] )]

View file

@ -138,6 +138,8 @@ impl ReactiveContext {
/// Clear all subscribers to this context /// Clear all subscribers to this context
pub fn clear_subscribers(&self) { pub fn clear_subscribers(&self) {
// The key type is mutable, but the hash is stable through mutations because we hash by pointer
#[allow(clippy::mutable_key_type)]
let old_subscribers = std::mem::take(&mut self.inner.write().subscribers); let old_subscribers = std::mem::take(&mut self.inner.write().subscribers);
for subscriber in old_subscribers { for subscriber in old_subscribers {
subscriber.0.lock().unwrap().remove(self); subscriber.0.lock().unwrap().remove(self);
@ -146,6 +148,7 @@ impl ReactiveContext {
/// Update the subscribers /// Update the subscribers
pub(crate) fn update_subscribers(&self) { pub(crate) fn update_subscribers(&self) {
#[allow(clippy::mutable_key_type)]
let subscribers = &self.inner.read().subscribers; let subscribers = &self.inner.read().subscribers;
for subscriber in subscribers.iter() { for subscriber in subscribers.iter() {
subscriber.0.lock().unwrap().insert(*self); subscriber.0.lock().unwrap().insert(*self);

View file

@ -82,7 +82,6 @@ impl Runtime {
{ {
let borrow = self.scope_states.borrow(); let borrow = self.scope_states.borrow();
if let Some(scope) = &borrow[id.0] { if let Some(scope) = &borrow[id.0] {
let _runtime_guard = RuntimeGuard::new(self.clone());
// Manually drop tasks, hooks, and contexts inside of the runtime // Manually drop tasks, hooks, and contexts inside of the runtime
self.on_scope(id, || { self.on_scope(id, || {
// Drop all spawned tasks - order doesn't matter since tasks don't rely on eachother // Drop all spawned tasks - order doesn't matter since tasks don't rely on eachother
@ -112,7 +111,8 @@ impl Runtime {
/// Call this function with the current scope set to the given scope /// Call this function with the current scope set to the given scope
/// ///
/// Useful in a limited number of scenarios /// Useful in a limited number of scenarios
pub fn on_scope<O>(&self, id: ScopeId, f: impl FnOnce() -> O) -> O { pub fn on_scope<O>(self: &Rc<Self>, id: ScopeId, f: impl FnOnce() -> O) -> O {
let _runtime_guard = RuntimeGuard::new(self.clone());
{ {
self.push_scope(id); self.push_scope(id);
} }
@ -164,7 +164,7 @@ impl Runtime {
/// Runs a function with the current runtime /// Runs a function with the current runtime
pub(crate) fn with<R>(f: impl FnOnce(&Runtime) -> R) -> Option<R> { pub(crate) fn with<R>(f: impl FnOnce(&Runtime) -> R) -> Option<R> {
RUNTIMES.with(|stack| stack.borrow().last().map(|r| f(r))) Self::current().map(|r| f(&r))
} }
/// Runs a function with the current scope /// Runs a function with the current scope

View file

@ -18,6 +18,7 @@ use crate::{
}; };
use crate::{Task, VComponent}; use crate::{Task, VComponent};
use futures_util::StreamExt; use futures_util::StreamExt;
use rustc_hash::FxHashMap;
use slab::Slab; use slab::Slab;
use std::collections::BTreeSet; use std::collections::BTreeSet;
use std::{any::Any, rc::Rc}; use std::{any::Any, rc::Rc};
@ -207,13 +208,8 @@ pub struct VirtualDom {
pub(crate) dirty_scopes: BTreeSet<ScopeOrder>, pub(crate) dirty_scopes: BTreeSet<ScopeOrder>,
// Maps a template path to a map of byte indexes to templates // A map of overridden templates?
// if hot reload is enabled, we need to keep track of template overrides pub(crate) templates: FxHashMap<TemplateId, Template>,
#[cfg(debug_assertions)]
pub(crate) templates: rustc_hash::FxHashMap<TemplateId, rustc_hash::FxHashMap<usize, Template>>,
// Otherwise, we just need to keep track of what templates we have registered
#[cfg(not(debug_assertions))]
pub(crate) templates: rustc_hash::FxHashSet<TemplateId>,
// Templates changes that are queued for the next render // Templates changes that are queued for the next render
pub(crate) queued_templates: Vec<Template>, pub(crate) queued_templates: Vec<Template>,
@ -576,17 +572,20 @@ impl VirtualDom {
// we only replace templates if hot reloading is enabled // we only replace templates if hot reloading is enabled
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
{ {
self.register_template_first_byte_index(template); // Save the template ID
self.templates.insert(template.name, template);
// Only queue the template to be written if its not completely dynamic
if !template.is_completely_dynamic() {
self.queued_templates.push(template);
}
// iterating a slab is very inefficient, but this is a rare operation that will only happen during development so it's fine // iterating a slab is very inefficient, but this is a rare operation that will only happen during development so it's fine
let mut dirty = Vec::new(); let mut dirty = Vec::new();
for (id, scope) in self.scopes.iter() { for (id, scope) in self.scopes.iter() {
// Recurse into the dynamic nodes of the existing mounted node to see if the template is alive in the tree // Recurse into the dynamic nodes of the existing mounted node to see if the template is alive in the tree
fn check_node_for_templates(node: &crate::VNode, template: Template) -> bool { fn check_node_for_templates(node: &crate::VNode, template: Template) -> bool {
let this_template_name = node.template.get().name.rsplit_once(':').unwrap().0; if node.template.get().name == template.name {
let other_template_name = template.name.rsplit_once(':').unwrap().0;
if this_template_name == other_template_name {
return true; return true;
} }

View file

@ -39,7 +39,12 @@ fn component_swap() {
fn nav_bar() -> Element { fn nav_bar() -> Element {
rsx! { rsx! {
h1 { "NavBar", {(0..3).map(|_| rsx!(nav_link {}))} } h1 {
"NavBar"
for _ in 0..3 {
nav_link {}
}
}
} }
} }

View file

@ -0,0 +1,38 @@
use dioxus::prelude::*;
use dioxus_core::ElementId;
use dioxus_core::Mutation::{AppendChildren, LoadTemplate};
/// Swap out the template and get it back via the mutation
#[test]
fn hotreloads_template() {
    // Original dom renders the text "A"
    let old_rsx = rsx! { "A" };
    // Capture the compiler-assigned template name so the replacement
    // targets the same template
    let name = old_rsx.as_ref().unwrap().template.get().name;

    let mut dom = VirtualDom::new_with_props(move |_| old_rsx.clone(), ());

    // Replacement template: same name, different contents ("B")
    let new_template = Template {
        name,
        roots: &[TemplateNode::Text { text: "B" }],
        node_paths: &[],
        attr_paths: &[],
    };
    dom.replace_template(new_template);

    let muts = dom.rebuild_to_vec();

    // New template comes out
    assert_eq!(muts.templates.len(), 1);

    // The rebuild loads the (replaced) template and appends it to the root.
    // NOTE(review): the expected name string encodes this file's path and
    // the rsx! call's location, so moving this test changes it.
    assert_eq!(
        muts.edits,
        [
            LoadTemplate {
                name: "packages/core/tests/hotreload.rs:8:19:0",
                index: 0,
                id: ElementId(1,),
            },
            AppendChildren { id: ElementId(0,), m: 1 },
        ]
    )
}

View file

@ -6,11 +6,13 @@ fn basic_syntax_is_a_template() -> Element {
let var = 123; let var = 123;
rsx! { rsx! {
div { key: "{asd}", class: "asd", class: "{asd}", class: if true { div {
"{asd}" key: "{asd}",
}, class: if false { class: "asd",
"{asd}" class: "{asd}",
}, onclick: move |_| {}, class: if true { "{asd}" },
class: if false { "{asd}" },
onclick: move |_| {},
div { "{var}" } div { "{var}" }
div { div {
h1 { "var" } h1 { "var" }
@ -40,7 +42,7 @@ fn dual_stream() {
HydrateText { path: &[0, 0], value: "123".to_string(), id: ElementId(2) }, HydrateText { path: &[0, 0], value: "123".to_string(), id: ElementId(2) },
SetAttribute { SetAttribute {
name: "class", name: "class",
value: "asd 123 123".into_value(), value: "asd 123 123 ".into_value(),
id: ElementId(1), id: ElementId(1),
ns: None, ns: None,
}, },

View file

@ -17,9 +17,12 @@ dioxus-html = { workspace = true, features = [
"mounted", "mounted",
"document", "document",
] } ] }
dioxus-signals = { workspace = true, optional = true }
dioxus-interpreter-js = { workspace = true, features = ["binary-protocol"] } dioxus-interpreter-js = { workspace = true, features = ["binary-protocol"] }
dioxus-cli-config = { workspace = true, features = ["read-config"] } dioxus-cli-config = { workspace = true, features = ["read-config"] }
generational-box = { workspace = true } generational-box = { workspace = true }
# hotreload only works on desktop platforms.... mobile is still wip
dioxus-hot-reload = { workspace = true, optional = true, features = ["serve", "client"]}
serde = "1.0.136" serde = "1.0.136"
serde_json = "1.0.79" serde_json = "1.0.79"
@ -50,6 +53,7 @@ urlencoding = "2.1.2"
async-trait = "0.1.68" async-trait = "0.1.68"
tao = { version = "0.26.1", features = ["rwh_05"] } tao = { version = "0.26.1", features = ["rwh_05"] }
[target.'cfg(unix)'.dependencies] [target.'cfg(unix)'.dependencies]
signal-hook = "0.3.17" signal-hook = "0.3.17"
@ -57,13 +61,20 @@ signal-hook = "0.3.17"
global-hotkey = "0.5.0" global-hotkey = "0.5.0"
rfd = "0.14" rfd = "0.14"
muda = "0.11.3" muda = "0.11.3"
# hotreload only works on desktop platforms.... mobile is still wip
dioxus-hot-reload = { workspace = true, optional = true }
[target.'cfg(target_os = "ios")'.dependencies] [target.'cfg(target_os = "ios")'.dependencies]
objc = "0.2.7" objc = "0.2.7"
objc_id = "0.1.1" objc_id = "0.1.1"
# use rustls on android
[target.'cfg(target_os = "android")'.dependencies]
tokio-tungstenite = { workspace = true, optional = true, features = ["rustls"]}
# use native tls on other platforms
[target.'cfg(not(target_os = "android"))'.dependencies]
tokio-tungstenite = { workspace = true, optional = true, features = ["native-tls"]}
[target.'cfg(target_os = "macos")'.dependencies] [target.'cfg(target_os = "macos")'.dependencies]
cocoa = "0.25" cocoa = "0.25"
core-foundation = "0.9.3" core-foundation = "0.9.3"
@ -75,7 +86,7 @@ tokio_runtime = ["dep:tokio"]
fullscreen = ["wry/fullscreen"] fullscreen = ["wry/fullscreen"]
transparent = ["wry/transparent"] transparent = ["wry/transparent"]
devtools = ["wry/devtools"] devtools = ["wry/devtools"]
hot-reload = ["dep:dioxus-hot-reload"] hot-reload = ["dep:dioxus-hot-reload", "dioxus-signals"]
gnu = [] gnu = []
[package.metadata.docs.rs] [package.metadata.docs.rs]
@ -86,7 +97,7 @@ cargo-args = ["-Zunstable-options", "-Zrustdoc-scrape-examples"]
[dev-dependencies] [dev-dependencies]
dioxus = { workspace = true, features = ["desktop"] } dioxus = { workspace = true, features = ["desktop"] }
exitcode = "1.1.2" exitcode = "1.1.2"
reqwest = { version = "0.11.9", features = ["json"] } reqwest = { workspace = true, features = ["json"] }
http-range = { version = "0.1.5" } http-range = { version = "0.1.5" }
dioxus-ssr = { workspace = true, version = "0.5.0", default-features = false } dioxus-ssr = { workspace = true, version = "0.5.0", default-features = false }
separator = "0.4.1" separator = "0.4.1"

View file

@ -148,14 +148,16 @@ impl App {
not(target_os = "ios") not(target_os = "ios")
))] ))]
pub fn connect_hotreload(&self) { pub fn connect_hotreload(&self) {
let Ok(cfg) = dioxus_cli_config::CURRENT_CONFIG.as_ref() else { let proxy = self.shared.proxy.clone();
return;
};
dioxus_hot_reload::connect_at(cfg.target_dir.join("dioxusin"), { tokio::task::spawn(async move {
let proxy = self.shared.proxy.clone(); let Some(Ok(mut receiver)) = dioxus_hot_reload::NativeReceiver::create_from_cli().await
move |template| { else {
let _ = proxy.send_event(UserWindowEvent::HotReloadEvent(template)); return;
};
while let Some(Ok(msg)) = receiver.next().await {
_ = proxy.send_event(UserWindowEvent::HotReloadEvent(msg));
} }
}); });
} }
@ -327,23 +329,27 @@ impl App {
not(target_os = "android"), not(target_os = "android"),
not(target_os = "ios") not(target_os = "ios")
))] ))]
pub fn handle_hot_reload_msg(&mut self, msg: dioxus_hot_reload::HotReloadMsg) { pub fn handle_hot_reload_msg(&mut self, msg: dioxus_hot_reload::DevserverMsg) {
match msg { match msg {
dioxus_hot_reload::HotReloadMsg::UpdateTemplate(template) => { dioxus_hot_reload::DevserverMsg::HotReload(hr_msg) => {
for webview in self.webviews.values_mut() { for webview in self.webviews.values_mut() {
webview.dom.replace_template(template); dioxus_hot_reload::apply_changes(&mut webview.dom, &hr_msg);
webview.poll_vdom(); webview.poll_vdom();
} }
}
dioxus_hot_reload::HotReloadMsg::Shutdown => {
self.control_flow = ControlFlow::Exit;
}
dioxus_hot_reload::HotReloadMsg::UpdateAsset(_) => { if !hr_msg.assets.is_empty() {
for webview in self.webviews.values_mut() { for webview in self.webviews.values_mut() {
webview.kick_stylsheets(); webview.kick_stylsheets();
}
} }
} }
dioxus_hot_reload::DevserverMsg::FullReload => {
// usually only web gets this message - what are we supposed to do?
// Maybe we could just binary patch ourselves in place without losing window state?
}
dioxus_hot_reload::DevserverMsg::Shutdown => {
self.control_flow = ControlFlow::Exit;
}
} }
} }
@ -458,12 +464,9 @@ impl App {
monitor: monitor.name().unwrap().to_string(), monitor: monitor.name().unwrap().to_string(),
}; };
// Yes... I know... we're loading a file that might not be ours... but it's a debug feature
if let Ok(state) = serde_json::to_string(&state) { if let Ok(state) = serde_json::to_string(&state) {
// Write this to the target dir so we can pick back up in resume_from_state _ = std::fs::write(restore_file(), state);
if let Ok(cfg) = dioxus_cli_config::CURRENT_CONFIG.as_ref() {
let path = cfg.target_dir.join("window_state.json");
_ = std::fs::write(path, state);
}
} }
} }
} }
@ -471,18 +474,13 @@ impl App {
// Write this to the target dir so we can pick back up // Write this to the target dir so we can pick back up
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
fn resume_from_state(&mut self, webview: &WebviewInstance) { fn resume_from_state(&mut self, webview: &WebviewInstance) {
if let Ok(cfg) = dioxus_cli_config::CURRENT_CONFIG.as_ref() { if let Ok(state) = std::fs::read_to_string(restore_file()) {
let path = cfg.target_dir.join("window_state.json"); if let Ok(state) = serde_json::from_str::<PreservedWindowState>(&state) {
if let Ok(state) = std::fs::read_to_string(path) { let window = &webview.desktop_context.window;
if let Ok(state) = serde_json::from_str::<PreservedWindowState>(&state) { let position = (state.x, state.y);
let window = &webview.desktop_context.window; let size = (state.width, state.height);
let position = (state.x, state.y); window.set_outer_position(tao::dpi::PhysicalPosition::new(position.0, position.1));
let size = (state.width, state.height); window.set_inner_size(tao::dpi::PhysicalSize::new(size.0, size.1));
window.set_outer_position(tao::dpi::PhysicalPosition::new(
position.0, position.1,
));
window.set_inner_size(tao::dpi::PhysicalSize::new(size.0, size.1));
}
} }
} }
} }
@ -552,3 +550,24 @@ pub fn hide_app_window(window: &wry::WebView) {
}); });
} }
} }
/// Return the location of a tempfile with our window state in it such that we can restore it later
#[cfg(debug_assertions)]
fn restore_file() -> std::path::PathBuf {
/// Get the name of the program or default to "dioxus" so we can hash it
fn get_prog_name_or_default() -> Option<String> {
Some(
std::env::current_exe()
.ok()?
.file_name()?
.to_str()?
.to_string(),
)
}
let name = get_prog_name_or_default().unwrap_or_else(|| "dioxus".to_string());
let hashed_id = name.chars().map(|c| c as usize).sum::<usize>();
let mut path = std::env::temp_dir();
path.push(format!("{}-window-state.json", hashed_id));
path
}

View file

@ -1,4 +1,4 @@
use dioxus_core::prelude::{Runtime, RuntimeGuard, ScopeId}; use dioxus_core::prelude::{Runtime, ScopeId};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use std::{cell::RefCell, rc::Rc}; use std::{cell::RefCell, rc::Rc};
use wry::{http::Request, RequestAsyncResponder}; use wry::{http::Request, RequestAsyncResponder};
@ -36,9 +36,6 @@ impl AssetHandlerRegistry {
responder: RequestAsyncResponder, responder: RequestAsyncResponder,
) { ) {
if let Some(handler) = self.handlers.borrow().get(name) { if let Some(handler) = self.handlers.borrow().get(name) {
// Push the runtime onto the stack
let _guard = RuntimeGuard::new(self.dom_rt.clone());
// And run the handler in the scope of the component that created it // And run the handler in the scope of the component that created it
self.dom_rt self.dom_rt
.on_scope(handler.scope, || (handler.f)(request, responder)); .on_scope(handler.scope, || (handler.f)(request, responder));

View file

@ -53,7 +53,7 @@ impl Config {
.with_title( .with_title(
dioxus_cli_config::CURRENT_CONFIG dioxus_cli_config::CURRENT_CONFIG
.as_ref() .as_ref()
.map(|c| c.dioxus_config.application.name.clone()) .map(|c| c.application.name.clone())
.unwrap_or("Dioxus App".to_string()), .unwrap_or("Dioxus App".to_string()),
) )
// During development we want the window to be on top so we can see it while we work // During development we want the window to be on top so we can see it while we work

View file

@ -24,7 +24,7 @@ pub enum UserWindowEvent {
not(target_os = "android"), not(target_os = "android"),
not(target_os = "ios") not(target_os = "ios")
))] ))]
HotReloadEvent(dioxus_hot_reload::HotReloadMsg), HotReloadEvent(dioxus_hot_reload::DevserverMsg),
/// Create a new window /// Create a new window
NewWindow, NewWindow,

View file

@ -255,7 +255,7 @@ fn get_asset_root() -> Option<PathBuf> {
if running_in_dev_mode() { if running_in_dev_mode() {
return dioxus_cli_config::CURRENT_CONFIG return dioxus_cli_config::CURRENT_CONFIG
.as_ref() .as_ref()
.map(|c| c.out_dir()) .map(|c| c.application.out_dir.clone())
.ok(); .ok();
} }

Some files were not shown because too many files have changed in this diff Show more