Mirror of https://github.com/DioxusLabs/dioxus (synced 2024-11-23 04:33:06 +00:00)

Merge branch 'DioxusLabs:master' into bump-wry

Commit 1f58530b43: 117 changed files with 1913 additions and 525 deletions
.github/workflows/docs stable.yml (vendored, 2 changes)

@@ -33,7 +33,7 @@ jobs:
# cd fermi && mdbook build -d ../nightly/fermi && cd ..

- name: Deploy 🚀
uses: JamesIves/github-pages-deploy-action@v4.4.3
uses: JamesIves/github-pages-deploy-action@v4.5.0
with:
branch: gh-pages # The branch the action should deploy to.
folder: docs/nightly # The folder the action should deploy.
.github/workflows/docs.yml (vendored, 2 changes)

@@ -39,7 +39,7 @@ jobs:
# cd fermi && mdbook build -d ../nightly/fermi && cd ..

- name: Deploy 🚀
uses: JamesIves/github-pages-deploy-action@v4.4.3
uses: JamesIves/github-pages-deploy-action@v4.5.0
with:
branch: gh-pages # The branch the action should deploy to.
folder: docs/nightly # The folder the action should deploy.
.github/workflows/main.yml (vendored, 7 changes)

@@ -124,8 +124,6 @@ jobs:
}

steps:
- uses: actions/checkout@v4

- name: install stable
uses: dtolnay/rust-toolchain@master
with:

@@ -141,6 +139,11 @@ jobs:
workspaces: core -> ../target
save-if: ${{ matrix.features.key == 'all' }}

- name: Install rustfmt
run: rustup component add rustfmt

- uses: actions/checkout@v4

- name: test
run: |
${{ env.RUST_CARGO_COMMAND }} ${{ matrix.platform.command }} ${{ matrix.platform.args }} --target ${{ matrix.platform.target }}
.github/workflows/miri.yml (vendored, 3 changes)

@@ -86,8 +86,7 @@ jobs:

# working-directory: tokio
env:
# todo: disable memory leaks ignore
MIRIFLAGS: -Zmiri-disable-isolation -Zmiri-strict-provenance -Zmiri-retag-fields -Zmiri-ignore-leaks
MIRIFLAGS: -Zmiri-disable-isolation -Zmiri-strict-provenance -Zmiri-retag-fields
PROPTEST_CASES: 10

# Cache the global cargo directory, but NOT the local `target` directory which
.github/workflows/playwright.yml (vendored, 2 changes)

@@ -43,7 +43,7 @@ jobs:
# args: --path packages/cli
- name: Run Playwright tests
run: npx playwright test
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
if: always()
with:
name: playwright-report
@@ -50,7 +50,7 @@ members = [
exclude = ["examples/mobile_demo"]

[workspace.package]
version = "0.4.2"
version = "0.4.3"

# dependencies that are shared across packages
[workspace.dependencies]

@@ -77,7 +77,7 @@ dioxus-native-core = { path = "packages/native-core", version = "0.4.0" }
dioxus-native-core-macro = { path = "packages/native-core-macro", version = "0.4.0" }
rsx-rosetta = { path = "packages/rsx-rosetta", version = "0.4.0" }
dioxus-signals = { path = "packages/signals" }
generational-box = { path = "packages/generational-box" }
generational-box = { path = "packages/generational-box", version = "0.4.3" }
dioxus-hot-reload = { path = "packages/hot-reload", version = "0.4.0" }
dioxus-fullstack = { path = "packages/fullstack", version = "0.4.1" }
dioxus_server_macro = { path = "packages/server-macro", version = "0.4.1" }

@@ -88,7 +88,7 @@ slab = "0.4.2"
futures-channel = "0.3.21"
futures-util = { version = "0.3", default-features = false }
rustc-hash = "1.1.0"
wasm-bindgen = "0.2.87"
wasm-bindgen = "0.2.88"
html_parser = "0.7.0"
thiserror = "1.0.40"
prettyplease = { package = "prettier-please", version = "0.2", features = [

@@ -99,7 +99,7 @@ prettyplease = { package = "prettier-please", version = "0.2", features = [
# It is not meant to be published, but is used so "cargo run --example XYZ" works properly
[package]
name = "dioxus-examples"
version = "0.0.0"
version = "0.4.3"
authors = ["Jonathan Kelley"]
edition = "2021"
description = "Top level crate for the Dioxus repository"
@@ -24,12 +24,64 @@ script = [
]
script_runner = "@duckscript"

[tasks.format]
command = "cargo"
args = ["fmt", "--all"]

[tasks.check]
command = "cargo"
args = ["check", "--workspace", "--examples", "--tests"]

[tasks.clippy]
command = "cargo"
args = [
"clippy",
"--workspace",
"--examples",
"--tests",
"--",
"-D",
"warnings",
]

[tasks.tidy]
category = "Formatting"
dependencies = ["format", "check", "clippy"]
description = "Format and Check workspace"

[tasks.install-miri]
toolchain = "nightly"
install_crate = { rustup_component_name = "miri", binary = "cargo +nightly miri", test_arg = "--help" }
private = true

[tasks.miri-native]
command = "cargo"
toolchain = "nightly"
dependencies = ["install-miri"]
args = [
"miri",
"test",
"--package",
"dioxus-native-core",
"--test",
"miri_native",
]

[tasks.miri-stress]
command = "cargo"
toolchain = "nightly"
dependencies = ["install-miri"]
args = ["miri", "test", "--package", "dioxus-core", "--test", "miri_stress"]

[tasks.miri]
dependencies = ["miri-native", "miri-stress"]

[tasks.tests]
category = "Testing"
dependencies = ["tests-setup"]
description = "Run all tests"
env = {CARGO_MAKE_WORKSPACE_SKIP_MEMBERS = ["**/examples/*"]}
run_task = {name = ["test-flow", "test-with-browser"], fork = true}
env = { CARGO_MAKE_WORKSPACE_SKIP_MEMBERS = ["**/examples/*"] }
run_task = { name = ["test-flow", "test-with-browser"], fork = true }

[tasks.build]
command = "cargo"

@@ -42,10 +94,24 @@ private = true
[tasks.test]
dependencies = ["build"]
command = "cargo"
args = ["test", "--lib", "--bins", "--tests", "--examples", "--workspace", "--exclude", "dioxus-router", "--exclude", "dioxus-desktop"]
args = [
"test",
"--lib",
"--bins",
"--tests",
"--examples",
"--workspace",
"--exclude",
"dioxus-router",
"--exclude",
"dioxus-desktop",
]
private = true

[tasks.test-with-browser]
env = { CARGO_MAKE_WORKSPACE_INCLUDE_MEMBERS = ["**/packages/router", "**/packages/desktop"] }
env = { CARGO_MAKE_WORKSPACE_INCLUDE_MEMBERS = [
"**/packages/router",
"**/packages/desktop",
] }
private = true
workspace = true
@@ -161,7 +161,7 @@ So... Dioxus is great, but why won't it work for me?
## Contributing
- Check out the website [section on contributing](https://dioxuslabs.com/learn/0.4/contributing).
- Report issues on our [issue tracker](https://github.com/dioxuslabs/dioxus/issues).
- Join the discord and ask questions!
- [Join](https://discord.gg/XgGxMSkvUM) the discord and ask questions!


<a href="https://github.com/dioxuslabs/dioxus/graphs/contributors">
@@ -62,6 +62,7 @@ fn app(cx: Scope) -> Element {
div { id: "wrapper",
div { class: "app",
div { class: "calculator",
tabindex: "0",
onkeydown: handle_key_down_event,
div { class: "calculator-display", val.to_string() }
div { class: "calculator-keypad",
@@ -2,6 +2,7 @@
name = "openid_auth_demo"
version = "0.1.0"
edition = "2021"
publish = false

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -16,8 +16,20 @@ fn app(cx: Scope) -> Element {
a: "asd".to_string(),
c: "asd".to_string(),
d: Some("asd".to_string()),
e: Some("asd".to_string()),
}
Button {
a: "asd".to_string(),
b: "asd".to_string(),
c: "asd".to_string(),
d: Some("asd".to_string()),
e: "asd".to_string(),
}
Button {
a: "asd".to_string(),
c: "asd".to_string(),
d: Some("asd".to_string()),
}
})
}
@@ -2,6 +2,7 @@
name = "query_segments_demo"
version = "0.1.0"
edition = "2021"
publish = false

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -8,13 +8,14 @@ use std::fmt::{Result, Write};

use dioxus_rsx::IfmtInput;

use crate::write_ifmt;
use crate::{indent::IndentOptions, write_ifmt};

/// The output buffer that tracks indent and string
#[derive(Debug, Default)]
pub struct Buffer {
pub buf: String,
pub indent: usize,
pub indent_level: usize,
pub indent: IndentOptions,
}

impl Buffer {

@@ -31,16 +32,16 @@ impl Buffer {
}

pub fn tab(&mut self) -> Result {
self.write_tabs(self.indent)
self.write_tabs(self.indent_level)
}

pub fn indented_tab(&mut self) -> Result {
self.write_tabs(self.indent + 1)
self.write_tabs(self.indent_level + 1)
}

pub fn write_tabs(&mut self, num: usize) -> std::fmt::Result {
for _ in 0..num {
write!(self.buf, "    ")?
write!(self.buf, "{}", self.indent.indent_str())?
}
Ok(())
}
@@ -66,7 +66,7 @@ impl Writer<'_> {

// check if we have a lot of attributes
let attr_len = self.is_short_attrs(attributes);
let is_short_attr_list = (attr_len + self.out.indent * 4) < 80;
let is_short_attr_list = (attr_len + self.out.indent_level * 4) < 80;
let children_len = self.is_short_children(children);
let is_small_children = children_len.is_some();

@@ -86,7 +86,7 @@ impl Writer<'_> {

// if we have few children and few attributes, make it a one-liner
if is_short_attr_list && is_small_children {
if children_len.unwrap() + attr_len + self.out.indent * 4 < 100 {
if children_len.unwrap() + attr_len + self.out.indent_level * 4 < 100 {
opt_level = ShortOptimization::Oneliner;
} else {
opt_level = ShortOptimization::PropsOnTop;

@@ -185,11 +185,11 @@ impl Writer<'_> {
}

while let Some(attr) = attr_iter.next() {
self.out.indent += 1;
self.out.indent_level += 1;
if !sameline {
self.write_comments(attr.attr.start())?;
}
self.out.indent -= 1;
self.out.indent_level -= 1;

if !sameline {
self.out.indented_tabbed_line()?;

@@ -398,14 +398,14 @@ impl Writer<'_> {
for idx in start.line..end.line {
let line = &self.src[idx];
if line.trim().starts_with("//") {
for _ in 0..self.out.indent + 1 {
for _ in 0..self.out.indent_level + 1 {
write!(self.out, "    ")?
}
writeln!(self.out, "{}", line.trim()).unwrap();
}
}

for _ in 0..self.out.indent {
for _ in 0..self.out.indent_level {
write!(self.out, "    ")?
}
@@ -29,7 +29,7 @@ impl Writer<'_> {
let first_line = &self.src[start.line - 1];
write!(self.out, "{}", &first_line[start.column - 1..].trim_start())?;

let prev_block_indent_level = crate::leading_whitespaces(first_line) / 4;
let prev_block_indent_level = self.out.indent.count_indents(first_line);

for (id, line) in self.src[start.line..end.line].iter().enumerate() {
writeln!(self.out)?;

@@ -43,9 +43,9 @@ impl Writer<'_> {
};

// trim the leading whitespace
let previous_indent = crate::leading_whitespaces(line) / 4;
let previous_indent = self.out.indent.count_indents(line);
let offset = previous_indent.saturating_sub(prev_block_indent_level);
let required_indent = self.out.indent + offset;
let required_indent = self.out.indent_level + offset;
self.out.write_tabs(required_indent)?;

let line = line.trim_start();
packages/autofmt/src/indent.rs (new file, 108 lines)

@@ -0,0 +1,108 @@
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum IndentType {
    Spaces,
    Tabs,
}

#[derive(Debug, Clone)]
pub struct IndentOptions {
    width: usize,
    indent_string: String,
}

impl IndentOptions {
    pub fn new(typ: IndentType, width: usize) -> Self {
        assert_ne!(width, 0, "Cannot have an indent width of 0");
        Self {
            width,
            indent_string: match typ {
                IndentType::Tabs => "\t".into(),
                IndentType::Spaces => " ".repeat(width),
            },
        }
    }

    /// Gets a string containing one indent worth of whitespace
    pub fn indent_str(&self) -> &str {
        &self.indent_string
    }

    /// Computes the line length in characters, counting tabs as the indent width.
    pub fn line_length(&self, line: &str) -> usize {
        line.chars()
            .map(|ch| if ch == '\t' { self.width } else { 1 })
            .sum()
    }

    /// Estimates how many times the line has been indented.
    pub fn count_indents(&self, mut line: &str) -> usize {
        let mut indent = 0;
        while !line.is_empty() {
            // Try to count tabs
            let num_tabs = line.chars().take_while(|ch| *ch == '\t').count();
            if num_tabs > 0 {
                indent += num_tabs;
                line = &line[num_tabs..];
                continue;
            }

            // Try to count spaces
            let num_spaces = line.chars().take_while(|ch| *ch == ' ').count();
            if num_spaces >= self.width {
                // Intentionally floor here to take only the amount of space that matches an indent
                let num_space_indents = num_spaces / self.width;
                indent += num_space_indents;
                line = &line[num_space_indents * self.width..];
                continue;
            }

            // Line starts with either non-indent characters or an uneven amount of spaces,
            // so no more indent remains.
            break;
        }
        indent
    }
}

impl Default for IndentOptions {
    fn default() -> Self {
        Self::new(IndentType::Spaces, 4)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn count_indents() {
        assert_eq!(
            IndentOptions::new(IndentType::Spaces, 4).count_indents("no indentation here!"),
            0
        );
        assert_eq!(
            IndentOptions::new(IndentType::Spaces, 4).count_indents("    v += 2"),
            1
        );
        assert_eq!(
            IndentOptions::new(IndentType::Spaces, 4).count_indents("        v += 2"),
            2
        );
        assert_eq!(
            IndentOptions::new(IndentType::Spaces, 4).count_indents("          v += 2"),
            2
        );
        assert_eq!(
            IndentOptions::new(IndentType::Spaces, 4).count_indents("\t\tv += 2"),
            2
        );
        assert_eq!(
            IndentOptions::new(IndentType::Spaces, 4).count_indents("\t\t v += 2"),
            2
        );
        assert_eq!(
            IndentOptions::new(IndentType::Spaces, 2).count_indents("    v += 2"),
            2
        );
    }
}
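The new `IndentOptions` type is what lets the formatter respect a project's indent style instead of hard-coding four spaces. A minimal, self-contained sketch of how it behaves, assuming the `pub use indent::{IndentOptions, IndentType}` re-export added later in this change set:

```rust
use dioxus_autofmt::{IndentOptions, IndentType};

fn main() {
    // A 2-space style: two leading spaces count as one indent level.
    let two_spaces = IndentOptions::new(IndentType::Spaces, 2);
    assert_eq!(two_spaces.count_indents("  foo"), 1);

    // A tab style: each leading tab counts as one indent level,
    // and `indent_str` is the string written per level when emitting output.
    let tabs = IndentOptions::new(IndentType::Tabs, 4);
    assert_eq!(tabs.count_indents("\t\tfoo"), 2);
    assert_eq!(tabs.indent_str(), "\t");
}
```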
@@ -16,8 +16,11 @@ mod collect_macros;
mod component;
mod element;
mod expr;
mod indent;
mod writer;

pub use indent::{IndentOptions, IndentType};

/// A modification to the original file to be applied by an IDE
///
/// Right now this re-writes entire rsx! blocks at a time, instead of precise line-by-line changes.

@@ -47,7 +50,7 @@ pub struct FormattedBlock {
/// back to the file precisely.
///
/// Nested blocks of RSX will be handled automatically
pub fn fmt_file(contents: &str) -> Vec<FormattedBlock> {
pub fn fmt_file(contents: &str, indent: IndentOptions) -> Vec<FormattedBlock> {
let mut formatted_blocks = Vec::new();

let parsed = syn::parse_file(contents).unwrap();

@@ -61,6 +64,7 @@ pub fn fmt_file(contents: &str) -> Vec<FormattedBlock> {
}

let mut writer = Writer::new(contents);
writer.out.indent = indent;

// Don't parse nested macros
let mut end_span = LineColumn { column: 0, line: 0 };

@@ -76,7 +80,10 @@ pub fn fmt_file(contents: &str) -> Vec<FormattedBlock> {

let rsx_start = macro_path.span().start();

writer.out.indent = leading_whitespaces(writer.src[rsx_start.line - 1]) / 4;
writer.out.indent_level = writer
.out
.indent
.count_indents(writer.src[rsx_start.line - 1]);

write_body(&mut writer, &body);

@@ -159,12 +166,13 @@ pub fn fmt_block_from_expr(raw: &str, expr: ExprMacro) -> Option<String> {
buf.consume()
}

pub fn fmt_block(block: &str, indent_level: usize) -> Option<String> {
pub fn fmt_block(block: &str, indent_level: usize, indent: IndentOptions) -> Option<String> {
let body = syn::parse_str::<dioxus_rsx::CallBody>(block).unwrap();

let mut buf = Writer::new(block);

buf.out.indent = indent_level;
buf.out.indent = indent;
buf.out.indent_level = indent_level;

write_body(&mut buf, &body);

@@ -230,14 +238,3 @@ pub(crate) fn write_ifmt(input: &IfmtInput, writable: &mut impl Write) -> std::f
let display = DisplayIfmt(input);
write!(writable, "{}", display)
}

pub fn leading_whitespaces(input: &str) -> usize {
input
.chars()
.map_while(|c| match c {
' ' => Some(1),
'\t' => Some(4),
_ => None,
})
.sum()
}
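Put together, a caller of the reworked public API now passes the indent style explicitly. A small illustrative sketch (the helper name `format_source` is made up for the example, not part of the diff):

```rust
use dioxus_autofmt::{apply_formats, fmt_file, IndentOptions, IndentType};

// Format every rsx! block in a source string using tab indentation.
fn format_source(src: &str) -> String {
    let indent = IndentOptions::new(IndentType::Tabs, 4);
    // `fmt_file` now takes the indent options instead of assuming 4 spaces...
    let edits = fmt_file(src, indent);
    // ...and `apply_formats` splices the formatted blocks back into the source.
    apply_formats(src, edits)
}
```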
@@ -96,11 +96,11 @@ impl<'a> Writer<'a> {

// Push out the indent level and write each component, line by line
pub fn write_body_indented(&mut self, children: &[BodyNode]) -> Result {
self.out.indent += 1;
self.out.indent_level += 1;

self.write_body_no_indent(children)?;

self.out.indent -= 1;
self.out.indent_level -= 1;
Ok(())
}
@@ -12,7 +12,7 @@ macro_rules! twoway {
#[test]
fn $name() {
let src = include_str!(concat!("./samples/", stringify!($name), ".rsx"));
let formatted = dioxus_autofmt::fmt_file(src);
let formatted = dioxus_autofmt::fmt_file(src, Default::default());
let out = dioxus_autofmt::apply_formats(src, formatted);
// normalize line endings
let out = out.replace("\r", "");
@@ -1,10 +1,12 @@
use dioxus_autofmt::{IndentOptions, IndentType};

macro_rules! twoway {
($val:literal => $name:ident) => {
($val:literal => $name:ident ($indent:expr)) => {
#[test]
fn $name() {
let src_right = include_str!(concat!("./wrong/", $val, ".rsx"));
let src_wrong = include_str!(concat!("./wrong/", $val, ".wrong.rsx"));
let formatted = dioxus_autofmt::fmt_file(src_wrong);
let formatted = dioxus_autofmt::fmt_file(src_wrong, $indent);
let out = dioxus_autofmt::apply_formats(src_wrong, formatted);

// normalize line endings

@@ -16,8 +18,11 @@ macro_rules! twoway {
};
}

twoway!("comments" => comments);
twoway!("comments-4sp" => comments_4sp (IndentOptions::new(IndentType::Spaces, 4)));
twoway!("comments-tab" => comments_tab (IndentOptions::new(IndentType::Tabs, 4)));

twoway!("multi" => multi);
twoway!("multi-4sp" => multi_4sp (IndentOptions::new(IndentType::Spaces, 4)));
twoway!("multi-tab" => multi_tab (IndentOptions::new(IndentType::Tabs, 4)));

twoway!("multiexpr" => multiexpr);
twoway!("multiexpr-4sp" => multiexpr_4sp (IndentOptions::new(IndentType::Spaces, 4)));
twoway!("multiexpr-tab" => multiexpr_tab (IndentOptions::new(IndentType::Tabs, 4)));
packages/autofmt/tests/wrong/comments-tab.rsx (new file, 7 lines)

@@ -0,0 +1,7 @@
rsx! {
div {
// Comments
class: "asdasd",
"hello world"
}
}

packages/autofmt/tests/wrong/comments-tab.wrong.rsx (new file, 5 lines)

@@ -0,0 +1,5 @@
rsx! {
div {
// Comments
class: "asdasd", "hello world" }
}

packages/autofmt/tests/wrong/multi-tab.rsx (new file, 3 lines)

@@ -0,0 +1,3 @@
fn app(cx: Scope) -> Element {
cx.render(rsx! { div { "hello world" } })
}

packages/autofmt/tests/wrong/multi-tab.wrong.rsx (new file, 5 lines)

@@ -0,0 +1,5 @@
fn app(cx: Scope) -> Element {
cx.render(rsx! {
div {"hello world" }
})
}

packages/autofmt/tests/wrong/multiexpr-tab.rsx (new file, 8 lines)

@@ -0,0 +1,8 @@
fn ItWroks() {
cx.render(rsx! {
div { class: "flex flex-wrap items-center dark:text-white py-16 border-t font-light",
left,
right
}
})
}

packages/autofmt/tests/wrong/multiexpr-tab.wrong.rsx (new file, 5 lines)

@@ -0,0 +1,5 @@
fn ItWroks() {
cx.render(rsx! {
div { class: "flex flex-wrap items-center dark:text-white py-16 border-t font-light", left, right }
})
}
@@ -1,6 +1,6 @@
[package]
name = "dioxus-cli"
version = "0.4.1"
version = "0.4.3"
authors = ["Jonathan Kelley"]
edition = "2021"
description = "CLI tool for developing, testing, and publishing Dioxus apps"

@@ -83,6 +83,7 @@ dioxus-html = { workspace = true, features = ["hot-reload-context"] }
dioxus-core = { workspace = true, features = ["serialize"] }
dioxus-hot-reload = { workspace = true }
interprocess-docfix = { version = "1.2.2" }
gitignore = "1.0.8"

[features]
default = []
@@ -10,8 +10,8 @@ It handles building, bundling, development and publishing to simplify developmen

### Install the stable version (recommended)

```
cargo install dioxus-cli --locked
```shell
cargo install dioxus-cli
```

### Install the latest development build through git

@@ -20,7 +20,7 @@ To get the latest bug fixes and features, you can install the development versio
However, this is not fully tested.
That means you're probably going to have more bugs despite having the latest bug fixes.

```
```shell
cargo install --git https://github.com/DioxusLabs/dioxus dioxus-cli
```

@@ -29,7 +29,7 @@ and install it in Cargo's global binary directory (`~/.cargo/bin/` by default).

### Install from local folder

```
```shell
cargo install --path . --debug
```

@@ -40,7 +40,7 @@ It will be cloned from the [dioxus-template](https://github.com/DioxusLabs/dioxu

Alternatively, you can specify the template path:

```
```shell
dx create hello --template gh:dioxuslabs/dioxus-template
```
@@ -48,14 +48,25 @@ pub fn build(config: &CrateConfig, quiet: bool) -> Result<BuildResult> {

// [1] Build the .wasm module
log::info!("🚅 Running build command...");

let wasm_check_command = std::process::Command::new("rustup")
.args(["show"])
.output()?;
let wasm_check_output = String::from_utf8(wasm_check_command.stdout).unwrap();
if !wasm_check_output.contains("wasm32-unknown-unknown") {
log::info!("wasm32-unknown-unknown target not detected, installing..");
let _ = std::process::Command::new("rustup")
.args(["target", "add", "wasm32-unknown-unknown"])
.output()?;
}

let cmd = subprocess::Exec::cmd("cargo");
let cmd = cmd
.cwd(crate_dir)
.arg("build")
.arg("--target")
.arg("wasm32-unknown-unknown")
.arg("--message-format=json")
.arg("--quiet");
.arg("--message-format=json");

let cmd = if config.release {
cmd.arg("--release")

@@ -65,7 +76,7 @@ pub fn build(config: &CrateConfig, quiet: bool) -> Result<BuildResult> {
let cmd = if config.verbose {
cmd.arg("--verbose")
} else {
cmd
cmd.arg("--quiet")
};

let cmd = if config.custom_profile.is_some() {

@@ -254,7 +265,6 @@ pub fn build_desktop(config: &CrateConfig, _is_serve: bool) -> Result<BuildResul
let mut cmd = subprocess::Exec::cmd("cargo")
.cwd(&config.crate_dir)
.arg("build")
.arg("--quiet")
.arg("--message-format=json");

if config.release {

@@ -262,6 +272,8 @@ pub fn build_desktop(config: &CrateConfig, _is_serve: bool) -> Result<BuildResul
}
if config.verbose {
cmd = cmd.arg("--verbose");
} else {
cmd = cmd.arg("--quiet");
}

if config.custom_profile.is_some() {

@@ -469,7 +481,7 @@ pub fn gen_page(config: &DioxusConfig, serve: bool) -> String {
.unwrap_or_default()
.contains_key("tailwindcss")
{
style_str.push_str("<link rel=\"stylesheet\" href=\"tailwind.css\">\n");
style_str.push_str("<link rel=\"stylesheet\" href=\"/{base_path}/tailwind.css\">\n");
}

replace_or_insert_before("{style_include}", &style_str, "</head", &mut html);
|
@ -1,3 +1,4 @@
|
|||
use dioxus_autofmt::{IndentOptions, IndentType};
|
||||
use futures::{stream::FuturesUnordered, StreamExt};
|
||||
use std::{fs, path::Path, process::exit};
|
||||
|
||||
|
@ -26,16 +27,19 @@ pub struct Autoformat {
|
|||
impl Autoformat {
|
||||
// Todo: autoformat the entire crate
|
||||
pub async fn autoformat(self) -> Result<()> {
|
||||
let Autoformat { check, raw, file } = self;
|
||||
|
||||
// Default to formatting the project
|
||||
if self.raw.is_none() && self.file.is_none() {
|
||||
if let Err(e) = autoformat_project(self.check).await {
|
||||
if raw.is_none() && file.is_none() {
|
||||
if let Err(e) = autoformat_project(check).await {
|
||||
eprintln!("error formatting project: {}", e);
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(raw) = self.raw {
|
||||
if let Some(inner) = dioxus_autofmt::fmt_block(&raw, 0) {
|
||||
if let Some(raw) = raw {
|
||||
let indent = indentation_for(".")?;
|
||||
if let Some(inner) = dioxus_autofmt::fmt_block(&raw, 0, indent) {
|
||||
println!("{}", inner);
|
||||
} else {
|
||||
// exit process with error
|
||||
|
@ -45,7 +49,16 @@ impl Autoformat {
|
|||
}
|
||||
|
||||
// Format single file
|
||||
if let Some(file) = self.file {
|
||||
if let Some(file) = file {
|
||||
refactor_file(file)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn refactor_file(file: String) -> Result<(), Error> {
|
||||
let indent = indentation_for(".")?;
|
||||
let file_content = if file == "-" {
|
||||
let mut contents = String::new();
|
||||
std::io::stdin().read_to_string(&mut contents)?;
|
||||
|
@ -53,33 +66,71 @@ impl Autoformat {
|
|||
} else {
|
||||
fs::read_to_string(&file)
|
||||
};
|
||||
|
||||
match file_content {
|
||||
Ok(s) => {
|
||||
let edits = dioxus_autofmt::fmt_file(&s);
|
||||
let Ok(s) = file_content else {
|
||||
eprintln!("failed to open file: {}", file_content.unwrap_err());
|
||||
exit(1);
|
||||
};
|
||||
let edits = dioxus_autofmt::fmt_file(&s, indent);
|
||||
let out = dioxus_autofmt::apply_formats(&s, edits);
|
||||
|
||||
if file == "-" {
|
||||
print!("{}", out);
|
||||
} else if let Err(e) = fs::write(&file, out) {
|
||||
eprintln!("failed to write formatted content to file: {e}",);
|
||||
} else {
|
||||
match fs::write(&file, out) {
|
||||
Ok(_) => {
|
||||
println!("formatted {}", file);
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("failed to write formatted content to file: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("failed to open file: {}", e);
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_project_files(config: &CrateConfig) -> Vec<PathBuf> {
|
||||
let mut files = vec![];
|
||||
|
||||
let gitignore_path = config.crate_dir.join(".gitignore");
|
||||
if gitignore_path.is_file() {
|
||||
let gitigno = gitignore::File::new(gitignore_path.as_path()).unwrap();
|
||||
if let Ok(git_files) = gitigno.included_files() {
|
||||
let git_files = git_files
|
||||
.into_iter()
|
||||
.filter(|f| f.ends_with(".rs") && !is_target_dir(f));
|
||||
files.extend(git_files)
|
||||
};
|
||||
} else {
|
||||
collect_rs_files(&config.crate_dir, &mut files);
|
||||
}
|
||||
|
||||
files
|
||||
}
|
||||
|
||||
fn is_target_dir(file: &Path) -> bool {
|
||||
let stripped = if let Ok(cwd) = std::env::current_dir() {
|
||||
file.strip_prefix(cwd).unwrap_or(file)
|
||||
} else {
|
||||
file
|
||||
};
|
||||
if let Some(first) = stripped.components().next() {
|
||||
first.as_os_str() == "target"
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
async fn format_file(
|
||||
path: impl AsRef<Path>,
|
||||
indent: IndentOptions,
|
||||
) -> Result<usize, tokio::io::Error> {
|
||||
let contents = tokio::fs::read_to_string(&path).await?;
|
||||
|
||||
let edits = dioxus_autofmt::fmt_file(&contents, indent);
|
||||
let len = edits.len();
|
||||
|
||||
if !edits.is_empty() {
|
||||
let out = dioxus_autofmt::apply_formats(&contents, edits);
|
||||
tokio::fs::write(path, out).await?;
|
||||
}
|
||||
|
||||
Ok(len)
|
||||
}
|
||||
|
||||
/// Read every .rs file accessible when considering the .gitignore and try to format it
|
||||
|
@ -90,42 +141,27 @@ impl Autoformat {
|
|||
async fn autoformat_project(check: bool) -> Result<()> {
|
||||
let crate_config = crate::CrateConfig::new(None)?;
|
||||
|
||||
let mut files_to_format = vec![];
|
||||
collect_rs_files(&crate_config.crate_dir, &mut files_to_format);
|
||||
let files_to_format = get_project_files(&crate_config);
|
||||
|
||||
if files_to_format.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let indent = indentation_for(&files_to_format[0])?;
|
||||
|
||||
let counts = files_to_format
|
||||
.into_iter()
|
||||
.filter(|file| {
|
||||
if file.components().any(|f| f.as_os_str() == "target") {
|
||||
return false;
|
||||
}
|
||||
|
||||
true
|
||||
})
|
||||
.map(|path| async {
|
||||
let _path = path.clone();
|
||||
let res = tokio::spawn(async move {
|
||||
let contents = tokio::fs::read_to_string(&path).await?;
|
||||
|
||||
let edits = dioxus_autofmt::fmt_file(&contents);
|
||||
let len = edits.len();
|
||||
|
||||
if !edits.is_empty() {
|
||||
let out = dioxus_autofmt::apply_formats(&contents, edits);
|
||||
tokio::fs::write(&path, out).await?;
|
||||
}
|
||||
|
||||
Ok(len) as Result<usize, tokio::io::Error>
|
||||
})
|
||||
.await;
|
||||
let path_clone = path.clone();
|
||||
let res = tokio::spawn(format_file(path, indent.clone())).await;
|
||||
|
||||
match res {
|
||||
Err(err) => {
|
||||
eprintln!("error formatting file: {}\n{err}", _path.display());
|
||||
eprintln!("error formatting file: {}\n{err}", path_clone.display());
|
||||
None
|
||||
}
|
||||
Ok(Err(err)) => {
|
||||
eprintln!("error formatting file: {}\n{err}", _path.display());
|
||||
eprintln!("error formatting file: {}\n{err}", path_clone.display());
|
||||
None
|
||||
}
|
||||
Ok(Ok(res)) => Some(res),
|
||||
|
@ -135,13 +171,7 @@ async fn autoformat_project(check: bool) -> Result<()> {
|
|||
.collect::<Vec<_>>()
|
||||
.await;
|
||||
|
||||
let files_formatted: usize = counts
|
||||
.into_iter()
|
||||
.map(|f| match f {
|
||||
Some(res) => res,
|
||||
_ => 0,
|
||||
})
|
||||
.sum();
|
||||
let files_formatted: usize = counts.into_iter().flatten().sum();
|
||||
|
||||
if files_formatted > 0 && check {
|
||||
eprintln!("{} files needed formatting", files_formatted);
|
||||
|
@ -151,26 +181,67 @@ async fn autoformat_project(check: bool) -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn collect_rs_files(folder: &Path, files: &mut Vec<PathBuf>) {
|
||||
let Ok(folder) = folder.read_dir() else {
|
||||
fn indentation_for(file_or_dir: impl AsRef<Path>) -> Result<IndentOptions> {
|
||||
let out = std::process::Command::new("cargo")
|
||||
.args(["fmt", "--", "--print-config", "current"])
|
||||
.arg(file_or_dir.as_ref())
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.stderr(std::process::Stdio::inherit())
|
||||
.output()?;
|
||||
if !out.status.success() {
|
||||
return Err(Error::CargoError("cargo fmt failed".into()));
|
||||
}
|
||||
|
||||
let config = String::from_utf8_lossy(&out.stdout);
|
||||
|
||||
let hard_tabs = config
|
||||
.lines()
|
||||
.find(|line| line.starts_with("hard_tabs "))
|
||||
.and_then(|line| line.split_once('='))
|
||||
.map(|(_, value)| value.trim() == "true")
|
||||
.ok_or_else(|| {
|
||||
Error::RuntimeError("Could not find hard_tabs option in rustfmt config".into())
|
||||
})?;
|
||||
let tab_spaces = config
|
||||
.lines()
|
||||
.find(|line| line.starts_with("tab_spaces "))
|
||||
.and_then(|line| line.split_once('='))
|
||||
.map(|(_, value)| value.trim().parse::<usize>())
|
||||
.ok_or_else(|| {
|
||||
Error::RuntimeError("Could not find tab_spaces option in rustfmt config".into())
|
||||
})?
|
||||
.map_err(|_| {
|
||||
Error::RuntimeError("Could not parse tab_spaces option in rustfmt config".into())
|
||||
})?;
|
||||
|
||||
Ok(IndentOptions::new(
|
||||
if hard_tabs {
|
||||
IndentType::Tabs
|
||||
} else {
|
||||
IndentType::Spaces
|
||||
},
|
||||
tab_spaces,
|
||||
))
|
||||
}
|
||||
|
||||
fn collect_rs_files(folder: &impl AsRef<Path>, files: &mut Vec<PathBuf>) {
|
||||
if is_target_dir(folder.as_ref()) {
|
||||
return;
|
||||
}
|
||||
let Ok(folder) = folder.as_ref().read_dir() else {
|
||||
return;
|
||||
};
|
||||
|
||||
// load the gitignore
|
||||
|
||||
for entry in folder {
|
||||
let Ok(entry) = entry else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let path = entry.path();
|
||||
|
||||
if path.is_dir() {
|
||||
collect_rs_files(&path, files);
|
||||
}
|
||||
|
||||
if let Some(ext) = path.extension() {
|
||||
if ext == "rs" {
|
||||
if ext == "rs" && !is_target_dir(&path) {
|
||||
files.push(path);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -37,8 +37,8 @@ impl Build {
.platform
.unwrap_or(crate_config.dioxus_config.application.default_platform);

#[cfg(feature = "plugin")]
let _ = PluginManager::on_build_start(&crate_config, &platform);
// #[cfg(feature = "plugin")]
// let _ = PluginManager::on_build_start(&crate_config, &platform);

match platform {
Platform::Web => {

@@ -66,8 +66,8 @@ impl Build {
)?;
file.write_all(temp.as_bytes())?;

#[cfg(feature = "plugin")]
let _ = PluginManager::on_build_finish(&crate_config, &platform);
// #[cfg(feature = "plugin")]
// let _ = PluginManager::on_build_finish(&crate_config, &platform);

Ok(())
}
@@ -29,6 +29,9 @@ pub enum Error {
#[error("Cargo Error: {0}")]
CargoError(String),

#[error("Couldn't retrieve cargo metadata")]
CargoMetadata(#[source] cargo_metadata::Error),

#[error("{0}")]
CustomError(String),
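For context, the new `CargoMetadata` variant is used elsewhere in this change set to wrap `cargo_metadata` failures while keeping the source error in the chain. A minimal hedged sketch (the surrounding `Error` type is the CLI enum shown above; the helper name is illustrative):

```rust
use cargo_metadata::MetadataCommand;

fn workspace_metadata() -> Result<cargo_metadata::Metadata, Error> {
    // `#[source] cargo_metadata::Error` preserves the underlying failure,
    // instead of flattening it into a plain string like `CargoError(String)` would.
    MetadataCommand::new().exec().map_err(Error::CargoMetadata)
}
```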
@@ -28,7 +28,19 @@ pub fn set_up_logging() {
message = message,
));
})
.level(log::LevelFilter::Info)
.level(match std::env::var("DIOXUS_LOG") {
Ok(level) => match level.to_lowercase().as_str() {
"error" => log::LevelFilter::Error,
"warn" => log::LevelFilter::Warn,
"info" => log::LevelFilter::Info,
"debug" => log::LevelFilter::Debug,
"trace" => log::LevelFilter::Trace,
_ => {
panic!("Invalid log level: {}", level)
}
},
Err(_) => log::LevelFilter::Info,
})
.chain(std::io::stdout())
.apply()
.unwrap();
|
@ -9,42 +9,31 @@ use dioxus_cli::plugin::PluginManager;
|
|||
|
||||
use Commands::*;
|
||||
|
||||
fn get_bin(bin: Option<String>) -> Result<Option<PathBuf>> {
|
||||
const ERR_MESSAGE: &str = "The `--bin` flag has to be ran in a Cargo workspace.";
|
||||
|
||||
if let Some(ref bin) = bin {
|
||||
let manifest = cargo_toml::Manifest::from_path("./Cargo.toml")
|
||||
.map_err(|_| Error::CargoError(ERR_MESSAGE.to_string()))?;
|
||||
|
||||
if let Some(workspace) = manifest.workspace {
|
||||
for item in workspace.members.iter() {
|
||||
let path = PathBuf::from(item);
|
||||
|
||||
if !path.exists() {
|
||||
continue;
|
||||
}
|
||||
|
||||
if !path.is_dir() {
|
||||
continue;
|
||||
}
|
||||
|
||||
if path.ends_with(bin.clone()) {
|
||||
return Ok(Some(path));
|
||||
}
|
||||
}
|
||||
fn get_bin(bin: Option<String>) -> Result<PathBuf> {
|
||||
let metadata = cargo_metadata::MetadataCommand::new()
|
||||
.exec()
|
||||
.map_err(Error::CargoMetadata)?;
|
||||
let package = if let Some(bin) = bin {
|
||||
metadata
|
||||
.workspace_packages()
|
||||
.into_iter()
|
||||
.find(|p| p.name == bin)
|
||||
.ok_or(format!("no such package: {}", bin))
|
||||
.map_err(Error::CargoError)?
|
||||
} else {
|
||||
return Err(Error::CargoError(ERR_MESSAGE.to_string()));
|
||||
}
|
||||
}
|
||||
metadata
|
||||
.root_package()
|
||||
.ok_or("no root package?".into())
|
||||
.map_err(Error::CargoError)?
|
||||
};
|
||||
|
||||
// If the bin exists but we couldn't find it
|
||||
if bin.is_some() {
|
||||
return Err(Error::CargoError(
|
||||
"The specified bin does not exist.".to_string(),
|
||||
));
|
||||
}
|
||||
let crate_dir = package
|
||||
.manifest_path
|
||||
.parent()
|
||||
.ok_or("couldn't take parent dir".into())
|
||||
.map_err(Error::CargoError)?;
|
||||
|
||||
Ok(None)
|
||||
Ok(crate_dir.into())
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
|
@ -53,9 +42,10 @@ async fn main() -> anyhow::Result<()> {
|
|||
|
||||
set_up_logging();
|
||||
|
||||
let bin = get_bin(args.bin)?;
|
||||
let bin = get_bin(args.bin);
|
||||
|
||||
let _dioxus_config = DioxusConfig::load(bin.clone())
|
||||
if let Ok(bin) = &bin {
|
||||
let _dioxus_config = DioxusConfig::load(Some(bin.clone()))
|
||||
.map_err(|e| anyhow!("Failed to load Dioxus config because: {e}"))?
|
||||
.unwrap_or_else(|| {
|
||||
log::warn!("You appear to be creating a Dioxus project from scratch; we will use the default config");
|
||||
|
@ -65,22 +55,23 @@ async fn main() -> anyhow::Result<()> {
|
|||
#[cfg(feature = "plugin")]
|
||||
PluginManager::init(_dioxus_config.plugin)
|
||||
.map_err(|e| anyhow!("🚫 Plugin system initialization failed: {e}"))?;
|
||||
}
|
||||
|
||||
match args.action {
|
||||
Translate(opts) => opts
|
||||
.translate()
|
||||
.map_err(|e| anyhow!("🚫 Translation of HTML into RSX failed: {}", e)),
|
||||
|
||||
Build(opts) => opts
|
||||
.build(bin.clone())
|
||||
Build(opts) if bin.is_ok() => opts
|
||||
.build(Some(bin.unwrap().clone()))
|
||||
.map_err(|e| anyhow!("🚫 Building project failed: {}", e)),
|
||||
|
||||
Clean(opts) => opts
|
||||
.clean(bin.clone())
|
||||
Clean(opts) if bin.is_ok() => opts
|
||||
.clean(Some(bin.unwrap().clone()))
|
||||
.map_err(|e| anyhow!("🚫 Cleaning project failed: {}", e)),
|
||||
|
||||
Serve(opts) => opts
|
||||
.serve(bin.clone())
|
||||
Serve(opts) if bin.is_ok() => opts
|
||||
.serve(Some(bin.unwrap().clone()))
|
||||
.await
|
||||
.map_err(|e| anyhow!("🚫 Serving project failed: {}", e)),
|
||||
|
||||
|
@ -92,8 +83,8 @@ async fn main() -> anyhow::Result<()> {
|
|||
.config()
|
||||
.map_err(|e| anyhow!("🚫 Configuring new project failed: {}", e)),
|
||||
|
||||
Bundle(opts) => opts
|
||||
.bundle(bin.clone())
|
||||
Bundle(opts) if bin.is_ok() => opts
|
||||
.bundle(Some(bin.unwrap().clone()))
|
||||
.map_err(|e| anyhow!("🚫 Bundling project failed: {}", e)),
|
||||
|
||||
#[cfg(feature = "plugin")]
|
||||
|
@ -118,5 +109,6 @@ async fn main() -> anyhow::Result<()> {
|
|||
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(anyhow::anyhow!(bin.unwrap_err())),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -43,8 +43,6 @@ pub async fn startup(config: CrateConfig) -> Result<()> {
|
|||
|
||||
let hot_reload_tx = broadcast::channel(100).0;
|
||||
|
||||
clear_paths();
|
||||
|
||||
Some(HotReloadState {
|
||||
messages: hot_reload_tx.clone(),
|
||||
file_map: file_map.clone(),
|
||||
|
@ -73,6 +71,7 @@ pub async fn serve(config: CrateConfig, hot_reload_state: Option<HotReloadState>
|
|||
|
||||
move || {
|
||||
let mut current_child = currently_running_child.write().unwrap();
|
||||
log::trace!("Killing old process");
|
||||
current_child.kill()?;
|
||||
let (child, result) = start_desktop(&config)?;
|
||||
*current_child = child;
|
||||
|
@ -109,7 +108,14 @@ pub async fn serve(config: CrateConfig, hot_reload_state: Option<HotReloadState>
|
|||
}
|
||||
|
||||
async fn start_desktop_hot_reload(hot_reload_state: HotReloadState) -> Result<()> {
|
||||
match LocalSocketListener::bind("@dioxusin") {
|
||||
let metadata = cargo_metadata::MetadataCommand::new()
|
||||
.no_deps()
|
||||
.exec()
|
||||
.unwrap();
|
||||
let target_dir = metadata.target_directory.as_std_path();
|
||||
let path = target_dir.join("dioxusin");
|
||||
clear_paths(&path);
|
||||
match LocalSocketListener::bind(path) {
|
||||
Ok(local_socket_stream) => {
|
||||
let aborted = Arc::new(Mutex::new(false));
|
||||
// States
|
||||
|
@ -121,9 +127,9 @@ async fn start_desktop_hot_reload(hot_reload_state: HotReloadState) -> Result<()
|
|||
let file_map = hot_reload_state.file_map.clone();
|
||||
let channels = channels.clone();
|
||||
let aborted = aborted.clone();
|
||||
let _ = local_socket_stream.set_nonblocking(true);
|
||||
move || {
|
||||
loop {
|
||||
//accept() will block the thread when local_socket_stream is in blocking mode (default)
|
||||
match local_socket_stream.accept() {
|
||||
Ok(mut connection) => {
|
||||
// send any templates than have changed before the socket connected
|
||||
|
@ -148,7 +154,11 @@ async fn start_desktop_hot_reload(hot_reload_state: HotReloadState) -> Result<()
|
|||
println!("Connected to hot reloading 🚀");
|
||||
}
|
||||
Err(err) => {
|
||||
if err.kind() != std::io::ErrorKind::WouldBlock {
|
||||
let error_string = err.to_string();
|
||||
// Filter out any error messages about a operation that may block and an error message that triggers on some operating systems that says "Waiting for a process to open the other end of the pipe" without WouldBlock being set
|
||||
let display_error = err.kind() != std::io::ErrorKind::WouldBlock
|
||||
&& !error_string.contains("Waiting for a process");
|
||||
if display_error {
|
||||
println!("Error connecting to hot reloading: {} (Hot reloading is a feature of the dioxus-cli. If you are not using the CLI, this error can be ignored)", err);
|
||||
}
|
||||
}
|
||||
|
@ -181,17 +191,14 @@ async fn start_desktop_hot_reload(hot_reload_state: HotReloadState) -> Result<()
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn clear_paths() {
|
||||
fn clear_paths(file_socket_path: &std::path::Path) {
|
||||
if cfg!(target_os = "macos") {
|
||||
// On unix, if you force quit the application, it can leave the file socket open
|
||||
// This will cause the local socket listener to fail to open
|
||||
// We check if the file socket is already open from an old session and then delete it
|
||||
let paths = ["./dioxusin", "./@dioxusin"];
|
||||
for path in paths {
|
||||
let path = std::path::PathBuf::from(path);
|
||||
if path.exists() {
|
||||
let _ = std::fs::remove_file(path);
|
||||
}
|
||||
|
||||
if file_socket_path.exists() {
|
||||
let _ = std::fs::remove_file(file_socket_path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -212,6 +219,7 @@ fn send_msg(msg: HotReloadMsg, channel: &mut impl std::io::Write) -> bool {
|
|||
|
||||
pub fn start_desktop(config: &CrateConfig) -> Result<(Child, BuildResult)> {
|
||||
// Run the desktop application
|
||||
log::trace!("Building application");
|
||||
let result = crate::builder::build_desktop(config, true)?;
|
||||
|
||||
match &config.executable {
|
||||
|
@ -222,6 +230,7 @@ pub fn start_desktop(config: &CrateConfig) -> Result<(Child, BuildResult)> {
|
|||
if cfg!(windows) {
|
||||
file.set_extension("exe");
|
||||
}
|
||||
log::trace!("Running application from {:?}", file);
|
||||
let child = Command::new(file.to_str().unwrap()).spawn()?;
|
||||
|
||||
Ok((child, result))
|
||||
|
|
@@ -55,6 +55,16 @@ async fn setup_file_watcher<F: Fn() -> Result<BuildResult> + Send + 'static>(
break;
}

// Workaround for notify and vscode-like editor:
// when edit & save a file in vscode, there will be two notifications,
// the first one is a file with empty content.
// filter the empty file notification to avoid false rebuild during hot-reload
if let Ok(metadata) = fs::metadata(path) {
if metadata.len() == 0 {
continue;
}
}

match rsx_file_map.update_rsx(path, &config.crate_dir) {
Ok(UpdateResult::UpdatedRsx(msgs)) => {
messages.extend(msgs);
@@ -22,6 +22,8 @@ pub fn print_console_info(
options: PrettierOptions,
web_info: Option<WebServerInfo>,
) {
// Don't clear the screen if the user has set the DIOXUS_LOG environment variable to "trace" so that we can see the logs
if Some("trace") != std::env::var("DIOXUS_LOG").ok().as_deref() {
if let Ok(native_clearseq) = Command::new(if cfg!(target_os = "windows") {
"cls"
} else {

@@ -34,6 +36,7 @@ pub fn print_console_info(
// Try ANSI-Escape characters
print!("\x1b[2J\x1b[H");
}
}

let mut profile = if config.release { "Release" } else { "Debug" }.to_string();
if config.custom_profile.is_some() {
@@ -31,6 +31,7 @@ fn get_out_comp_fn(orig_comp_fn: &ItemFn, cx_pat: &Pat) -> ItemFn {
block: parse_quote! {
{
#[warn(non_snake_case)]
#[allow(clippy::inline_always)]
#[inline(always)]
#inner_comp_fn
#inner_comp_ident (#cx_pat)
|
@ -243,10 +243,6 @@ mod field_info {
|
|||
}
|
||||
.into()
|
||||
}
|
||||
|
||||
pub fn type_from_inside_option(&self, check_option_name: bool) -> Option<&syn::Type> {
|
||||
type_from_inside_option(self.ty, check_option_name)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone)]
|
||||
|
@ -551,18 +547,16 @@ mod struct_info {
|
|||
let generics_with_empty = modify_types_generics_hack(&ty_generics, |args| {
|
||||
args.insert(0, syn::GenericArgument::Type(empties_tuple.clone().into()));
|
||||
});
|
||||
let phantom_generics = self.generics.params.iter().map(|param| match param {
|
||||
let phantom_generics = self.generics.params.iter().filter_map(|param| match param {
|
||||
syn::GenericParam::Lifetime(lifetime) => {
|
||||
let lifetime = &lifetime.lifetime;
|
||||
quote!(::core::marker::PhantomData<&#lifetime ()>)
|
||||
Some(quote!(::core::marker::PhantomData<&#lifetime ()>))
|
||||
}
|
||||
syn::GenericParam::Type(ty) => {
|
||||
let ty = &ty.ident;
|
||||
quote!(::core::marker::PhantomData<#ty>)
|
||||
}
|
||||
syn::GenericParam::Const(_cnst) => {
|
||||
quote!()
|
||||
Some(quote!(::core::marker::PhantomData<#ty>))
|
||||
}
|
||||
syn::GenericParam::Const(_cnst) => None,
|
||||
});
|
||||
let builder_method_doc = match self.builder_attr.builder_method_doc {
|
||||
Some(ref doc) => quote!(#doc),
|
||||
|
@ -633,7 +627,7 @@ Finally, call `.build()` to create the instance of `{name}`.
|
|||
Ok(quote! {
|
||||
impl #impl_generics #name #ty_generics #where_clause {
|
||||
#[doc = #builder_method_doc]
|
||||
#[allow(dead_code)]
|
||||
#[allow(dead_code, clippy::type_complexity)]
|
||||
#vis fn builder() -> #builder_name #generics_with_empty {
|
||||
#builder_name {
|
||||
fields: #empties_tuple,
|
||||
|
@ -785,19 +779,9 @@ Finally, call `.build()` to create the instance of `{name}`.
|
|||
None => quote!(),
|
||||
};
|
||||
|
||||
// NOTE: both auto_into and strip_option affect `arg_type` and `arg_expr`, but the order of
|
||||
// nesting is different so we have to do this little dance.
|
||||
let arg_type = if field.builder_attr.strip_option {
|
||||
field.type_from_inside_option(false).ok_or_else(|| {
|
||||
Error::new_spanned(
|
||||
field_type,
|
||||
"can't `strip_option` - field is not `Option<...>`",
|
||||
)
|
||||
})?
|
||||
} else {
|
||||
field_type
|
||||
};
|
||||
let (arg_type, arg_expr) = if field.builder_attr.auto_into {
|
||||
let arg_type = field_type;
|
||||
let (arg_type, arg_expr) =
|
||||
if field.builder_attr.auto_into || field.builder_attr.strip_option {
|
||||
(
|
||||
quote!(impl ::core::convert::Into<#arg_type>),
|
||||
quote!(#field_name.into()),
|
||||
|
@ -805,11 +789,6 @@ Finally, call `.build()` to create the instance of `{name}`.
|
|||
} else {
|
||||
(quote!(#arg_type), quote!(#field_name))
|
||||
};
|
||||
let arg_expr = if field.builder_attr.strip_option {
|
||||
quote!(Some(#arg_expr))
|
||||
} else {
|
||||
arg_expr
|
||||
};
|
||||
|
||||
let repeated_fields_error_type_name = syn::Ident::new(
|
||||
&format!(
|
||||
|
@ -825,6 +804,7 @@ Finally, call `.build()` to create the instance of `{name}`.
|
|||
#[allow(dead_code, non_camel_case_types, missing_docs)]
|
||||
impl #impl_generics #builder_name < #( #ty_generics ),* > #where_clause {
|
||||
#doc
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub fn #field_name (self, #field_name: #arg_type) -> #builder_name < #( #target_generics ),* > {
|
||||
let #field_name = (#arg_expr,);
|
||||
let ( #(#descructuring,)* ) = self.fields;
|
||||
|
@ -843,6 +823,7 @@ Finally, call `.build()` to create the instance of `{name}`.
|
|||
#[deprecated(
|
||||
note = #repeated_fields_error_message
|
||||
)]
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub fn #field_name (self, _: #repeated_fields_error_type_name) -> #builder_name < #( #target_generics ),* > {
|
||||
self
|
||||
}
|
||||
|
|
@@ -164,18 +164,12 @@ impl VirtualDom {
});

// Now that all the references are gone, we can safely drop our own references in our listeners.
let mut listeners = scope.attributes_to_drop.borrow_mut();
let mut listeners = scope.attributes_to_drop_before_render.borrow_mut();
listeners.drain(..).for_each(|listener| {
let listener = unsafe { &*listener };
match &listener.value {
AttributeValue::Listener(l) => {
if let AttributeValue::Listener(l) = &listener.value {
_ = l.take();
}
AttributeValue::Any(a) => {
_ = a.take();
}
_ => (),
}
});
}
}
|
@ -1,10 +1,16 @@
|
|||
use crate::nodes::RenderReturn;
|
||||
use crate::{Attribute, AttributeValue, VComponent};
|
||||
use bumpalo::Bump;
|
||||
use std::cell::RefCell;
|
||||
use std::cell::{Cell, UnsafeCell};
|
||||
|
||||
pub(crate) struct BumpFrame {
|
||||
pub bump: UnsafeCell<Bump>,
|
||||
pub node: Cell<*const RenderReturn<'static>>,
|
||||
|
||||
// The bump allocator will not call the destructor of the objects it allocated. Attributes and props need to have there destructor called, so we keep a list of them to drop before the bump allocator is reset.
|
||||
pub(crate) attributes_to_drop_before_reset: RefCell<Vec<*const Attribute<'static>>>,
|
||||
pub(crate) props_to_drop_before_reset: RefCell<Vec<*const VComponent<'static>>>,
|
||||
}
|
||||
|
||||
impl BumpFrame {
|
||||
|
@ -13,6 +19,8 @@ impl BumpFrame {
|
|||
Self {
|
||||
bump: UnsafeCell::new(bump),
|
||||
node: Cell::new(std::ptr::null()),
|
||||
attributes_to_drop_before_reset: Default::default(),
|
||||
props_to_drop_before_reset: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -31,8 +39,38 @@ impl BumpFrame {
|
|||
unsafe { &*self.bump.get() }
|
||||
}
|
||||
|
||||
#[allow(clippy::mut_from_ref)]
|
||||
pub(crate) unsafe fn bump_mut(&self) -> &mut Bump {
|
||||
unsafe { &mut *self.bump.get() }
|
||||
pub(crate) fn add_attribute_to_drop(&self, attribute: *const Attribute<'static>) {
|
||||
self.attributes_to_drop_before_reset
|
||||
.borrow_mut()
|
||||
.push(attribute);
|
||||
}
|
||||
|
||||
/// Reset the bump allocator and drop all the attributes and props that were allocated in it.
|
||||
///
|
||||
/// # Safety
|
||||
/// The caller must insure that no reference to anything allocated in the bump allocator is available after this function is called.
|
||||
pub(crate) unsafe fn reset(&self) {
|
||||
let mut attributes = self.attributes_to_drop_before_reset.borrow_mut();
|
||||
attributes.drain(..).for_each(|attribute| {
|
||||
let attribute = unsafe { &*attribute };
|
||||
if let AttributeValue::Any(l) = &attribute.value {
|
||||
_ = l.take();
|
||||
}
|
||||
});
|
||||
let mut props = self.props_to_drop_before_reset.borrow_mut();
|
||||
props.drain(..).for_each(|prop| {
|
||||
let prop = unsafe { &*prop };
|
||||
_ = prop.props.borrow_mut().take();
|
||||
});
|
||||
unsafe {
|
||||
let bump = &mut *self.bump.get();
|
||||
bump.reset();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for BumpFrame {
|
||||
fn drop(&mut self) {
|
||||
unsafe { self.reset() }
|
||||
}
|
||||
}
|
||||
|
|
@@ -560,7 +560,7 @@ impl<'b> VirtualDom {
// If none of the old keys are reused by the new children, then we remove all the remaining old children and
// create the new children afresh.
if shared_keys.is_empty() {
if old.get(0).is_some() {
if old.first().is_some() {
self.remove_nodes(&old[1..]);
self.replace(&old[0], new);
} else {
@@ -107,8 +107,6 @@ impl<T: std::fmt::Debug> std::fmt::Debug for Event<T> {
}
}

#[doc(hidden)]

/// The callback type generated by the `rsx!` macro when an `on` field is specified for components.
///
/// This makes it possible to pass `move |evt| {}` style closures into components as property fields.
|
@ -18,13 +18,42 @@ use crate::{innerlude::VNode, ScopeState};
|
|||
|
||||
/// A concrete type provider for closures that build [`VNode`] structures.
|
||||
///
|
||||
/// This struct wraps lazy structs that build [`VNode`] trees Normally, we cannot perform a blanket implementation over
|
||||
/// This struct wraps lazy structs that build [`VNode`] trees. Normally, we cannot perform a blanket implementation over
|
||||
/// closures, but if we wrap the closure in a concrete type, we can use it for different branches in matching.
|
||||
///
|
||||
///
|
||||
/// ```rust, ignore
|
||||
/// LazyNodes::new(|f| f.element("div", [], [], [] None))
|
||||
/// LazyNodes::new(|f| {
|
||||
/// static TEMPLATE: dioxus::core::Template = dioxus::core::Template {
|
||||
/// name: "main.rs:5:5:20", // Source location of the template for hot reloading
|
||||
/// roots: &[
|
||||
/// dioxus::core::TemplateNode::Element {
|
||||
/// tag: dioxus_elements::div::TAG_NAME,
|
||||
/// namespace: dioxus_elements::div::NAME_SPACE,
|
||||
/// attrs: &[],
|
||||
/// children: &[],
|
||||
/// },
|
||||
/// ],
|
||||
/// node_paths: &[],
|
||||
/// attr_paths: &[],
|
||||
/// };
|
||||
/// dioxus::core::VNode {
|
||||
/// parent: None,
|
||||
/// key: None,
|
||||
/// template: std::cell::Cell::new(TEMPLATE),
|
||||
/// root_ids: dioxus::core::exports::bumpalo::collections::Vec::with_capacity_in(
|
||||
/// 1usize,
|
||||
/// f.bump(),
|
||||
/// )
|
||||
/// .into(),
|
||||
/// dynamic_nodes: f.bump().alloc([]),
|
||||
/// dynamic_attrs: f.bump().alloc([]),
|
||||
/// })
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// Find more information about how to construct [`VNode`] at <https://dioxuslabs.com/learn/0.4/contributing/walkthrough_readme#the-rsx-macro>
|
||||
|
||||
pub struct LazyNodes<'a, 'b> {
|
||||
#[cfg(not(miri))]
|
||||
inner: SmallBox<dyn FnMut(&'a ScopeState) -> VNode<'a> + 'b, S16>,
|
||||
|
@ -61,7 +90,7 @@ impl<'a, 'b> LazyNodes<'a, 'b> {
|
|||
/// Call the closure with the given factory to produce real [`VNode`].
|
||||
///
|
||||
/// ```rust, ignore
|
||||
/// let f = LazyNodes::new(move |f| f.element("div", [], [], [] None));
|
||||
/// let f = LazyNodes::new(/* Closure for creating VNodes */);
|
||||
///
|
||||
/// let node = f.call(cac);
|
||||
/// ```
|
||||
|
|
|
@ -91,7 +91,7 @@ pub enum Mutation<'a> {
|
|||
id: ElementId,
|
||||
},
|
||||
|
||||
/// Create an placeholder int he DOM that we will use later.
|
||||
/// Create a placeholder in the DOM that we will use later.
|
||||
///
|
||||
/// Dioxus currently requires the use of placeholders to maintain a re-entrance point for things like list diffing
|
||||
CreatePlaceholder {
|
||||
|
|
|
@ -707,7 +707,7 @@ impl<'a, 'b> IntoDynNode<'b> for &'a str {
|
|||
impl IntoDynNode<'_> for String {
|
||||
fn into_vnode(self, cx: &ScopeState) -> DynamicNode {
|
||||
DynamicNode::Text(VText {
|
||||
value: cx.bump().alloc(self),
|
||||
value: cx.bump().alloc_str(&self),
|
||||
id: Default::default(),
|
||||
})
|
||||
}
|
||||
|
@ -791,6 +791,12 @@ impl<'a> IntoAttributeValue<'a> for &'a str {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoAttributeValue<'a> for String {
|
||||
fn into_value(self, cx: &'a Bump) -> AttributeValue<'a> {
|
||||
AttributeValue::Text(cx.alloc_str(&self))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoAttributeValue<'a> for f64 {
|
||||
fn into_value(self, _: &'a Bump) -> AttributeValue<'a> {
|
||||
AttributeValue::Float(self)
|
||||
|
|
|
@ -35,7 +35,7 @@ impl VirtualDom {
|
|||
hook_idx: Default::default(),
|
||||
|
||||
borrowed_props: Default::default(),
|
||||
attributes_to_drop: Default::default(),
|
||||
attributes_to_drop_before_render: Default::default(),
|
||||
}));
|
||||
|
||||
let context =
|
||||
|
@ -54,7 +54,7 @@ impl VirtualDom {
|
|||
|
||||
let new_nodes = unsafe {
|
||||
let scope = &self.scopes[scope_id.0];
|
||||
scope.previous_frame().bump_mut().reset();
|
||||
scope.previous_frame().reset();
|
||||
|
||||
scope.context().suspended.set(false);
|
||||
|
||||
|
|
|
@ -94,7 +94,7 @@ pub struct ScopeState {
|
|||
pub(crate) hook_idx: Cell<usize>,
|
||||
|
||||
pub(crate) borrowed_props: RefCell<Vec<*const VComponent<'static>>>,
|
||||
pub(crate) attributes_to_drop: RefCell<Vec<*const Attribute<'static>>>,
|
||||
pub(crate) attributes_to_drop_before_render: RefCell<Vec<*const Attribute<'static>>>,
|
||||
|
||||
pub(crate) props: Option<Box<dyn AnyProps<'static>>>,
|
||||
}
|
||||
|
@ -348,25 +348,36 @@ impl<'src> ScopeState {
|
|||
pub fn render(&'src self, rsx: LazyNodes<'src, '_>) -> Element<'src> {
|
||||
let element = rsx.call(self);
|
||||
|
||||
let mut listeners = self.attributes_to_drop.borrow_mut();
|
||||
let mut listeners = self.attributes_to_drop_before_render.borrow_mut();
|
||||
for attr in element.dynamic_attrs {
|
||||
match attr.value {
|
||||
AttributeValue::Any(_) | AttributeValue::Listener(_) => {
|
||||
// We need to drop listeners before the next render because they may borrow data from the borrowed props which will be dropped
|
||||
AttributeValue::Listener(_) => {
|
||||
let unbounded = unsafe { std::mem::transmute(attr as *const Attribute) };
|
||||
listeners.push(unbounded);
|
||||
}
|
||||
// We need to drop any values manually to make sure that their drop implementation is called before the next render
|
||||
AttributeValue::Any(_) => {
|
||||
let unbounded = unsafe { std::mem::transmute(attr as *const Attribute) };
|
||||
self.previous_frame().add_attribute_to_drop(unbounded);
|
||||
}
|
||||
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
|
||||
let mut props = self.borrowed_props.borrow_mut();
|
||||
let mut drop_props = self
|
||||
.previous_frame()
|
||||
.props_to_drop_before_reset
|
||||
.borrow_mut();
|
||||
for node in element.dynamic_nodes {
|
||||
if let DynamicNode::Component(comp) = node {
|
||||
if !comp.static_props {
|
||||
let unbounded = unsafe { std::mem::transmute(comp as *const VComponent) };
|
||||
if !comp.static_props {
|
||||
props.push(unbounded);
|
||||
}
|
||||
drop_props.push(unbounded);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -59,6 +59,7 @@ fullscreen = ["wry/fullscreen"]
|
|||
transparent = ["wry/transparent"]
|
||||
devtools = ["wry/devtools"]
|
||||
hot-reload = ["dioxus-hot-reload"]
|
||||
gnu = []
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
default-features = false
|
||||
|
|
9
packages/desktop/build.rs
Normal file
|
@ -0,0 +1,9 @@
|
|||
fn main() {
|
||||
// WARN about wry support on windows gnu targets. GNU windows targets don't work well in wry currently
|
||||
if std::env::var("CARGO_CFG_WINDOWS").is_ok()
|
||||
&& std::env::var("CARGO_CFG_TARGET_ENV").unwrap() == "gnu"
|
||||
&& !cfg!(feature = "gnu")
|
||||
{
|
||||
println!("cargo:warning=GNU windows targets have some limitations within Wry. Using the MSVC windows toolchain is recommended. If you would like to continue using GNU, you can read https://github.com/wravery/webview2-rs#cross-compilation and disable this warning by adding the gnu feature to dioxus-desktop in your Cargo.toml")
|
||||
}
|
||||
}
|
|
@ -55,7 +55,7 @@ use wry::{application::window::WindowId, webview::WebContext};
|
|||
///
|
||||
/// This function will start a multithreaded Tokio runtime as well the WebView event loop.
|
||||
///
|
||||
/// ```rust, ignore
|
||||
/// ```rust, no_run
|
||||
/// use dioxus::prelude::*;
|
||||
///
|
||||
/// fn main() {
|
||||
|
@ -78,11 +78,12 @@ pub fn launch(root: Component) {
|
|||
///
|
||||
/// You can configure the WebView window with a configuration closure
|
||||
///
|
||||
/// ```rust, ignore
|
||||
/// ```rust, no_run
|
||||
/// use dioxus::prelude::*;
|
||||
/// use dioxus_desktop::*;
|
||||
///
|
||||
/// fn main() {
|
||||
/// dioxus_desktop::launch_cfg(app, |c| c.with_window(|w| w.with_title("My App")));
|
||||
/// dioxus_desktop::launch_cfg(app, Config::default().with_window(WindowBuilder::new().with_title("My App")));
|
||||
/// }
|
||||
///
|
||||
/// fn app(cx: Scope) -> Element {
|
||||
|
@ -101,8 +102,9 @@ pub fn launch_cfg(root: Component, config_builder: Config) {
|
|||
///
|
||||
/// You can configure the WebView window with a configuration closure
|
||||
///
|
||||
/// ```rust, ignore
|
||||
/// ```rust, no_run
|
||||
/// use dioxus::prelude::*;
|
||||
/// use dioxus_desktop::Config;
|
||||
///
|
||||
/// fn main() {
|
||||
/// dioxus_desktop::launch_with_props(app, AppProps { name: "asd" }, Config::default());
|
||||
|
|
|
@ -15,21 +15,21 @@ fn app(cx: Scope) -> Element {
|
|||
let mapping: DioxusElementToNodeId = cx.consume_context().unwrap();
|
||||
// disable templates so that every node has an id and can be queried
|
||||
cx.render(rsx! {
|
||||
div{
|
||||
div {
|
||||
width: "100%",
|
||||
background_color: "hsl({hue}, 70%, {brightness}%)",
|
||||
onmousemove: move |evt| {
|
||||
if let RenderReturn::Ready(node) = cx.root_node() {
|
||||
if let Some(id) = node.root_ids.borrow().get(0).cloned() {
|
||||
if let Some(id) = node.root_ids.borrow().first().cloned() {
|
||||
let node = tui_query.get(mapping.get_node_id(id).unwrap());
|
||||
let Size{width, height} = node.size().unwrap();
|
||||
let Size { width, height } = node.size().unwrap();
|
||||
let pos = evt.inner().element_coordinates();
|
||||
hue.set((pos.x as f32/width as f32)*255.0);
|
||||
brightness.set((pos.y as f32/height as f32)*100.0);
|
||||
hue.set((pos.x as f32 / width as f32) * 255.0);
|
||||
brightness.set((pos.y as f32 / height as f32) * 100.0);
|
||||
}
|
||||
}
|
||||
},
|
||||
"hsl({hue}, 70%, {brightness}%)",
|
||||
"hsl({hue}, 70%, {brightness}%)"
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
|
@ -1,17 +1,39 @@
|
|||
//! This file exports functions into the vscode extension
|
||||
|
||||
use dioxus_autofmt::FormattedBlock;
|
||||
use dioxus_autofmt::{FormattedBlock, IndentOptions, IndentType};
|
||||
use wasm_bindgen::prelude::*;
|
||||
|
||||
#[wasm_bindgen]
|
||||
pub fn format_rsx(raw: String) -> String {
|
||||
let block = dioxus_autofmt::fmt_block(&raw, 0);
|
||||
pub fn format_rsx(raw: String, use_tabs: bool, indent_size: usize) -> String {
|
||||
let block = dioxus_autofmt::fmt_block(
|
||||
&raw,
|
||||
0,
|
||||
IndentOptions::new(
|
||||
if use_tabs {
|
||||
IndentType::Tabs
|
||||
} else {
|
||||
IndentType::Spaces
|
||||
},
|
||||
indent_size,
|
||||
),
|
||||
);
|
||||
block.unwrap()
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
pub fn format_selection(raw: String) -> String {
|
||||
let block = dioxus_autofmt::fmt_block(&raw, 0);
|
||||
pub fn format_selection(raw: String, use_tabs: bool, indent_size: usize) -> String {
|
||||
let block = dioxus_autofmt::fmt_block(
|
||||
&raw,
|
||||
0,
|
||||
IndentOptions::new(
|
||||
if use_tabs {
|
||||
IndentType::Tabs
|
||||
} else {
|
||||
IndentType::Spaces
|
||||
},
|
||||
indent_size,
|
||||
),
|
||||
);
|
||||
block.unwrap()
|
||||
}
|
||||
|
||||
|
@ -35,8 +57,18 @@ impl FormatBlockInstance {
|
|||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
pub fn format_file(contents: String) -> FormatBlockInstance {
|
||||
let _edits = dioxus_autofmt::fmt_file(&contents);
|
||||
pub fn format_file(contents: String, use_tabs: bool, indent_size: usize) -> FormatBlockInstance {
|
||||
let _edits = dioxus_autofmt::fmt_file(
|
||||
&contents,
|
||||
IndentOptions::new(
|
||||
if use_tabs {
|
||||
IndentType::Tabs
|
||||
} else {
|
||||
IndentType::Spaces
|
||||
},
|
||||
indent_size,
|
||||
),
|
||||
);
|
||||
let out = dioxus_autofmt::apply_formats(&contents, _edits.clone());
|
||||
FormatBlockInstance { new: out, _edits }
|
||||
}
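For reference, a minimal sketch of calling the underlying crate directly with the new `IndentOptions` argument; the helper name and the four-space indent are illustrative:

```rust
use dioxus_autofmt::{IndentOptions, IndentType};

// Format every rsx! block in a file with a four-space indent.
fn format_with_spaces(contents: &str) -> String {
    let edits = dioxus_autofmt::fmt_file(contents, IndentOptions::new(IndentType::Spaces, 4));
    dioxus_autofmt::apply_formats(contents, edits)
}
```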
|
||||
|
|
|
@ -90,7 +90,13 @@ function fmtDocument(document: vscode.TextDocument) {
|
|||
if (!editor) return; // Need an editor to apply text edits.
|
||||
|
||||
const contents = editor.document.getText();
|
||||
const formatted = dioxus.format_file(contents);
|
||||
let tabSize: number;
|
||||
if (typeof editor.options.tabSize === 'number') {
|
||||
tabSize = editor.options.tabSize;
|
||||
} else {
|
||||
tabSize = 4;
|
||||
}
|
||||
const formatted = dioxus.format_file(contents, !editor.options.insertSpaces, tabSize);
|
||||
|
||||
// Replace the entire text document
|
||||
// Yes, this is a bit heavy handed, but the dioxus side doesn't know the line/col scheme that vscode is using
|
||||
|
|
|
@ -7,6 +7,6 @@ use dioxus_core::ScopeState;
|
|||
pub fn use_atom_root(cx: &ScopeState) -> &Rc<AtomRoot> {
|
||||
cx.use_hook(|| match cx.consume_context::<Rc<AtomRoot>>() {
|
||||
Some(root) => root,
|
||||
None => panic!("No atom root found in context. Did you forget place an AtomRoot component at the top of your app?"),
|
||||
None => panic!("No atom root found in context. Did you forget to call use_init_atom_root at the top of your app?"),
|
||||
})
|
||||
}
|
||||
|
|
|
@ -86,7 +86,9 @@ impl<T: 'static> AtomState<T> {
|
|||
/// ```
|
||||
#[must_use]
|
||||
pub fn current(&self) -> Rc<T> {
|
||||
self.value.as_ref().unwrap().clone()
|
||||
let atoms = self.root.atoms.borrow();
|
||||
let slot = atoms.get(&self.id).unwrap();
|
||||
slot.value.clone().downcast().unwrap()
|
||||
}
|
||||
|
||||
/// Get the `setter` function directly without the `AtomState` wrapper.
|
||||
|
|
|
@ -22,8 +22,6 @@ mod atoms {
|
|||
pub use atom::*;
|
||||
pub use atomfamily::*;
|
||||
pub use atomref::*;
|
||||
pub use selector::*;
|
||||
pub use selectorfamily::*;
|
||||
}
|
||||
|
||||
pub mod hooks {
|
||||
|
|
|
@ -11,7 +11,7 @@ keywords = ["ui", "gui", "react", "ssr", "fullstack"]
|
|||
|
||||
[dependencies]
|
||||
# server functions
|
||||
server_fn = { version = "0.4.6", default-features = false }
|
||||
server_fn = { version = "0.5.2", default-features = false }
|
||||
dioxus_server_macro = { workspace = true }
|
||||
|
||||
# warp
|
||||
|
|
|
@ -24,6 +24,7 @@ fn app(cx: Scope<AppProps>) -> Element {
|
|||
|
||||
let mut count = use_state(cx, || 0);
|
||||
let text = use_state(cx, || "...".to_string());
|
||||
let eval = use_eval(cx);
|
||||
|
||||
cx.render(rsx! {
|
||||
div {
|
||||
|
|
|
@ -369,15 +369,65 @@ fn apply_request_parts_to_response<B>(
|
|||
}
|
||||
}
|
||||
|
||||
/// SSR renderer handler for Axum
|
||||
pub async fn render_handler<P: Clone + serde::Serialize + Send + Sync + 'static>(
|
||||
State((cfg, ssr_state)): State<(ServeConfig<P>, SSRState)>,
|
||||
/// SSR renderer handler for Axum with added context injection.
|
||||
///
|
||||
/// # Example
|
||||
/// ```rust,no_run
|
||||
/// #![allow(non_snake_case)]
|
||||
/// use std::sync::{Arc, Mutex};
|
||||
///
|
||||
/// use axum::routing::get;
|
||||
/// use dioxus::prelude::*;
|
||||
/// use dioxus_fullstack::{axum_adapter::render_handler_with_context, prelude::*};
|
||||
///
|
||||
/// fn app(cx: Scope) -> Element {
|
||||
/// render! {
|
||||
/// "hello!"
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// #[tokio::main]
|
||||
/// async fn main() {
|
||||
/// let cfg = ServeConfigBuilder::new(app, ())
|
||||
/// .assets_path("dist")
|
||||
/// .build();
|
||||
/// let ssr_state = SSRState::new(&cfg);
|
||||
///
|
||||
/// // This could be any state you want to be accessible from your server
|
||||
/// // functions using `[DioxusServerContext::get]`.
|
||||
/// let state = Arc::new(Mutex::new("state".to_string()));
|
||||
///
|
||||
/// let addr = std::net::SocketAddr::from(([127, 0, 0, 1], 8080));
|
||||
/// axum::Server::bind(&addr)
|
||||
/// .serve(
|
||||
/// axum::Router::new()
|
||||
/// // Register server functions, etc.
|
||||
/// // Note you probably want to use `register_server_fns_with_handler`
|
||||
/// // to inject the context into server functions running outside
|
||||
/// // of an SSR render context.
|
||||
/// .fallback(get(render_handler_with_context).with_state((
|
||||
/// move |ctx| ctx.insert(state.clone()).unwrap(),
|
||||
/// cfg,
|
||||
/// ssr_state,
|
||||
/// )))
|
||||
/// .into_make_service(),
|
||||
/// )
|
||||
/// .await
|
||||
/// .unwrap();
|
||||
/// }
|
||||
/// ```
|
||||
pub async fn render_handler_with_context<
|
||||
P: Clone + serde::Serialize + Send + Sync + 'static,
|
||||
F: FnMut(&mut DioxusServerContext),
|
||||
>(
|
||||
State((mut inject_context, cfg, ssr_state)): State<(F, ServeConfig<P>, SSRState)>,
|
||||
request: Request<Body>,
|
||||
) -> impl IntoResponse {
|
||||
let (parts, _) = request.into_parts();
|
||||
let url = parts.uri.path_and_query().unwrap().to_string();
|
||||
let parts: Arc<RwLock<http::request::Parts>> = Arc::new(RwLock::new(parts.into()));
|
||||
let server_context = DioxusServerContext::new(parts.clone());
|
||||
let mut server_context = DioxusServerContext::new(parts.clone());
|
||||
inject_context(&mut server_context);
|
||||
|
||||
match ssr_state.render(url, &cfg, &server_context).await {
|
||||
Ok(rendered) => {
|
||||
|
@ -395,6 +445,14 @@ pub async fn render_handler<P: Clone + serde::Serialize + Send + Sync + 'static>
|
|||
}
|
||||
}
|
||||
|
||||
/// SSR renderer handler for Axum
|
||||
pub async fn render_handler<P: Clone + serde::Serialize + Send + Sync + 'static>(
|
||||
State((cfg, ssr_state)): State<(ServeConfig<P>, SSRState)>,
|
||||
request: Request<Body>,
|
||||
) -> impl IntoResponse {
|
||||
render_handler_with_context(State((|_: &mut _| (), cfg, ssr_state)), request).await
|
||||
}
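A minimal sketch of wiring this plain handler into axum when no extra context is needed, mirroring the doc example above; the component body, port, and `dist` path are illustrative:

```rust
use axum::routing::get;
use dioxus::prelude::*;
use dioxus_fullstack::{axum_adapter::render_handler, prelude::*};

fn app(cx: Scope) -> Element {
    render! { "hello!" }
}

#[tokio::main]
async fn main() {
    let cfg = ServeConfigBuilder::new(app, ()).assets_path("dist").build();
    let ssr_state = SSRState::new(&cfg);

    let addr = std::net::SocketAddr::from(([127, 0, 0, 1], 8080));
    axum::Server::bind(&addr)
        .serve(
            axum::Router::new()
                // No context to inject, so the plain handler is enough.
                .fallback(get(render_handler).with_state((cfg, ssr_state)))
                .into_make_service(),
        )
        .await
        .unwrap();
}
```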
|
||||
|
||||
fn report_err<E: std::fmt::Display>(e: E) -> Response<BoxBody> {
|
||||
Response::builder()
|
||||
.status(StatusCode::INTERNAL_SERVER_ERROR)
|
||||
|
|
|
@ -89,9 +89,9 @@ impl Service for ServerFnHandler {
|
|||
let parts = Arc::new(RwLock::new(parts));
|
||||
|
||||
// Because the future returned by `server_fn_handler` is `Send`, and the future returned by this function must be send, we need to spawn a new runtime
|
||||
let (resp_tx, resp_rx) = tokio::sync::oneshot::channel();
|
||||
let pool = get_local_pool();
|
||||
pool.spawn_pinned({
|
||||
let result = pool
|
||||
.spawn_pinned({
|
||||
let function = function.clone();
|
||||
let mut server_context = server_context.clone();
|
||||
server_context.parts = parts;
|
||||
|
@ -101,14 +101,14 @@ impl Service for ServerFnHandler {
|
|||
Encoding::GetJSON | Encoding::GetCBOR => &query,
|
||||
};
|
||||
let server_function_future = function.call((), data);
|
||||
let server_function_future =
|
||||
ProvideServerContext::new(server_function_future, server_context.clone());
|
||||
let resp = server_function_future.await;
|
||||
|
||||
resp_tx.send(resp).unwrap();
|
||||
let server_function_future = ProvideServerContext::new(
|
||||
server_function_future,
|
||||
server_context.clone(),
|
||||
);
|
||||
server_function_future.await
|
||||
}
|
||||
});
|
||||
let result = resp_rx.await.unwrap();
|
||||
})
|
||||
.await?;
|
||||
let mut res = http::Response::builder();
|
||||
|
||||
// Set the headers from the server context
|
||||
|
|
|
@ -3,7 +3,9 @@ use tracing_futures::Instrument;
|
|||
|
||||
use http::{Request, Response};
|
||||
|
||||
/// A layer that wraps a service. This can be used to add additional information to the request or response on top of some other service.
|
||||
pub trait Layer: Send + Sync + 'static {
|
||||
/// Wrap a boxed service with this layer
|
||||
fn layer(&self, inner: BoxedService) -> BoxedService;
|
||||
}
|
||||
|
||||
|
@ -17,7 +19,9 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
/// A service is a function that takes a request and returns an async response
|
||||
pub trait Service {
|
||||
/// Run the service and produce a future that resolves to a response
|
||||
fn run(
|
||||
&mut self,
|
||||
req: http::Request<hyper::body::Body>,
|
||||
|
@ -55,6 +59,7 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
/// A boxed service is a type-erased service that can be used without knowing the underlying type
|
||||
pub struct BoxedService(pub Box<dyn Service + Send>);
|
||||
|
||||
impl tower::Service<http::Request<hyper::body::Body>> for BoxedService {
|
||||
|
|
|
@ -40,6 +40,8 @@ pub mod prelude {
|
|||
#[cfg(not(feature = "ssr"))]
|
||||
pub use crate::html_storage::deserialize::get_root_props_from_document;
|
||||
pub use crate::launch::LaunchBuilder;
|
||||
#[cfg(feature = "ssr")]
|
||||
pub use crate::layer::{Layer, Service};
|
||||
#[cfg(all(feature = "ssr", feature = "router"))]
|
||||
pub use crate::render::pre_cache_static_routes_with_props;
|
||||
#[cfg(feature = "ssr")]
|
||||
|
|
|
@ -45,6 +45,8 @@ impl SsrRendererPool {
|
|||
.expect("couldn't spawn runtime")
|
||||
.block_on(async move {
|
||||
let mut vdom = VirtualDom::new_with_props(component, props);
|
||||
// Make sure the evaluator is initialized
|
||||
dioxus_ssr::eval::init_eval(vdom.base_scope());
|
||||
let mut to = WriteBuffer { buffer: Vec::new() };
|
||||
// before polling the future, we need to set the context
|
||||
let prev_context =
|
||||
|
|
|
@ -53,7 +53,7 @@ fn default_external_navigation_handler() -> fn(Scope) -> Element {
|
|||
dioxus_router::prelude::FailureExternalNavigation
|
||||
}
|
||||
|
||||
/// The configeration for the router
|
||||
/// The configuration for the router
|
||||
#[derive(Props, serde::Serialize, serde::Deserialize)]
|
||||
pub struct FullstackRouterConfig<R>
|
||||
where
|
||||
|
|
|
@ -125,14 +125,6 @@ impl server_fn::ServerFunctionRegistry<()> for DioxusServerFnRegistry {
|
|||
}
|
||||
}
|
||||
|
||||
fn register(
|
||||
url: &'static str,
|
||||
server_function: ServerFunction,
|
||||
encoding: server_fn::Encoding,
|
||||
) -> Result<(), Self::Error> {
|
||||
Self::register_explicit("", url, server_function, encoding)
|
||||
}
|
||||
|
||||
/// Returns the server function registered at the given URL, or `None` if no function is registered at that URL.
|
||||
fn get(url: &str) -> Option<server_fn::ServerFnTraitObj<()>> {
|
||||
REGISTERED_SERVER_FUNCTIONS
|
||||
|
|
|
@ -1,9 +1,12 @@
|
|||
[package]
|
||||
name = "generational-box"
|
||||
authors = ["Evan Almloff"]
|
||||
version = "0.0.0"
|
||||
version = "0.4.3"
|
||||
edition = "2018"
|
||||
|
||||
description = "A box backed by a generational runtime"
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/DioxusLabs/dioxus/"
|
||||
keywords = ["generational", "box", "memory", "allocator"]
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
|
@ -15,3 +18,5 @@ rand = "0.8.5"
|
|||
[features]
|
||||
default = ["check_generation"]
|
||||
check_generation = []
|
||||
debug_borrows = []
|
||||
debug_ownership = []
|
||||
|
|
|
@ -11,6 +11,8 @@ Three main types manage state in Generational Box:
|
|||
Example:
|
||||
|
||||
```rust
|
||||
use generational_box::Store;
|
||||
|
||||
// Create a store for this thread
|
||||
let store = Store::default();
|
||||
|
||||
|
|
|
@ -2,9 +2,12 @@
|
|||
#![warn(missing_docs)]
|
||||
|
||||
use std::{
|
||||
any::Any,
|
||||
cell::{Cell, Ref, RefCell, RefMut},
|
||||
fmt::Debug,
|
||||
error::Error,
|
||||
fmt::{Debug, Display},
|
||||
marker::PhantomData,
|
||||
ops::{Deref, DerefMut},
|
||||
rc::Rc,
|
||||
};
|
||||
|
||||
|
@ -29,12 +32,12 @@ fn reused() {
|
|||
let first_ptr;
|
||||
{
|
||||
let owner = store.owner();
|
||||
first_ptr = owner.insert(1).raw.data.as_ptr();
|
||||
first_ptr = owner.insert(1).raw.0.data.as_ptr();
|
||||
drop(owner);
|
||||
}
|
||||
{
|
||||
let owner = store.owner();
|
||||
let second_ptr = owner.insert(1234).raw.data.as_ptr();
|
||||
let second_ptr = owner.insert(1234).raw.0.data.as_ptr();
|
||||
assert_eq!(first_ptr, second_ptr);
|
||||
drop(owner);
|
||||
}
|
||||
|
@ -53,7 +56,10 @@ fn leaking_is_ok() {
|
|||
// don't drop the owner
|
||||
std::mem::forget(owner);
|
||||
}
|
||||
assert_eq!(key.try_read().as_deref(), Some(&"hello world".to_string()));
|
||||
assert_eq!(
|
||||
key.try_read().as_deref().unwrap(),
|
||||
&"hello world".to_string()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -68,7 +74,7 @@ fn drops() {
|
|||
key = owner.insert(data);
|
||||
// drop the owner
|
||||
}
|
||||
assert!(key.try_read().is_none());
|
||||
assert!(key.try_read().is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -129,7 +135,7 @@ fn fuzz() {
|
|||
println!("{:?}", path);
|
||||
for key in valid_keys.iter() {
|
||||
let value = key.read();
|
||||
println!("{:?}", value);
|
||||
println!("{:?}", &*value);
|
||||
assert!(value.starts_with("hello world"));
|
||||
}
|
||||
#[cfg(any(debug_assertions, feature = "check_generation"))]
|
||||
|
@ -153,6 +159,8 @@ pub struct GenerationalBox<T> {
|
|||
raw: MemoryLocation,
|
||||
#[cfg(any(debug_assertions, feature = "check_generation"))]
|
||||
generation: u32,
|
||||
#[cfg(any(debug_assertions, feature = "debug_ownership"))]
|
||||
created_at: &'static std::panic::Location<'static>,
|
||||
_marker: PhantomData<T>,
|
||||
}
|
||||
|
||||
|
@ -161,7 +169,7 @@ impl<T: 'static> Debug for GenerationalBox<T> {
|
|||
#[cfg(any(debug_assertions, feature = "check_generation"))]
|
||||
f.write_fmt(format_args!(
|
||||
"{:?}@{:?}",
|
||||
self.raw.data.as_ptr(),
|
||||
self.raw.0.data.as_ptr(),
|
||||
self.generation
|
||||
))?;
|
||||
#[cfg(not(any(debug_assertions, feature = "check_generation")))]
|
||||
|
@ -175,7 +183,7 @@ impl<T: 'static> GenerationalBox<T> {
|
|||
fn validate(&self) -> bool {
|
||||
#[cfg(any(debug_assertions, feature = "check_generation"))]
|
||||
{
|
||||
self.raw.generation.get() == self.generation
|
||||
self.raw.0.generation.get() == self.generation
|
||||
}
|
||||
#[cfg(not(any(debug_assertions, feature = "check_generation")))]
|
||||
{
|
||||
|
@ -184,43 +192,51 @@ impl<T: 'static> GenerationalBox<T> {
|
|||
}
|
||||
|
||||
/// Try to read the value. Returns None if the value is no longer valid.
|
||||
pub fn try_read(&self) -> Option<Ref<'static, T>> {
|
||||
self.validate()
|
||||
.then(|| {
|
||||
Ref::filter_map(self.raw.data.borrow(), |any| {
|
||||
any.as_ref()?.downcast_ref::<T>()
|
||||
})
|
||||
.ok()
|
||||
})
|
||||
.flatten()
|
||||
#[track_caller]
|
||||
pub fn try_read(&self) -> Result<GenerationalRef<T>, BorrowError> {
|
||||
if !self.validate() {
|
||||
return Err(BorrowError::Dropped(ValueDroppedError {
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
created_at: self.created_at,
|
||||
}));
|
||||
}
|
||||
self.raw.try_borrow(
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
self.created_at,
|
||||
)
|
||||
}
|
||||
|
||||
/// Read the value. Panics if the value is no longer valid.
|
||||
pub fn read(&self) -> Ref<'static, T> {
|
||||
#[track_caller]
|
||||
pub fn read(&self) -> GenerationalRef<T> {
|
||||
self.try_read().unwrap()
|
||||
}
|
||||
|
||||
/// Try to write the value. Returns None if the value is no longer valid.
|
||||
pub fn try_write(&self) -> Option<RefMut<'static, T>> {
|
||||
self.validate()
|
||||
.then(|| {
|
||||
RefMut::filter_map(self.raw.data.borrow_mut(), |any| {
|
||||
any.as_mut()?.downcast_mut::<T>()
|
||||
})
|
||||
.ok()
|
||||
})
|
||||
.flatten()
|
||||
#[track_caller]
|
||||
pub fn try_write(&self) -> Result<GenerationalRefMut<T>, BorrowMutError> {
|
||||
if !self.validate() {
|
||||
return Err(BorrowMutError::Dropped(ValueDroppedError {
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
created_at: self.created_at,
|
||||
}));
|
||||
}
|
||||
self.raw.try_borrow_mut(
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
self.created_at,
|
||||
)
|
||||
}
|
||||
|
||||
/// Write the value. Panics if the value is no longer valid.
|
||||
pub fn write(&self) -> RefMut<'static, T> {
|
||||
#[track_caller]
|
||||
pub fn write(&self) -> GenerationalRefMut<T> {
|
||||
self.try_write().unwrap()
|
||||
}
|
||||
|
||||
/// Set the value. Panics if the value is no longer valid.
|
||||
pub fn set(&self, value: T) {
|
||||
self.validate().then(|| {
|
||||
*self.raw.data.borrow_mut() = Some(Box::new(value));
|
||||
*self.raw.0.data.borrow_mut() = Some(Box::new(value));
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -228,7 +244,8 @@ impl<T: 'static> GenerationalBox<T> {
|
|||
pub fn ptr_eq(&self, other: &Self) -> bool {
|
||||
#[cfg(any(debug_assertions, feature = "check_generation"))]
|
||||
{
|
||||
self.raw.data.as_ptr() == other.raw.data.as_ptr() && self.generation == other.generation
|
||||
self.raw.0.data.as_ptr() == other.raw.0.data.as_ptr()
|
||||
&& self.generation == other.generation
|
||||
}
|
||||
#[cfg(not(any(debug_assertions, feature = "check_generation")))]
|
||||
{
|
||||
|
@ -246,26 +263,37 @@ impl<T> Clone for GenerationalBox<T> {
|
|||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
struct MemoryLocation {
|
||||
data: &'static RefCell<Option<Box<dyn std::any::Any>>>,
|
||||
struct MemoryLocation(&'static MemoryLocationInner);
|
||||
|
||||
struct MemoryLocationInner {
|
||||
data: RefCell<Option<Box<dyn std::any::Any>>>,
|
||||
#[cfg(any(debug_assertions, feature = "check_generation"))]
|
||||
generation: &'static Cell<u32>,
|
||||
generation: Cell<u32>,
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrowed_at: RefCell<Vec<&'static std::panic::Location<'static>>>,
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrowed_mut_at: Cell<Option<&'static std::panic::Location<'static>>>,
|
||||
}
|
||||
|
||||
impl MemoryLocation {
|
||||
#[allow(unused)]
|
||||
fn drop(&self) {
|
||||
let old = self.data.borrow_mut().take();
|
||||
let old = self.0.data.borrow_mut().take();
|
||||
#[cfg(any(debug_assertions, feature = "check_generation"))]
|
||||
if old.is_some() {
|
||||
drop(old);
|
||||
let new_generation = self.generation.get() + 1;
|
||||
self.generation.set(new_generation);
|
||||
let new_generation = self.0.generation.get() + 1;
|
||||
self.0.generation.set(new_generation);
|
||||
}
|
||||
}
|
||||
|
||||
fn replace<T: 'static>(&mut self, value: T) -> GenerationalBox<T> {
|
||||
let mut inner_mut = self.data.borrow_mut();
|
||||
fn replace_with_caller<T: 'static>(
|
||||
&mut self,
|
||||
value: T,
|
||||
#[cfg(any(debug_assertions, feature = "debug_ownership"))]
|
||||
caller: &'static std::panic::Location<'static>,
|
||||
) -> GenerationalBox<T> {
|
||||
let mut inner_mut = self.0.data.borrow_mut();
|
||||
|
||||
let raw = Box::new(value);
|
||||
let old = inner_mut.replace(raw);
|
||||
|
@ -273,10 +301,315 @@ impl MemoryLocation {
|
|||
GenerationalBox {
|
||||
raw: *self,
|
||||
#[cfg(any(debug_assertions, feature = "check_generation"))]
|
||||
generation: self.generation.get(),
|
||||
generation: self.0.generation.get(),
|
||||
#[cfg(any(debug_assertions, feature = "debug_ownership"))]
|
||||
created_at: caller,
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn try_borrow<T: Any>(
|
||||
&self,
|
||||
#[cfg(any(debug_assertions, feature = "debug_ownership"))]
|
||||
created_at: &'static std::panic::Location<'static>,
|
||||
) -> Result<GenerationalRef<T>, BorrowError> {
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
self.0
|
||||
.borrowed_at
|
||||
.borrow_mut()
|
||||
.push(std::panic::Location::caller());
|
||||
match self.0.data.try_borrow() {
|
||||
Ok(borrow) => match Ref::filter_map(borrow, |any| any.as_ref()?.downcast_ref::<T>()) {
|
||||
Ok(reference) => Ok(GenerationalRef {
|
||||
inner: reference,
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrow: GenerationalRefBorrowInfo {
|
||||
borrowed_at: std::panic::Location::caller(),
|
||||
borrowed_from: self.0,
|
||||
},
|
||||
}),
|
||||
Err(_) => Err(BorrowError::Dropped(ValueDroppedError {
|
||||
#[cfg(any(debug_assertions, feature = "debug_ownership"))]
|
||||
created_at,
|
||||
})),
|
||||
},
|
||||
Err(_) => Err(BorrowError::AlreadyBorrowedMut(AlreadyBorrowedMutError {
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrowed_mut_at: self.0.borrowed_mut_at.get().unwrap(),
|
||||
})),
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn try_borrow_mut<T: Any>(
|
||||
&self,
|
||||
#[cfg(any(debug_assertions, feature = "debug_ownership"))]
|
||||
created_at: &'static std::panic::Location<'static>,
|
||||
) -> Result<GenerationalRefMut<T>, BorrowMutError> {
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
{
|
||||
self.0
|
||||
.borrowed_mut_at
|
||||
.set(Some(std::panic::Location::caller()));
|
||||
}
|
||||
match self.0.data.try_borrow_mut() {
|
||||
Ok(borrow_mut) => {
|
||||
match RefMut::filter_map(borrow_mut, |any| any.as_mut()?.downcast_mut::<T>()) {
|
||||
Ok(reference) => Ok(GenerationalRefMut {
|
||||
inner: reference,
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrow: GenerationalRefMutBorrowInfo {
|
||||
borrowed_from: self.0,
|
||||
},
|
||||
}),
|
||||
Err(_) => Err(BorrowMutError::Dropped(ValueDroppedError {
|
||||
#[cfg(any(debug_assertions, feature = "debug_ownership"))]
|
||||
created_at,
|
||||
})),
|
||||
}
|
||||
}
|
||||
Err(_) => Err(BorrowMutError::AlreadyBorrowed(AlreadyBorrowedError {
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrowed_at: self.0.borrowed_at.borrow().clone(),
|
||||
})),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
/// An error that can occur when trying to borrow a value.
|
||||
pub enum BorrowError {
|
||||
/// The value was dropped.
|
||||
Dropped(ValueDroppedError),
|
||||
/// The value was already borrowed mutably.
|
||||
AlreadyBorrowedMut(AlreadyBorrowedMutError),
|
||||
}
|
||||
|
||||
impl Display for BorrowError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
BorrowError::Dropped(error) => Display::fmt(error, f),
|
||||
BorrowError::AlreadyBorrowedMut(error) => Display::fmt(error, f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for BorrowError {}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
/// An error that can occur when trying to borrow a value mutably.
|
||||
pub enum BorrowMutError {
|
||||
/// The value was dropped.
|
||||
Dropped(ValueDroppedError),
|
||||
/// The value was already borrowed.
|
||||
AlreadyBorrowed(AlreadyBorrowedError),
|
||||
/// The value was already borrowed mutably.
|
||||
AlreadyBorrowedMut(AlreadyBorrowedMutError),
|
||||
}
|
||||
|
||||
impl Display for BorrowMutError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
BorrowMutError::Dropped(error) => Display::fmt(error, f),
|
||||
BorrowMutError::AlreadyBorrowedMut(error) => Display::fmt(error, f),
|
||||
BorrowMutError::AlreadyBorrowed(error) => Display::fmt(error, f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for BorrowMutError {}
|
||||
|
||||
/// An error that can occur when trying to use a value that has been dropped.
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ValueDroppedError {
|
||||
#[cfg(any(debug_assertions, feature = "debug_ownership"))]
|
||||
created_at: &'static std::panic::Location<'static>,
|
||||
}
|
||||
|
||||
impl Display for ValueDroppedError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str("Failed to borrow because the value was dropped.")?;
|
||||
#[cfg(any(debug_assertions, feature = "debug_ownership"))]
|
||||
f.write_fmt(format_args!("created_at: {}", self.created_at))?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for ValueDroppedError {}
|
||||
|
||||
/// An error that can occur when trying to borrow a value that has already been borrowed mutably.
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct AlreadyBorrowedMutError {
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrowed_mut_at: &'static std::panic::Location<'static>,
|
||||
}
|
||||
|
||||
impl Display for AlreadyBorrowedMutError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str("Failed to borrow because the value was already borrowed mutably.")?;
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
f.write_fmt(format_args!("borrowed_mut_at: {}", self.borrowed_mut_at))?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for AlreadyBorrowedMutError {}
|
||||
|
||||
/// An error that can occur when trying to borrow a value mutably that has already been borrowed immutably.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AlreadyBorrowedError {
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrowed_at: Vec<&'static std::panic::Location<'static>>,
|
||||
}
|
||||
|
||||
impl Display for AlreadyBorrowedError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str("Failed to borrow mutably because the value was already borrowed immutably.")?;
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
f.write_str("borrowed_at:")?;
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
for location in self.borrowed_at.iter() {
|
||||
f.write_fmt(format_args!("\t{}", location))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for AlreadyBorrowedError {}
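A minimal sketch of how the new error-returning borrow API reads at a call site, assuming the crate is used standalone as in its README:

```rust
use generational_box::{BorrowError, Store};

fn main() {
    let store = Store::default();
    let owner = store.owner();
    let key = owner.insert(String::from("hello"));

    // Borrows now return Result instead of Option.
    *key.write() += " world";
    assert_eq!(key.read().as_str(), "hello world");

    // Dropping the owner drops the value; later reads report why they failed.
    drop(owner);
    match key.try_read() {
        Err(BorrowError::Dropped(_)) => println!("value was dropped with its owner"),
        _ => unreachable!(),
    }
}
```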
|
||||
|
||||
/// A reference to a value in a generational box.
|
||||
pub struct GenerationalRef<T: 'static> {
|
||||
inner: Ref<'static, T>,
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrow: GenerationalRefBorrowInfo,
|
||||
}
|
||||
|
||||
impl<T: 'static> GenerationalRef<T> {
|
||||
/// Map one ref type to another.
|
||||
pub fn map<U, F>(orig: GenerationalRef<T>, f: F) -> GenerationalRef<U>
|
||||
where
|
||||
F: FnOnce(&T) -> &U,
|
||||
{
|
||||
GenerationalRef {
|
||||
inner: Ref::map(orig.inner, f),
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrow: GenerationalRefBorrowInfo {
|
||||
borrowed_at: orig.borrow.borrowed_at,
|
||||
borrowed_from: orig.borrow.borrowed_from,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Filter one ref type to another.
|
||||
pub fn filter_map<U, F>(orig: GenerationalRef<T>, f: F) -> Option<GenerationalRef<U>>
|
||||
where
|
||||
F: FnOnce(&T) -> Option<&U>,
|
||||
{
|
||||
let Self {
|
||||
inner,
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrow,
|
||||
} = orig;
|
||||
Ref::filter_map(inner, f).ok().map(|inner| GenerationalRef {
|
||||
inner,
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrow: GenerationalRefBorrowInfo {
|
||||
borrowed_at: borrow.borrowed_at,
|
||||
borrowed_from: borrow.borrowed_from,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static> Deref for GenerationalRef<T> {
|
||||
type Target = T;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.inner.deref()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
struct GenerationalRefBorrowInfo {
|
||||
borrowed_at: &'static std::panic::Location<'static>,
|
||||
borrowed_from: &'static MemoryLocationInner,
|
||||
}
|
||||
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
impl Drop for GenerationalRefBorrowInfo {
|
||||
fn drop(&mut self) {
|
||||
self.borrowed_from
|
||||
.borrowed_at
|
||||
.borrow_mut()
|
||||
.retain(|location| std::ptr::eq(*location, self.borrowed_at as *const _));
|
||||
}
|
||||
}
|
||||
|
||||
/// A mutable reference to a value in a generational box.
|
||||
pub struct GenerationalRefMut<T: 'static> {
|
||||
inner: RefMut<'static, T>,
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrow: GenerationalRefMutBorrowInfo,
|
||||
}
|
||||
|
||||
impl<T: 'static> GenerationalRefMut<T> {
|
||||
/// Map one ref type to another.
|
||||
pub fn map<U, F>(orig: GenerationalRefMut<T>, f: F) -> GenerationalRefMut<U>
|
||||
where
|
||||
F: FnOnce(&mut T) -> &mut U,
|
||||
{
|
||||
GenerationalRefMut {
|
||||
inner: RefMut::map(orig.inner, f),
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrow: orig.borrow,
|
||||
}
|
||||
}
|
||||
|
||||
/// Filter one ref type to another.
|
||||
pub fn filter_map<U, F>(orig: GenerationalRefMut<T>, f: F) -> Option<GenerationalRefMut<U>>
|
||||
where
|
||||
F: FnOnce(&mut T) -> Option<&mut U>,
|
||||
{
|
||||
let Self {
|
||||
inner,
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrow,
|
||||
} = orig;
|
||||
RefMut::filter_map(inner, f)
|
||||
.ok()
|
||||
.map(|inner| GenerationalRefMut {
|
||||
inner,
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrow,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static> Deref for GenerationalRefMut<T> {
|
||||
type Target = T;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.inner.deref()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static> DerefMut for GenerationalRefMut<T> {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
self.inner.deref_mut()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
struct GenerationalRefMutBorrowInfo {
|
||||
borrowed_from: &'static MemoryLocationInner,
|
||||
}
|
||||
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
impl Drop for GenerationalRefMutBorrowInfo {
|
||||
fn drop(&mut self) {
|
||||
self.borrowed_from.borrowed_mut_at.take();
|
||||
}
|
||||
}
|
||||
|
||||
/// Handles recycling generational boxes that have been dropped. Your application should have a single store, or one store per thread.
|
||||
|
@ -305,12 +638,16 @@ impl Store {
|
|||
if let Some(location) = self.recycled.borrow_mut().pop() {
|
||||
location
|
||||
} else {
|
||||
let data: &'static RefCell<_> = self.bump.alloc(RefCell::new(None));
|
||||
MemoryLocation {
|
||||
data,
|
||||
let data: &'static MemoryLocationInner = self.bump.alloc(MemoryLocationInner {
|
||||
data: RefCell::new(None),
|
||||
#[cfg(any(debug_assertions, feature = "check_generation"))]
|
||||
generation: self.bump.alloc(Cell::new(0)),
|
||||
}
|
||||
generation: Cell::new(0),
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrowed_at: Default::default(),
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
borrowed_mut_at: Default::default(),
|
||||
});
|
||||
MemoryLocation(data)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -331,9 +668,31 @@ pub struct Owner {
|
|||
|
||||
impl Owner {
|
||||
/// Insert a value into the store. The value will be dropped when the owner is dropped.
|
||||
#[track_caller]
|
||||
pub fn insert<T: 'static>(&self, value: T) -> GenerationalBox<T> {
|
||||
let mut location = self.store.claim();
|
||||
let key = location.replace(value);
|
||||
let key = location.replace_with_caller(
|
||||
value,
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
std::panic::Location::caller(),
|
||||
);
|
||||
self.owned.borrow_mut().push(location);
|
||||
key
|
||||
}
|
||||
|
||||
/// Insert a value into the store with a specific location blamed for creating the value. The value will be dropped when the owner is dropped.
|
||||
pub fn insert_with_caller<T: 'static>(
|
||||
&self,
|
||||
value: T,
|
||||
#[cfg(any(debug_assertions, feature = "debug_ownership"))]
|
||||
caller: &'static std::panic::Location<'static>,
|
||||
) -> GenerationalBox<T> {
|
||||
let mut location = self.store.claim();
|
||||
let key = location.replace_with_caller(
|
||||
value,
|
||||
#[cfg(any(debug_assertions, feature = "debug_borrows"))]
|
||||
caller,
|
||||
);
|
||||
self.owned.borrow_mut().push(location);
|
||||
key
|
||||
}
|
||||
|
@ -344,7 +703,9 @@ impl Owner {
|
|||
GenerationalBox {
|
||||
raw: location,
|
||||
#[cfg(any(debug_assertions, feature = "check_generation"))]
|
||||
generation: location.generation.get(),
|
||||
generation: location.0.generation.get(),
|
||||
#[cfg(any(debug_assertions, feature = "debug_ownership"))]
|
||||
created_at: std::panic::Location::caller(),
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -60,6 +60,9 @@ pub mod computed;
|
|||
mod use_on_destroy;
|
||||
pub use use_on_destroy::*;
|
||||
|
||||
mod use_const;
|
||||
pub use use_const::*;
|
||||
|
||||
mod use_context;
|
||||
pub use use_context::*;
|
||||
|
||||
|
|
76
packages/hooks/src/use_const.rs
Normal file
|
@ -0,0 +1,76 @@
|
|||
use std::rc::Rc;
|
||||
|
||||
use dioxus_core::prelude::*;
|
||||
|
||||
/// Store constant state between component renders.
|
||||
///
|
||||
/// UseConst allows you to store state that is initialized once and then remains constant across renders.
|
||||
/// You can only get an immutable reference after initialization.
|
||||
/// This can be useful for values that don't need to update reactively and can therefore be memoized easily.
|
||||
///
|
||||
/// ```rust, ignore
|
||||
/// struct ComplexData(i32);
|
||||
///
|
||||
/// fn Component(cx: Scope) -> Element {
|
||||
/// let id = use_const(cx, || ComplexData(100));
|
||||
///
|
||||
/// cx.render(rsx! {
|
||||
/// div { "{id.0}" }
|
||||
/// })
|
||||
/// }
|
||||
/// ```
|
||||
#[must_use]
|
||||
pub fn use_const<T: 'static>(
|
||||
cx: &ScopeState,
|
||||
initial_state_fn: impl FnOnce() -> T,
|
||||
) -> &UseConst<T> {
|
||||
cx.use_hook(|| UseConst {
|
||||
value: Rc::new(initial_state_fn()),
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct UseConst<T> {
|
||||
value: Rc<T>,
|
||||
}
|
||||
|
||||
impl<T> PartialEq for UseConst<T> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
Rc::ptr_eq(&self.value, &other.value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: core::fmt::Display> core::fmt::Display for UseConst<T> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.value.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> UseConst<T> {
|
||||
pub fn get_rc(&self) -> &Rc<T> {
|
||||
&self.value
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> std::ops::Deref for UseConst<T> {
|
||||
type Target = T;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.value.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn use_const_makes_sense() {
|
||||
#[allow(unused)]
|
||||
|
||||
fn app(cx: Scope) -> Element {
|
||||
let const_val = use_const(cx, || vec![0, 1, 2, 3]);
|
||||
|
||||
assert!(const_val[0] == 0);
|
||||
|
||||
// const_val.remove(0); // Cannot Compile, cannot get mutable reference now
|
||||
|
||||
None
|
||||
}
|
||||
}
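A minimal sketch of the hook in use; the `Settings` struct is invented for the example. Clones share the same `Rc`, so comparison is pointer identity and the value is never re-created:

```rust
use dioxus::prelude::*;
use dioxus_hooks::{use_const, UseConst};

struct Settings {
    theme: &'static str,
}

fn app(cx: Scope) -> Element {
    let settings = use_const(cx, || Settings { theme: "dark" });

    // Cheap to clone and compare: both handles point at the same Rc.
    let shared: UseConst<Settings> = settings.clone();
    assert!(shared == *settings);

    let theme = settings.theme;
    cx.render(rsx! {
        div { "theme: {theme}" }
    })
}
```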
|
|
@ -1,5 +1,10 @@
|
|||
use dioxus_core::{ScopeState, TaskId};
|
||||
use std::{any::Any, cell::Cell, future::Future};
|
||||
use std::{
|
||||
any::Any,
|
||||
cell::{Cell, RefCell},
|
||||
future::Future,
|
||||
rc::Rc,
|
||||
};
|
||||
|
||||
use crate::UseFutureDep;
|
||||
|
||||
|
@ -14,7 +19,7 @@ use crate::UseFutureDep;
|
|||
/// ## Arguments
|
||||
///
|
||||
/// - `dependencies`: a tuple of references to values that are `PartialEq` + `Clone`.
|
||||
/// - `future`: a closure that takes the `dependencies` as arguments and returns a `'static` future.
|
||||
/// - `future`: a closure that takes the `dependencies` as arguments and returns a `'static` future. That future may return nothing or a closure that will be executed when the dependencies change to clean up the effect.
|
||||
///
|
||||
/// ## Examples
|
||||
///
|
||||
|
@ -33,6 +38,16 @@ use crate::UseFutureDep;
|
|||
/// }
|
||||
/// });
|
||||
///
|
||||
/// // Only fetch the user data when the id changes.
|
||||
/// use_effect(cx, (id,), |(id,)| {
|
||||
/// to_owned![name];
|
||||
/// async move {
|
||||
/// let user = fetch_user(id).await;
|
||||
/// name.set(user.name);
|
||||
/// move || println!("Cleaning up from {}", id)
|
||||
/// }
|
||||
/// });
|
||||
///
|
||||
/// let name = name.get().clone().unwrap_or("Loading...".to_string());
|
||||
///
|
||||
/// render!(
|
||||
|
@ -45,34 +60,80 @@ use crate::UseFutureDep;
|
|||
/// render!(Profile { id: 0 })
|
||||
/// }
|
||||
/// ```
|
||||
pub fn use_effect<T, F, D>(cx: &ScopeState, dependencies: D, future: impl FnOnce(D::Out) -> F)
|
||||
pub fn use_effect<T, R, D>(cx: &ScopeState, dependencies: D, future: impl FnOnce(D::Out) -> R)
|
||||
where
|
||||
T: 'static,
|
||||
F: Future<Output = T> + 'static,
|
||||
D: UseFutureDep,
|
||||
R: UseEffectReturn<T>,
|
||||
{
|
||||
struct UseEffect {
|
||||
needs_regen: bool,
|
||||
task: Cell<Option<TaskId>>,
|
||||
dependencies: Vec<Box<dyn Any>>,
|
||||
cleanup: UseEffectCleanup,
|
||||
}
|
||||
|
||||
impl Drop for UseEffect {
|
||||
fn drop(&mut self) {
|
||||
if let Some(cleanup) = self.cleanup.borrow_mut().take() {
|
||||
cleanup();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let state = cx.use_hook(move || UseEffect {
|
||||
needs_regen: true,
|
||||
task: Cell::new(None),
|
||||
dependencies: Vec::new(),
|
||||
cleanup: Rc::new(RefCell::new(None)),
|
||||
});
|
||||
|
||||
if dependencies.clone().apply(&mut state.dependencies) || state.needs_regen {
|
||||
// Call the cleanup function if it exists
|
||||
if let Some(cleanup) = state.cleanup.borrow_mut().take() {
|
||||
cleanup();
|
||||
}
|
||||
|
||||
// We don't need regen anymore
|
||||
state.needs_regen = false;
|
||||
|
||||
// Create the new future
|
||||
let fut = future(dependencies.out());
|
||||
let return_value = future(dependencies.out());
|
||||
|
||||
state.task.set(Some(cx.push_future(async move {
|
||||
fut.await;
|
||||
})));
|
||||
if let Some(task) = return_value.apply(state.cleanup.clone(), cx) {
|
||||
state.task.set(Some(task));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type UseEffectCleanup = Rc<RefCell<Option<Box<dyn FnOnce()>>>>;
|
||||
|
||||
/// Something that can be returned from a `use_effect` hook.
|
||||
pub trait UseEffectReturn<T> {
|
||||
fn apply(self, oncleanup: UseEffectCleanup, cx: &ScopeState) -> Option<TaskId>;
|
||||
}
|
||||
|
||||
impl<T> UseEffectReturn<()> for T
|
||||
where
|
||||
T: Future<Output = ()> + 'static,
|
||||
{
|
||||
fn apply(self, _: UseEffectCleanup, cx: &ScopeState) -> Option<TaskId> {
|
||||
Some(cx.push_future(self))
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub struct CleanupFutureMarker;
|
||||
impl<T, F> UseEffectReturn<CleanupFutureMarker> for T
|
||||
where
|
||||
T: Future<Output = F> + 'static,
|
||||
F: FnOnce() + 'static,
|
||||
{
|
||||
fn apply(self, oncleanup: UseEffectCleanup, cx: &ScopeState) -> Option<TaskId> {
|
||||
let task = cx.push_future(async move {
|
||||
let cleanup = self.await;
|
||||
*oncleanup.borrow_mut() = Some(Box::new(cleanup) as Box<dyn FnOnce()>);
|
||||
});
|
||||
Some(task)
|
||||
}
|
||||
}
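A minimal sketch exercising both accepted return shapes; the channel state and the `println!` bodies are illustrative:

```rust
use dioxus::prelude::*;

fn app(cx: Scope) -> Element {
    let channel = use_state(cx, || "general".to_string());

    // Plain future: resolves to `()`, no cleanup registered.
    use_effect(cx, (channel.get(),), |(channel,)| async move {
        println!("joined {channel}");
    });

    // Future resolving to a closure: the closure runs when the dependencies
    // change or the component is dropped.
    use_effect(cx, (channel.get(),), |(channel,)| async move {
        println!("subscribed to {channel}");
        move || println!("unsubscribed from {channel}")
    });

    cx.render(rsx! {
        div { "listening on {channel}" }
    })
}
```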
|
||||
|
||||
|
|
|
@ -31,13 +31,14 @@ where
|
|||
|
||||
let state = cx.use_hook(move || UseFuture {
|
||||
update: cx.schedule_update(),
|
||||
needs_regen: Cell::new(true),
|
||||
needs_regen: Rc::new(Cell::new(true)),
|
||||
state: val.clone(),
|
||||
task: Default::default(),
|
||||
dependencies: Vec::new(),
|
||||
});
|
||||
|
||||
if dependencies.clone().apply(&mut state.dependencies) || state.needs_regen.get() {
|
||||
let state_dependencies = cx.use_hook(Vec::new);
|
||||
|
||||
if dependencies.clone().apply(state_dependencies) || state.needs_regen.get() {
|
||||
// kill the old one, if it exists
|
||||
if let Some(task) = state.task.take() {
|
||||
cx.remove_future(task);
|
||||
|
@ -69,11 +70,11 @@ pub enum FutureState<'a, T> {
|
|||
Regenerating(&'a T), // the old value
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct UseFuture<T: 'static> {
|
||||
update: Arc<dyn Fn()>,
|
||||
needs_regen: Cell<bool>,
|
||||
needs_regen: Rc<Cell<bool>>,
|
||||
task: Rc<Cell<Option<TaskId>>>,
|
||||
dependencies: Vec<Box<dyn Any>>,
|
||||
state: UseState<Option<T>>,
|
||||
}
|
||||
|
||||
|
|
|
@ -26,6 +26,7 @@ macro_rules! debug_location {
|
|||
}
|
||||
|
||||
pub mod error {
|
||||
#[cfg(debug_assertions)]
|
||||
fn locations_display(locations: &[&'static std::panic::Location<'static>]) -> String {
|
||||
locations
|
||||
.iter()
|
||||
|
|
|
@ -122,7 +122,7 @@ pub fn init<Ctx: HotReloadingContext + Send + 'static>(cfg: Config<Ctx>) {
|
|||
} = cfg;
|
||||
|
||||
if let Ok(crate_dir) = PathBuf::from_str(root_path) {
|
||||
// try to find the gitingore file
|
||||
// try to find the gitignore file
|
||||
let gitignore_file_path = crate_dir.join(".gitignore");
|
||||
let (gitignore, _) = ignore::gitignore::Gitignore::new(gitignore_file_path);
|
||||
|
||||
|
@ -152,21 +152,20 @@ pub fn init<Ctx: HotReloadingContext + Send + 'static>(cfg: Config<Ctx>) {
|
|||
}
|
||||
let file_map = Arc::new(Mutex::new(file_map));
|
||||
|
||||
let target_dir = crate_dir.join("target");
|
||||
let hot_reload_socket_path = target_dir.join("dioxusin");
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
// On unix, if you force quit the application, it can leave the file socket open
|
||||
// This will cause the local socket listener to fail to open
|
||||
// We check if the file socket is already open from an old session and then delete it
|
||||
let paths = ["./dioxusin", "./@dioxusin"];
|
||||
for path in paths {
|
||||
let path = PathBuf::from(path);
|
||||
if path.exists() {
|
||||
let _ = std::fs::remove_file(path);
|
||||
}
|
||||
if hot_reload_socket_path.exists() {
|
||||
let _ = std::fs::remove_file(hot_reload_socket_path);
|
||||
}
|
||||
}
|
||||
|
||||
match LocalSocketListener::bind("@dioxusin") {
|
||||
match LocalSocketListener::bind(hot_reload_socket_path) {
|
||||
Ok(local_socket_stream) => {
|
||||
let aborted = Arc::new(Mutex::new(false));
|
||||
|
||||
|
|
|
@ -1,4 +1,7 @@
|
|||
use std::io::{BufRead, BufReader};
|
||||
use std::{
|
||||
io::{BufRead, BufReader},
|
||||
path::PathBuf,
|
||||
};
|
||||
|
||||
use dioxus_core::Template;
|
||||
#[cfg(feature = "file_watcher")]
|
||||
|
@ -24,7 +27,8 @@ pub enum HotReloadMsg {
|
|||
/// Connect to the hot reloading listener. The callback provided will be called every time a template change is detected
|
||||
pub fn connect(mut f: impl FnMut(HotReloadMsg) + Send + 'static) {
|
||||
std::thread::spawn(move || {
|
||||
if let Ok(socket) = LocalSocketStream::connect("@dioxusin") {
|
||||
let path = PathBuf::from("./").join("target").join("dioxusin");
|
||||
if let Ok(socket) = LocalSocketStream::connect(path) {
|
||||
let mut buf_reader = BufReader::new(socket);
|
||||
loop {
|
||||
let mut buf = String::new();
|
||||
|
|
|
@ -74,7 +74,7 @@ macro_rules! impl_attribute_match {
|
|||
$attr:ident $fil:ident: $vil:ident (in $ns:literal),
|
||||
) => {
|
||||
if $attr == stringify!($fil) {
|
||||
return Some((stringify!(fil), Some(ns)));
|
||||
return Some((stringify!(fil), Some($ns)));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -180,14 +180,26 @@ macro_rules! impl_element_match {
|
|||
};
|
||||
|
||||
(
|
||||
$el:ident $name:ident $namespace:tt {
|
||||
$el:ident $name:ident $namespace:literal {
|
||||
$(
|
||||
$fil:ident: $vil:ident $extra:tt,
|
||||
)*
|
||||
}
|
||||
) => {
|
||||
if $el == stringify!($name) {
|
||||
return Some((stringify!($name), Some(stringify!($namespace))));
|
||||
return Some((stringify!($name), Some($namespace)));
|
||||
}
|
||||
};
|
||||
|
||||
(
|
||||
$el:ident $name:ident [$_:literal, $namespace:tt] {
|
||||
$(
|
||||
$fil:ident: $vil:ident $extra:tt,
|
||||
)*
|
||||
}
|
||||
) => {
|
||||
if $el == stringify!($name) {
|
||||
return Some((stringify!($name), Some($namespace)));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -207,6 +219,8 @@ macro_rules! impl_element_match_attributes {
|
|||
$attr $fil: $vil ($extra),
|
||||
);
|
||||
)*
|
||||
|
||||
return impl_map_global_attributes!($el $attr $name None);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -223,10 +237,41 @@ macro_rules! impl_element_match_attributes {
|
|||
$attr $fil: $vil ($extra),
|
||||
);
|
||||
)*
|
||||
|
||||
return impl_map_global_attributes!($el $attr $name $namespace);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "hot-reload-context")]
|
||||
macro_rules! impl_map_global_attributes {
|
||||
(
|
||||
$el:ident $attr:ident $element:ident None
|
||||
) => {
|
||||
map_global_attributes($attr)
|
||||
};
|
||||
|
||||
(
|
||||
$el:ident $attr:ident $element:ident $namespace:literal
|
||||
) => {
|
||||
if $namespace == "http://www.w3.org/2000/svg" {
|
||||
map_svg_attributes($attr)
|
||||
} else {
|
||||
map_global_attributes($attr)
|
||||
}
|
||||
};
|
||||
|
||||
(
|
||||
$el:ident $attr:ident $element:ident [$name:literal, $namespace:tt]
|
||||
) => {
|
||||
if $namespace == "http://www.w3.org/2000/svg" {
|
||||
map_svg_attributes($attr)
|
||||
} else {
|
||||
map_global_attributes($attr)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! builder_constructors {
|
||||
(
|
||||
$(
|
||||
|
@ -254,7 +299,7 @@ macro_rules! builder_constructors {
|
|||
}
|
||||
);
|
||||
)*
|
||||
map_global_attributes(attribute).or_else(|| map_svg_attributes(attribute))
|
||||
None
|
||||
}
|
||||
|
||||
fn map_element(element: &str) -> Option<(&'static str, Option<&'static str>)> {
|
||||
|
@ -782,6 +827,7 @@ builder_constructors! {
|
|||
decoding: ImageDecoding DEFAULT,
|
||||
height: usize DEFAULT,
|
||||
ismap: Bool DEFAULT,
|
||||
loading: String DEFAULT,
|
||||
src: Uri DEFAULT,
|
||||
srcset: String DEFAULT, // FIXME this is much more complicated
|
||||
usemap: String DEFAULT, // FIXME should be a fragment starting with '#'
|
||||
|
|
|
@ -269,6 +269,9 @@ trait_methods! {
|
|||
/// <https://developer.mozilla.org/en-US/docs/Web/CSS/azimuth>
|
||||
azimuth: "azimuth", "style";
|
||||
|
||||
/// <https://developer.mozilla.org/en-US/docs/Web/CSS/backdrop-filter>
|
||||
backdrop_filter: "backdrop-filter", "style";
|
||||
|
||||
/// <https://developer.mozilla.org/en-US/docs/Web/CSS/backface-visibility>
|
||||
backface_visibility: "backface-visibility", "style";
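A minimal sketch of the newly listed style attribute used from rsx; the blur radius is illustrative:

```rust
use dioxus::prelude::*;

fn app(cx: Scope) -> Element {
    cx.render(rsx! {
        div {
            backdrop_filter: "blur(8px)",
            "frosted glass"
        }
    })
}
```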
|
||||
|
||||
|
|
|
@ -38,6 +38,10 @@ salvo = { version = "0.44.1", optional = true, features = ["ws"] }
|
|||
once_cell = "1.17.1"
|
||||
async-trait = "0.1.71"
|
||||
|
||||
# rocket
|
||||
rocket = { version = "0.5.0", optional = true }
|
||||
rocket_ws = { version = "0.1.0", optional = true }
|
||||
|
||||
# actix is ... complicated?
|
||||
# actix-files = { version = "0.6.2", optional = true }
|
||||
# actix-web = { version = "4.2.1", optional = true }
|
||||
|
@ -49,13 +53,16 @@ tokio = { workspace = true, features = ["full"] }
|
|||
dioxus = { workspace = true }
|
||||
warp = "0.3.3"
|
||||
axum = { version = "0.6.1", features = ["ws"] }
|
||||
salvo = { version = "0.44.1", features = ["affix", "ws"] }
|
||||
# salvo = { version = "0.44.1", features = ["affix", "ws"] }
|
||||
rocket = "0.5.0"
|
||||
rocket_ws = "0.1.0"
|
||||
tower = "0.4.13"
|
||||
|
||||
[features]
|
||||
default = ["hot-reload"]
|
||||
# actix = ["actix-files", "actix-web", "actix-ws"]
|
||||
hot-reload = ["dioxus-hot-reload"]
|
||||
rocket = ["dep:rocket", "dep:rocket_ws"]
|
||||
|
||||
[[example]]
|
||||
name = "axum"
|
||||
|
@ -68,3 +75,7 @@ required-features = ["salvo"]
|
|||
[[example]]
|
||||
name = "warp"
|
||||
required-features = ["warp"]
|
||||
|
||||
[[example]]
|
||||
name = "rocket"
|
||||
required-features = ["rocket"]
|
||||
|
|
|
@@ -28,6 +28,7 @@ The current backend frameworks supported include:
- Axum
- Warp
- Salvo
- Rocket

Dioxus-LiveView exports some primitives to wire up an app into an existing backend framework.
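For orientation, here is a minimal sketch of that wiring (not part of this diff). It assumes the crate's existing axum adapter (`axum_socket`) and the axum 0.6 dev-dependency listed above; the route paths, HTML shell, and `app` component are illustrative only. The pattern is the same for every backend: serve a page that loads `interpreter_glue`, then hand each upgraded websocket to a `LiveViewPool`.

use std::{net::SocketAddr, sync::Arc};

use axum::{extract::ws::WebSocketUpgrade, response::Html, routing::get, Router};
use dioxus::prelude::*;

fn app(cx: Scope) -> Element {
    cx.render(rsx! { div { "hello LiveView!" } })
}

#[tokio::main]
async fn main() {
    let addr: SocketAddr = ([127, 0, 0, 1], 3030).into();
    let pool = Arc::new(dioxus_liveview::LiveViewPool::new());

    let router = Router::new()
        // The page only needs a mount point and the websocket glue script.
        .route(
            "/",
            get(move || async move {
                Html(format!(
                    r#"<!DOCTYPE html><html><body><div id="main"></div>{glue}</body></html>"#,
                    glue = dioxus_liveview::interpreter_glue("/ws")
                ))
            }),
        )
        // Each websocket connection becomes one running instance of `app`.
        .route(
            "/ws",
            get(move |ws: WebSocketUpgrade| async move {
                ws.on_upgrade(move |socket| async move {
                    let _ = pool.launch(dioxus_liveview::axum_socket(socket), app).await;
                })
            }),
        );

    axum::Server::bind(&addr)
        .serve(router.into_make_service())
        .await
        .unwrap();
}

The new Rocket example below follows the same shape, with `rocket_socket` in place of `axum_socket`.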
packages/liveview/examples/rocket.rs (new file, 76 lines)
@@ -0,0 +1,76 @@
#[macro_use]
extern crate rocket;

use dioxus::prelude::*;
use dioxus_liveview::LiveViewPool;
use rocket::response::content::RawHtml;
use rocket::{Config, Rocket, State};
use rocket_ws::{Channel, WebSocket};

fn app(cx: Scope) -> Element {
    let mut num = use_state(cx, || 0);

    cx.render(rsx! {
        div {
            "hello Rocket! {num}"
            button { onclick: move |_| num += 1, "Increment" }
        }
    })
}

fn index_page_with_glue(glue: &str) -> RawHtml<String> {
    RawHtml(format!(
        r#"
        <!DOCTYPE html>
        <html>
            <head> <title>Dioxus LiveView with Rocket</title> </head>
            <body> <div id="main"></div> </body>
            {glue}
        </html>
        "#,
        glue = glue
    ))
}

#[get("/")]
async fn index(config: &Config) -> RawHtml<String> {
    index_page_with_glue(&dioxus_liveview::interpreter_glue(&format!(
        "ws://{addr}:{port}/ws",
        addr = config.address,
        port = config.port,
    )))
}

#[get("/as-path")]
async fn as_path() -> RawHtml<String> {
    index_page_with_glue(&dioxus_liveview::interpreter_glue("/ws"))
}

#[get("/ws")]
fn ws(ws: WebSocket, pool: &State<LiveViewPool>) -> Channel<'static> {
    let pool = pool.inner().to_owned();

    ws.channel(move |stream| {
        Box::pin(async move {
            let _ = pool
                .launch(dioxus_liveview::rocket_socket(stream), app)
                .await;
            Ok(())
        })
    })
}

#[tokio::main]
async fn main() {
    let view = dioxus_liveview::LiveViewPool::new();

    Rocket::build()
        .manage(view)
        .mount("/", routes![index, as_path, ws])
        .ignite()
        .await
        .expect("Failed to ignite rocket")
        .launch()
        .await
        .expect("Failed to launch rocket");
}

packages/liveview/src/adapters/rocket_adapter.rs (new file, 25 lines)
@@ -0,0 +1,25 @@
use crate::{LiveViewError, LiveViewSocket};
use rocket::futures::{SinkExt, StreamExt};
use rocket_ws::{result::Error, stream::DuplexStream, Message};

/// Convert a rocket websocket into a LiveViewSocket
///
/// This is required to launch a LiveView app using the rocket web framework
pub fn rocket_socket(stream: DuplexStream) -> impl LiveViewSocket {
    stream
        .map(transform_rx)
        .with(transform_tx)
        .sink_map_err(|_| LiveViewError::SendingFailed)
}

fn transform_rx(message: Result<Message, Error>) -> Result<Vec<u8>, LiveViewError> {
    message
        .map_err(|_| LiveViewError::SendingFailed)?
        .into_text()
        .map(|s| s.into_bytes())
        .map_err(|_| LiveViewError::SendingFailed)
}

async fn transform_tx(message: Vec<u8>) -> Result<Message, Error> {
    Ok(Message::Text(String::from_utf8_lossy(&message).to_string()))
}

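The adapter is purely a framing shim: LiveView speaks in `Vec<u8>` payloads, so incoming text frames are handed over as their raw bytes, outgoing bytes are wrapped back into text messages, and any transport error is collapsed into `LiveViewError::SendingFailed`. A small standalone sketch of that round trip (the two private transforms above are simply repeated here on a `rocket_ws::Message`; the payload is arbitrary):

use rocket_ws::Message;

fn main() {
    // Outgoing direction: bytes from LiveView become a websocket text frame.
    let outgoing: Vec<u8> = b"hello from liveview".to_vec();
    let frame = Message::Text(String::from_utf8_lossy(&outgoing).to_string());

    // Incoming direction: a text frame is handed back to LiveView as raw bytes.
    let incoming = frame.into_text().map(|s| s.into_bytes()).unwrap();
    assert_eq!(incoming, outgoing);
}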
@@ -18,6 +18,11 @@ pub mod adapters {
    #[cfg(feature = "salvo")]
    pub use salvo_adapter::*;

    #[cfg(feature = "rocket")]
    pub mod rocket_adapter;
    #[cfg(feature = "rocket")]
    pub use rocket_adapter::*;
}

pub use adapters::*;

@@ -57,7 +57,12 @@ impl DioxusState {
        node.insert(ElementIdComponent(element_id));
        if self.node_id_mapping.len() <= element_id.0 {
            self.node_id_mapping.resize(element_id.0 + 1, None);
        } else if let Some(mut node) =
            self.node_id_mapping[element_id.0].and_then(|id| node.real_dom_mut().get_mut(id))
        {
            node.remove();
        }

        self.node_id_mapping[element_id.0] = Some(node_id);
    }

@@ -14,7 +14,7 @@ dioxus-html = { workspace = true }
dioxus-native-core = { workspace = true, features = ["layout-attributes"] }
dioxus-native-core-macro = { workspace = true }

tui = "0.17.0"
ratatui = "0.24.0"
crossterm = "0.26.1"
anyhow = "1.0.42"
tokio = { workspace = true, features = ["full"] }

@@ -17,6 +17,7 @@ use futures::{channel::mpsc::UnboundedSender, pin_mut, Future, StreamExt};
use futures_channel::mpsc::unbounded;
use layout::TaffyLayout;
use prevent_default::PreventDefault;
use ratatui::{backend::CrosstermBackend, Terminal};
use std::{io, time::Duration};
use std::{
    pin::Pin,

@@ -26,7 +27,6 @@ use std::{rc::Rc, sync::RwLock};
use style_attributes::StyleModifier;
pub use taffy::{geometry::Point, prelude::*};
use tokio::select;
use tui::{backend::CrosstermBackend, Terminal};
use widgets::{register_widgets, RinkWidgetResponder, RinkWidgetTraitObject};

mod config;

@@ -180,7 +180,7 @@ pub fn render<R: Driver>(
            if !to_rerender.is_empty() || updated {
                updated = false;
                fn resize(dims: tui::layout::Rect, taffy: &mut Taffy, rdom: &RealDom) {
                fn resize(dims: ratatui::layout::Rect, taffy: &mut Taffy, rdom: &RealDom) {
                    let width = screen_to_layout_space(dims.width);
                    let height = screen_to_layout_space(dims.height);
                    let root_node = rdom

@@ -222,7 +222,7 @@ pub fn render<R: Driver>(
            } else {
                let rdom = rdom.read().unwrap();
                resize(
                    tui::layout::Rect {
                    ratatui::layout::Rect {
                        x: 0,
                        y: 0,
                        width: 1000,

@@ -1,11 +1,10 @@
use dioxus_native_core::{prelude::*, tree::TreeRef};
use std::io::Stdout;
use ratatui::{layout::Rect, style::Color};
use taffy::{
    geometry::Point,
    prelude::{Dimension, Layout, Size},
    Taffy,
};
use tui::{backend::CrosstermBackend, layout::Rect, style::Color};

use crate::{
    focus::Focused,

@@ -20,7 +19,7 @@ use crate::{
const RADIUS_MULTIPLIER: [f32; 2] = [1.0, 0.5];

pub(crate) fn render_vnode(
    frame: &mut tui::Frame<CrosstermBackend<Stdout>>,
    frame: &mut ratatui::Frame,
    layout: &Taffy,
    node: NodeRef,
    cfg: Config,

@@ -96,7 +95,7 @@ pub(crate) fn render_vnode(
impl RinkWidget for NodeRef<'_> {
    fn render(self, area: Rect, mut buf: RinkBuffer<'_>) {
        use tui::symbols::line::*;
        use ratatui::symbols::line::*;

        enum Direction {
            Left,

@@ -1,6 +1,6 @@
use std::{num::ParseFloatError, str::FromStr};

use tui::style::{Color, Modifier, Style};
use ratatui::style::{Color, Modifier, Style};

use crate::RenderingMode;

@@ -442,6 +442,7 @@ impl RinkStyle {
impl From<RinkStyle> for Style {
    fn from(val: RinkStyle) -> Self {
        Style {
            underline_color: None,
            fg: val.fg.map(|c| c.color),
            bg: val.bg.map(|c| c.color),
            add_modifier: val.add_modifier,

@@ -187,8 +187,8 @@ pub enum BorderStyle {
}

impl BorderStyle {
    pub fn symbol_set(&self) -> Option<tui::symbols::line::Set> {
        use tui::symbols::line::*;
    pub fn symbol_set(&self) -> Option<ratatui::symbols::line::Set> {
        use ratatui::symbols::line::*;
        const DASHED: Set = Set {
            horizontal: "╌",
            vertical: "╎",

@@ -570,7 +570,7 @@ fn apply_animation(name: &str, _value: &str, _style: &mut StyleModifier) {
}

fn apply_font(name: &str, value: &str, style: &mut StyleModifier) {
    use tui::style::Modifier;
    use ratatui::style::Modifier;
    match name {
        "font" => (),
        "font-family" => (),

@@ -593,7 +593,7 @@ fn apply_font(name: &str, value: &str, style: &mut StyleModifier) {
}

fn apply_text(name: &str, value: &str, style: &mut StyleModifier) {
    use tui::style::Modifier;
    use ratatui::style::Modifier;

    match name {
        "text-align" => todo!(),

@@ -1,4 +1,4 @@
use tui::{
use ratatui::{
    buffer::Buffer,
    layout::Rect,
    style::{Color, Modifier},

@@ -75,6 +75,7 @@ impl Redirect {
        let (segments, query) = parse_route_segments(
            path.span(),
            #[allow(clippy::map_identity)]
            closure_arguments.iter().map(|(name, ty)| (name, ty)),
            &path.value(),
        )?;

@@ -20,6 +20,7 @@ quote = { version = "1.0" }
serde = { version = "1.0", features = ["derive"], optional = true }
internment = { version = "0.7.0", optional = true }
krates = { version = "0.12.6", optional = true }
tracing.workspace = true

[features]
hot_reload = ["krates", "internment"]

@@ -1,4 +1,5 @@
use proc_macro2::TokenStream;
use quote::ToTokens;
use syn::{File, Macro};

pub enum DiffResult {

@@ -10,13 +11,30 @@ pub enum DiffResult {
pub fn find_rsx(new: &File, old: &File) -> DiffResult {
    let mut rsx_calls = Vec::new();
    if new.items.len() != old.items.len() {
        tracing::trace!(
            "found not hot reload-able change {:#?} != {:#?}",
            new.items
                .iter()
                .map(|i| i.to_token_stream().to_string())
                .collect::<Vec<_>>(),
            old.items
                .iter()
                .map(|i| i.to_token_stream().to_string())
                .collect::<Vec<_>>()
        );
        return DiffResult::CodeChanged;
    }
    for (new, old) in new.items.iter().zip(old.items.iter()) {
        if find_rsx_item(new, old, &mut rsx_calls) {
            tracing::trace!(
                "found not hot reload-able change {:#?} != {:#?}",
                new.to_token_stream().to_string(),
                old.to_token_stream().to_string()
            );
            return DiffResult::CodeChanged;
        }
    }
    tracing::trace!("found hot reload-able changes {:#?}", rsx_calls);
    DiffResult::RsxChanged(rsx_calls)
}

@@ -94,6 +112,9 @@ fn find_rsx_item(
        (syn::ImplItem::Macro(new_item), syn::ImplItem::Macro(old_item)) => {
            old_item != new_item
        }
        (syn::ImplItem::Verbatim(stream), syn::ImplItem::Verbatim(stream2)) => {
            stream.to_string() != stream2.to_string()
        }
        _ => true,
    } {
        return true;

@@ -186,10 +207,12 @@ fn find_rsx_trait(
            }
        }
        (syn::TraitItem::Fn(new_item), syn::TraitItem::Fn(old_item)) => {
            if let (Some(new_block), Some(old_block)) = (&new_item.default, &old_item.default) {
            match (&new_item.default, &old_item.default) {
                (Some(new_block), Some(old_block)) => {
                    find_rsx_block(new_block, old_block, rsx_calls)
            } else {
                true
            }
                }
                (None, None) => false,
                _ => true,
            }
        }
        (syn::TraitItem::Type(new_item), syn::TraitItem::Type(old_item)) => {

@@ -198,6 +221,9 @@ fn find_rsx_trait(
        (syn::TraitItem::Macro(new_item), syn::TraitItem::Macro(old_item)) => {
            old_item != new_item
        }
        (syn::TraitItem::Verbatim(stream), syn::TraitItem::Verbatim(stream2)) => {
            stream.to_string() != stream2.to_string()
        }
        _ => true,
    } {
        return true;

@@ -355,6 +381,11 @@ fn find_rsx_expr(
                || new_expr.or2_token != old_expr.or2_token
                || new_expr.output != old_expr.output
        }
        (syn::Expr::Const(new_expr), syn::Expr::Const(old_expr)) => {
            find_rsx_block(&new_expr.block, &old_expr.block, rsx_calls)
                || new_expr.attrs != old_expr.attrs
                || new_expr.const_token != old_expr.const_token
        }
        (syn::Expr::Continue(new_expr), syn::Expr::Continue(old_expr)) => old_expr != new_expr,
        (syn::Expr::Field(new_expr), syn::Expr::Field(old_expr)) => {
            find_rsx_expr(&new_expr.base, &old_expr.base, rsx_calls)

@@ -402,6 +433,7 @@ fn find_rsx_expr(
                || new_expr.attrs != old_expr.attrs
                || new_expr.bracket_token != old_expr.bracket_token
        }
        (syn::Expr::Infer(new_expr), syn::Expr::Infer(old_expr)) => new_expr != old_expr,
        (syn::Expr::Let(new_expr), syn::Expr::Let(old_expr)) => {
            find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
                || new_expr.attrs != old_expr.attrs

@@ -589,7 +621,9 @@ fn find_rsx_expr(
                _ => true,
            }
        }
        (syn::Expr::Verbatim(_), syn::Expr::Verbatim(_)) => false,
        (syn::Expr::Verbatim(stream), syn::Expr::Verbatim(stream2)) => {
            stream.to_string() != stream2.to_string()
        }
        _ => true,
    }
}

@@ -51,17 +51,24 @@ impl<Ctx: HotReloadingContext> FileMap<Ctx> {
    fn find_rs_files(
        root: PathBuf,
        filter: &mut impl FnMut(&Path) -> bool,
    ) -> io::Result<FileMapSearchResult> {
    ) -> FileMapSearchResult {
        let mut files = HashMap::new();
        let mut errors = Vec::new();
        if root.is_dir() {
            for entry in (fs::read_dir(root)?).flatten() {
            let read_dir = match fs::read_dir(root) {
                Ok(read_dir) => read_dir,
                Err(err) => {
                    errors.push(err);
                    return FileMapSearchResult { map: files, errors };
                }
            };
            for entry in read_dir.flatten() {
                let path = entry.path();
                if !filter(&path) {
                    let FileMapSearchResult {
                        map,
                        errors: child_errors,
                    } = find_rs_files(path, filter)?;
                    } = find_rs_files(path, filter);
                    errors.extend(child_errors);
                    files.extend(map);
                }

@@ -69,14 +76,20 @@ impl<Ctx: HotReloadingContext> FileMap<Ctx> {
        } else if root.extension().and_then(|s| s.to_str()) == Some("rs") {
            if let Ok(mut file) = File::open(root.clone()) {
                let mut src = String::new();
                file.read_to_string(&mut src)?;
                match file.read_to_string(&mut src) {
                    Ok(_) => {
                        files.insert(root, (src, None));
                    }
                    Err(err) => {
                        errors.push(err);
                    }
                Ok(FileMapSearchResult { map: files, errors })
                }
            }
        }
        FileMapSearchResult { map: files, errors }
    }

        let FileMapSearchResult { map, errors } = find_rs_files(path, &mut filter)?;
        let FileMapSearchResult { map, errors } = find_rs_files(path, &mut filter);
        let result = Self {
            map,
            in_workspace: HashMap::new(),
