Merge pull request #1948 from atty303/fullstack-wasm

feat(fullstack): support wasm target
Evan Almloff 2024-02-19 08:52:05 -06:00 committed by GitHub
commit c0f2e8391c
11 changed files with 199 additions and 158 deletions

Cargo.lock (generated)

@@ -2775,6 +2775,7 @@ dependencies = [
"argh",
"askama_escape",
"async-trait",
"chrono",
"dioxus",
"dioxus-core",
"dioxus-html",


@@ -367,8 +367,8 @@ async fn start_server(
#[cfg(feature = "plugin")]
PluginManager::on_serve_start(_config)?;
// Parse address
let addr = format!("0.0.0.0:{}", port).parse().unwrap();
// Bind the server to `[::]` so it listens on both IPv4 and IPv6 (requires IPv6 dual-stack support)
let addr = format!("[::]:{}", port).parse().unwrap();
// Open the browser
if start_browser {
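
For context, `[::]` is the unspecified IPv6 address; on a dual-stack host a listener bound to it also accepts IPv4 connections as IPv4-mapped addresses, so one socket serves both families. A minimal standalone sketch of the address handling (the helper name and port are illustrative, not from this commit):

use std::net::SocketAddr;

// A literal `[::]:<port>` string parses to the IPv6 unspecified address,
// which dual-stack hosts use to accept IPv4 traffic as well.
fn dual_stack_addr(port: u16) -> SocketAddr {
    format!("[::]:{port}")
        .parse()
        .expect("`[::]:<port>` is always a valid SocketAddr")
}

fn main() {
    let addr = dual_stack_addr(8080);
    assert!(addr.is_ipv6());
    println!("serving on {addr}");
}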


@@ -38,14 +38,13 @@ dioxus-mobile = { workspace = true, optional = true }
tracing = { workspace = true }
tracing-futures = { workspace = true, optional = true }
once_cell = "1.17.1"
tokio = { workspace = true, features = ["rt", "sync", "rt-multi-thread"], optional = true }
tokio-util = { version = "0.7.8", features = ["rt"], optional = true }
anymap = { version = "0.12.1", optional = true }
serde = "1.0.159"
serde_json = { version = "1.0.95", optional = true }
tokio-stream = { version = "0.1.12", features = ["sync"], optional = true }
futures-util = { workspace = true, default-features = false, optional = true }
futures-util = { workspace = true, default-features = false }
ciborium = "0.2.1"
base64 = "0.21.0"
@@ -59,12 +58,16 @@ web-sys = { version = "0.3.61", optional = true, features = ["Window", "Document
dioxus-cli-config = { workspace = true, optional = true }
[target.'cfg(target_arch = "wasm32")'.dependencies]
tokio = { workspace = true, features = ["rt", "sync"], optional = true }
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
dioxus-hot-reload = { workspace = true }
tokio = { workspace = true, features = ["rt", "sync", "rt-multi-thread"], optional = true }
[features]
default = ["hot-reload"]
hot-reload = ["serde_json", "futures-util"]
hot-reload = ["serde_json"]
web = ["dioxus-web", "web-sys"]
desktop = ["dioxus-desktop"]
mobile = ["dioxus-mobile"]
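
Note that `futures-util` is no longer optional (and is dropped from the `hot-reload` feature list), which lines up with the wasm code path further down chaining a spawned task's `JoinHandle` through `FutureExt::then`. A small isolated sketch of that combinator (the helper is hypothetical, not part of the crate):

use futures_util::future::FutureExt;
use tokio::task::JoinHandle;

// A JoinHandle<T> is a future of Result<T, JoinError>; `then` lets the
// caller unwrap the join result and get T back directly.
async fn unwrap_join<T>(handle: JoinHandle<T>) -> T {
    handle
        .then(|joined| async move { joined.expect("spawned task panicked") })
        .await
}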


@@ -65,8 +65,7 @@ use axum::{
};
use dioxus_lib::prelude::VirtualDom;
use http::header::*;
use server_fn::error::NoCustomError;
use server_fn::error::ServerFnErrorSerde;
use std::sync::Arc;
use crate::{
@@ -453,20 +452,6 @@ pub async fn hot_reload_handler(ws: axum::extract::WebSocketUpgrade) -> impl Int
})
}
fn get_local_pool() -> tokio_util::task::LocalPoolHandle {
use once_cell::sync::OnceCell;
static LOCAL_POOL: OnceCell<tokio_util::task::LocalPoolHandle> = OnceCell::new();
LOCAL_POOL
.get_or_init(|| {
tokio_util::task::LocalPoolHandle::new(
std::thread::available_parallelism()
.map(Into::into)
.unwrap_or(1),
)
})
.clone()
}
/// A handler for Dioxus server functions. This will run the server function and return the result.
async fn handle_server_fns_inner(
path: &str,
@@ -475,15 +460,13 @@ async fn handle_server_fns_inner(
) -> impl IntoResponse {
use server_fn::middleware::Service;
let (tx, rx) = tokio::sync::oneshot::channel();
let path_string = path.to_string();
get_local_pool().spawn_pinned(move || async move {
let (parts, body) = req.into_parts();
let req = Request::from_parts(parts.clone(), body);
let future = move || async move {
let (parts, body) = req.into_parts();
let req = Request::from_parts(parts.clone(), body);
let res = if let Some(mut service) =
if let Some(mut service) =
server_fn::axum::get_server_fn_service(&path_string)
{
@@ -538,18 +521,28 @@ async fn handle_server_fns_inner(
}
)
}
.expect("could not build Response");
.expect("could not build Response")
};
#[cfg(target_arch = "wasm32")]
{
use futures_util::future::FutureExt;
_ = tx.send(res);
});
rx.await.unwrap_or_else(|e| {
(
StatusCode::INTERNAL_SERVER_ERROR,
ServerFnError::<NoCustomError>::ServerError(e.to_string())
.ser()
.unwrap_or_default(),
)
.into_response()
})
let result = tokio::task::spawn_local(future);
let result = result.then(|f| async move { f.unwrap() });
result.await.unwrap_or_else(|e| {
use server_fn::error::NoCustomError;
use server_fn::error::ServerFnErrorSerde;
(
StatusCode::INTERNAL_SERVER_ERROR,
ServerFnError::<NoCustomError>::ServerError(e.to_string())
.ser()
.unwrap_or_default(),
)
.into_response()
})
}
#[cfg(not(target_arch = "wasm32"))]
{
future().await
}
}
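
The rewritten handler wraps the server-function work in a closure and then chooses an executor per target: wasm32 pins it to the current thread with `tokio::task::spawn_local`, while native builds await it in place. The same pattern in isolation (the function name is illustrative; `spawn_local` assumes an active `LocalSet` or other local task context):

use std::future::Future;

// Run a possibly !Send future on whatever the target supports: a local
// task on wasm32, or a plain in-place await on native targets.
async fn run_on_target<F, T>(make_future: impl FnOnce() -> F) -> T
where
    F: Future<Output = T> + 'static,
    T: 'static,
{
    #[cfg(target_arch = "wasm32")]
    {
        tokio::task::spawn_local(make_future())
            .await
            .expect("local task panicked")
    }
    #[cfg(not(target_arch = "wasm32"))]
    {
        make_future().await
    }
}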


@@ -21,7 +21,7 @@ pub fn launch(
vdom
};
#[cfg(feature = "server")]
#[cfg(all(feature = "server", not(target_arch = "wasm32")))]
tokio::runtime::Runtime::new()
.unwrap()
.block_on(async move {
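
The extra `not(target_arch = "wasm32")` guard matters because the multi-threaded tokio scheduler needs OS threads, which wasm32-unknown-unknown does not provide; only a current-thread runtime can be built there. A sketch of picking the runtime per target (assuming tokio's `rt` feature on wasm and `rt-multi-thread` natively, matching the Cargo.toml split above):

// Build whichever tokio runtime the compilation target supports.
#[cfg(not(target_arch = "wasm32"))]
fn build_runtime() -> tokio::runtime::Runtime {
    tokio::runtime::Builder::new_multi_thread()
        .build()
        .expect("failed to build multi-thread runtime")
}

#[cfg(target_arch = "wasm32")]
fn build_runtime() -> tokio::runtime::Runtime {
    // No OS threads here, so only the current-thread scheduler is available.
    tokio::runtime::Builder::new_current_thread()
        .build()
        .expect("failed to build current-thread runtime")
}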


@@ -16,7 +16,12 @@ mod config;
mod hooks;
pub mod launch;
#[cfg(all(debug_assertions, feature = "hot-reload", feature = "server"))]
#[cfg(all(
debug_assertions,
feature = "hot-reload",
feature = "server",
not(target_arch = "wasm32")
))]
mod hot_reload;
pub use config::*;


@@ -6,13 +6,33 @@ use dioxus_ssr::{
incremental::{IncrementalRendererConfig, RenderFreshness, WrapBody},
Renderer,
};
use std::future::Future;
use std::sync::Arc;
use std::sync::RwLock;
use tokio::task::spawn_blocking;
use tokio::task::JoinHandle;
use crate::prelude::*;
use dioxus_lib::prelude::*;
fn spawn_platform<Fut>(f: impl FnOnce() -> Fut + Send + 'static) -> JoinHandle<Fut::Output>
where
Fut: Future + 'static,
Fut::Output: Send + 'static,
{
#[cfg(not(target_arch = "wasm32"))]
{
tokio::task::spawn_blocking(move || {
tokio::runtime::Runtime::new()
.expect("couldn't spawn runtime")
.block_on(f())
})
}
#[cfg(target_arch = "wasm32")]
{
tokio::task::spawn_local(f())
}
}
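
A hypothetical call site for `spawn_platform`, mirroring how the render paths below pair it with a oneshot channel to move results out of the !Send rendering work (the HTML literal stands in for real renderer output):

// Do the rendering on the platform-appropriate executor and hand the
// finished HTML back over a oneshot channel.
async fn render_once() -> String {
    let (tx, rx) = tokio::sync::oneshot::channel();
    spawn_platform(move || async move {
        let html = String::from("<div>hello</div>"); // stand-in for SSR output
        let _ = tx.send(html);
    });
    rx.await.expect("render task dropped the sender")
}
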
enum SsrRendererPool {
Renderer(RwLock<Vec<Renderer>>),
Incremental(RwLock<Vec<dioxus_ssr::incremental::IncrementalRenderer>>),
@@ -37,53 +57,45 @@ impl SsrRendererPool {
let (tx, rx) = tokio::sync::oneshot::channel();
spawn_blocking(move || {
tokio::runtime::Runtime::new()
.expect("couldn't spawn runtime")
.block_on(async move {
let mut vdom = virtual_dom_factory();
let mut to = WriteBuffer { buffer: Vec::new() };
// before polling the future, we need to set the context
let prev_context =
SERVER_CONTEXT.with(|ctx| ctx.replace(server_context));
// poll the future, which may call server_context()
tracing::info!("Rebuilding vdom");
vdom.rebuild(&mut NoOpMutations);
vdom.wait_for_suspense().await;
tracing::info!("Suspense resolved");
// after polling the future, we need to restore the context
SERVER_CONTEXT.with(|ctx| ctx.replace(prev_context));
spawn_platform(move || async move {
let mut vdom = virtual_dom_factory();
let mut to = WriteBuffer { buffer: Vec::new() };
// before polling the future, we need to set the context
let prev_context = SERVER_CONTEXT.with(|ctx| ctx.replace(server_context));
// poll the future, which may call server_context()
tracing::info!("Rebuilding vdom");
vdom.rebuild(&mut NoOpMutations);
vdom.wait_for_suspense().await;
tracing::info!("Suspense resolved");
// after polling the future, we need to restore the context
SERVER_CONTEXT.with(|ctx| ctx.replace(prev_context));
if let Err(err) = wrapper.render_before_body(&mut *to) {
let _ = tx.send(Err(err));
return;
}
if let Err(err) = renderer.render_to(&mut to, &vdom) {
let _ = tx.send(Err(
dioxus_ssr::incremental::IncrementalRendererError::RenderError(
err,
),
));
return;
}
if let Err(err) = wrapper.render_after_body(&mut *to) {
let _ = tx.send(Err(err));
return;
}
match String::from_utf8(to.buffer) {
Ok(html) => {
let _ =
tx.send(Ok((renderer, RenderFreshness::now(None), html)));
}
Err(err) => {
_ = tx.send(Err(
dioxus_ssr::incremental::IncrementalRendererError::Other(
Box::new(err),
),
));
}
}
});
if let Err(err) = wrapper.render_before_body(&mut *to) {
let _ = tx.send(Err(err));
return;
}
if let Err(err) = renderer.render_to(&mut to, &vdom) {
let _ = tx.send(Err(
dioxus_ssr::incremental::IncrementalRendererError::RenderError(err),
));
return;
}
if let Err(err) = wrapper.render_after_body(&mut *to) {
let _ = tx.send(Err(err));
return;
}
match String::from_utf8(to.buffer) {
Ok(html) => {
let _ = tx.send(Ok((renderer, RenderFreshness::now(None), html)));
}
Err(err) => {
_ = tx.send(Err(
dioxus_ssr::incremental::IncrementalRendererError::Other(Box::new(
err,
)),
));
}
}
});
let (renderer, freshness, html) = rx.await.unwrap()?;
pool.write().unwrap().push(renderer);
@@ -98,53 +110,49 @@ impl SsrRendererPool {
let (tx, rx) = tokio::sync::oneshot::channel();
let server_context = server_context.clone();
spawn_blocking(move || {
tokio::runtime::Runtime::new()
.expect("couldn't spawn runtime")
.block_on(async move {
let mut to = WriteBuffer { buffer: Vec::new() };
match renderer
.render(
route,
virtual_dom_factory,
&mut *to,
|vdom| {
Box::pin(async move {
// before polling the future, we need to set the context
let prev_context = SERVER_CONTEXT
.with(|ctx| ctx.replace(Box::new(server_context)));
// poll the future, which may call server_context()
tracing::info!("Rebuilding vdom");
vdom.rebuild(&mut NoOpMutations);
vdom.wait_for_suspense().await;
tracing::info!("Suspense resolved");
// after polling the future, we need to restore the context
SERVER_CONTEXT.with(|ctx| ctx.replace(prev_context));
})
},
&wrapper,
)
.await
{
Ok(freshness) => {
match String::from_utf8(to.buffer).map_err(|err| {
dioxus_ssr::incremental::IncrementalRendererError::Other(
Box::new(err),
)
}) {
Ok(html) => {
let _ = tx.send(Ok((freshness, html)));
}
Err(err) => {
let _ = tx.send(Err(err));
}
}
spawn_platform(move || async move {
let mut to = WriteBuffer { buffer: Vec::new() };
match renderer
.render(
route,
virtual_dom_factory,
&mut *to,
|vdom| {
Box::pin(async move {
// before polling the future, we need to set the context
let prev_context = SERVER_CONTEXT
.with(|ctx| ctx.replace(Box::new(server_context)));
// poll the future, which may call server_context()
tracing::info!("Rebuilding vdom");
vdom.rebuild(&mut NoOpMutations);
vdom.wait_for_suspense().await;
tracing::info!("Suspense resolved");
// after polling the future, we need to restore the context
SERVER_CONTEXT.with(|ctx| ctx.replace(prev_context));
})
},
&wrapper,
)
.await
{
Ok(freshness) => {
match String::from_utf8(to.buffer).map_err(|err| {
dioxus_ssr::incremental::IncrementalRendererError::Other(Box::new(
err,
))
}) {
Ok(html) => {
let _ = tx.send(Ok((freshness, html)));
}
Err(err) => {
let _ = tx.send(Err(err));
}
}
})
}
Err(err) => {
let _ = tx.send(Err(err));
}
}
});
let (freshness, html) = rx.await.unwrap()?;


@@ -9,6 +9,7 @@ use std::path::PathBuf;
#[derive(Clone, Default)]
pub struct ServeConfigBuilder {
pub(crate) root_id: Option<&'static str>,
pub(crate) index_html: Option<String>,
pub(crate) index_path: Option<PathBuf>,
pub(crate) assets_path: Option<PathBuf>,
pub(crate) incremental:
@@ -40,6 +41,7 @@ impl ServeConfigBuilder {
pub fn new() -> Self {
Self {
root_id: None,
index_html: None,
index_path: None,
assets_path: None,
incremental: None,
@@ -52,6 +54,12 @@ impl ServeConfigBuilder {
self
}
/// Set the contents of the index.html file to be served. (precedence over index_path)
pub fn index_html(mut self, index_html: String) -> Self {
self.index_html = Some(index_html);
self
}
/// Set the path of the index.html file to be served. (defaults to {assets_path}/index.html)
pub fn index_path(mut self, index_path: PathBuf) -> Self {
self.index_path = Some(index_path);
@@ -86,8 +94,11 @@ impl ServeConfigBuilder {
let root_id = self.root_id.unwrap_or("main");
let index = load_index_html(index_path, root_id);
let index_html = self
.index_html
.unwrap_or_else(|| load_index_path(index_path));
let index = load_index_html(index_html, root_id);
ServeConfig {
index,
assets_path,
@@ -96,13 +107,16 @@ impl ServeConfigBuilder {
}
}
fn load_index_html(path: PathBuf, root_id: &'static str) -> IndexHtml {
fn load_index_path(path: PathBuf) -> String {
let mut file = File::open(path).expect("Failed to find index.html. Make sure the index_path is set correctly and the WASM application has been built.");
let mut contents = String::new();
file.read_to_string(&mut contents)
.expect("Failed to read index.html");
contents
}
fn load_index_html(contents: String, root_id: &'static str) -> IndexHtml {
let (pre_main, post_main) = contents.split_once(&format!("id=\"{root_id}\"")).unwrap_or_else(|| panic!("Failed to find id=\"{root_id}\" in index.html. The id is used to inject the application into the page."));
let post_main = post_main.split_once('>').unwrap_or_else(|| {
@@ -131,6 +145,7 @@ pub(crate) struct IndexHtml {
#[derive(Clone)]
pub struct ServeConfig {
pub(crate) index: IndexHtml,
#[allow(dead_code)]
pub(crate) assets_path: PathBuf,
pub(crate) incremental:
Option<std::sync::Arc<dioxus_ssr::incremental::IncrementalRendererConfig>>,
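
Usage sketch for the new `index_html` setter: embedding the page at compile time avoids any filesystem read, which is what a server target compiled to wasm needs. The include path is hypothetical, and the sketch assumes the builder's usual `build()` finisher:

// Embed index.html into the binary so no runtime filesystem access is
// needed, then build the serve configuration from it.
fn make_serve_config() -> ServeConfig {
    ServeConfigBuilder::new()
        .index_html(include_str!("../dist/index.html").to_string())
        .build()
}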


@@ -18,9 +18,15 @@ rustc-hash = "1.1.0"
lru = { workspace = true }
tracing = { workspace = true }
http = { workspace = true }
tokio = { version = "1.28", features = ["fs", "io-util"], optional = true }
async-trait = { workspace = true }
serde_json = { workspace = true }
chrono = { version = "0.4.34", optional = true }
[target.'cfg(target_arch = "wasm32")'.dependencies]
tokio = { version = "1.28", features = ["io-util"], optional = true }
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
tokio = { version = "1.28", features = ["fs", "io-util"], optional = true }
[dev-dependencies]
dioxus = { workspace = true }
@@ -36,4 +42,4 @@ fs_extra = "1.2.0"
[features]
default = []
incremental = ["dep:tokio"]
incremental = ["dep:tokio", "chrono"]
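
The new `chrono` dependency backs the timestamp change further down, where `SystemTime` is replaced by `DateTime<Utc>` in the memory cache: `std::time::SystemTime::now()` panics on wasm32-unknown-unknown, while `chrono::Utc::now()` can read the JavaScript clock through chrono's default wasmbind support. A small sketch of the replacement pattern (function names are illustrative):

use chrono::{DateTime, Utc};

// Timestamp a cache entry with a clock that also works in the browser.
fn cache_entry(html: Vec<u8>) -> (DateTime<Utc>, Vec<u8>) {
    (Utc::now(), html)
}

// Age of an entry in whole seconds, usable for freshness checks.
fn age_in_seconds(stamp: DateTime<Utc>) -> i64 {
    Utc::now().signed_duration_since(stamp).num_seconds()
}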


@@ -103,6 +103,7 @@ impl ValidCachedPath {
})
}
#[cfg(not(target_arch = "wasm32"))]
pub fn freshness(&self, max_age: Option<std::time::Duration>) -> Option<RenderFreshness> {
let age = self.timestamp.elapsed().ok()?.as_secs();
let max_age = max_age.map(|max_age| max_age.as_secs());


@@ -3,18 +3,19 @@
#![allow(non_snake_case)]
use crate::fs_cache::ValidCachedPath;
use chrono::offset::Utc;
use chrono::DateTime;
use dioxus_core::VirtualDom;
use rustc_hash::FxHasher;
use std::{
future::Future,
hash::BuildHasherDefault,
io::Write,
ops::{Deref, DerefMut},
path::PathBuf,
pin::Pin,
time::{Duration, SystemTime},
};
use tokio::io::{AsyncWrite, AsyncWriteExt, BufReader};
use tokio::io::{AsyncWrite, AsyncWriteExt};
pub use crate::fs_cache::*;
pub use crate::incremental_cfg::*;
@@ -24,7 +25,7 @@ pub struct IncrementalRenderer {
pub(crate) static_dir: PathBuf,
#[allow(clippy::type_complexity)]
pub(crate) memory_cache:
Option<lru::LruCache<String, (SystemTime, Vec<u8>), BuildHasherDefault<FxHasher>>>,
Option<lru::LruCache<String, (DateTime<Utc>, Vec<u8>), BuildHasherDefault<FxHasher>>>,
pub(crate) invalidate_after: Option<Duration>,
pub(crate) ssr_renderer: crate::Renderer,
pub(crate) map_path: PathMapFn,
@@ -65,6 +66,7 @@ impl IncrementalRenderer {
let _ = std::fs::remove_dir_all(&self.static_dir);
}
#[cfg(not(target_arch = "wasm32"))]
fn track_timestamps(&self) -> bool {
self.invalidate_after.is_some()
}
@@ -98,25 +100,30 @@ impl IncrementalRenderer {
route: String,
html: Vec<u8>,
) -> Result<RenderFreshness, IncrementalRendererError> {
let file_path = self.route_as_path(&route);
if let Some(parent) = file_path.parent() {
if !parent.exists() {
std::fs::create_dir_all(parent)?;
#[cfg(not(target_arch = "wasm32"))]
{
use std::io::Write;
let file_path = self.route_as_path(&route);
if let Some(parent) = file_path.parent() {
if !parent.exists() {
std::fs::create_dir_all(parent)?;
}
}
let file = std::fs::File::create(file_path)?;
let mut file = std::io::BufWriter::new(file);
file.write_all(&html)?;
}
let file = std::fs::File::create(file_path)?;
let mut file = std::io::BufWriter::new(file);
file.write_all(&html)?;
self.add_to_memory_cache(route, html);
Ok(RenderFreshness::now(self.invalidate_after))
}
fn add_to_memory_cache(&mut self, route: String, html: Vec<u8>) {
if let Some(cache) = self.memory_cache.as_mut() {
cache.put(route, (SystemTime::now(), html));
cache.put(route, (Utc::now(), html));
}
}
#[cfg(not(target_arch = "wasm32"))]
fn promote_memory_cache<K: AsRef<str>>(&mut self, route: K) {
if let Some(cache) = self.memory_cache.as_mut() {
cache.promote(route.as_ref())
@@ -134,27 +141,28 @@ impl IncrementalRenderer {
.as_mut()
.and_then(|cache| cache.get(&route))
{
if let Ok(elapsed) = timestamp.elapsed() {
let age = elapsed.as_secs();
if let Some(invalidate_after) = self.invalidate_after {
if elapsed < invalidate_after {
tracing::trace!("memory cache hit {:?}", route);
output.write_all(cache_hit).await?;
let max_age = invalidate_after.as_secs();
return Ok(Some(RenderFreshness::new(age, max_age)));
}
} else {
let now = Utc::now();
let elapsed = timestamp.signed_duration_since(now);
let age = elapsed.num_seconds();
if let Some(invalidate_after) = self.invalidate_after {
if elapsed.to_std().unwrap() < invalidate_after {
tracing::trace!("memory cache hit {:?}", route);
output.write_all(cache_hit).await?;
return Ok(Some(RenderFreshness::new_age(age)));
let max_age = invalidate_after.as_secs();
return Ok(Some(RenderFreshness::new(age as u64, max_age)));
}
} else {
tracing::trace!("memory cache hit {:?}", route);
output.write_all(cache_hit).await?;
return Ok(Some(RenderFreshness::new_age(age as u64)));
}
}
// check the file cache
#[cfg(not(target_arch = "wasm32"))]
if let Some(file_path) = self.find_file(&route) {
if let Some(freshness) = file_path.freshness(self.invalidate_after) {
if let Ok(file) = tokio::fs::File::open(file_path.full_path).await {
let mut file = BufReader::new(file);
let mut file = tokio::io::BufReader::new(file);
tokio::io::copy_buf(&mut file, output).await?;
tracing::trace!("file cache hit {:?}", route);
self.promote_memory_cache(&route);
@@ -223,6 +231,7 @@ impl IncrementalRenderer {
}
}
#[cfg(not(target_arch = "wasm32"))]
fn route_as_path(&self, route: &str) -> PathBuf {
let mut file_path = (self.map_path)(route);
if self.track_timestamps() {