mirror of
https://github.com/DioxusLabs/dioxus
synced 2024-11-10 06:34:20 +00:00
revert changes from cli/dev refactor
This commit is contained in:
parent
1e51ec90c7
commit
ac5b01dab1
112 changed files with 2930 additions and 8923 deletions
|
@ -11,15 +11,18 @@ rust-version = "1.79.0"
|
|||
|
||||
[dependencies]
|
||||
# cli core
|
||||
clap = { workspace = true, features = ["derive", "cargo"] }
|
||||
clap = { version = "4.2", features = ["derive", "cargo"] }
|
||||
thiserror = { workspace = true }
|
||||
wasm-bindgen-cli-support = "0.2"
|
||||
wasm-bindgen-shared = "0.2"
|
||||
colored = "2.0.0"
|
||||
dioxus-cli-config = { workspace = true, features = ["cli"], default-features = false }
|
||||
|
||||
# features
|
||||
uuid = { version = "1.3.0", features = ["v4"] }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
log = "0.4.14"
|
||||
fern = { version = "0.6.0", features = ["colored"] }
|
||||
serde = { version = "1.0.136", features = ["derive"] }
|
||||
serde_json = "1.0.79"
|
||||
toml = { workspace = true }
|
||||
fs_extra = "1.2.0"
|
||||
cargo_toml = { workspace = true }
|
||||
|
@ -27,10 +30,10 @@ futures-util = { workspace = true, features = ["async-await-macro"] }
|
|||
notify = { workspace = true, features = ["serde"] }
|
||||
html_parser = { workspace = true }
|
||||
cargo_metadata = "0.18.1"
|
||||
tokio = { workspace = true, features = ["fs", "sync", "rt", "macros", "process", "rt-multi-thread"] }
|
||||
tokio = { version = "1.16.1", features = ["fs", "sync", "rt", "macros", "process", "rt-multi-thread"] }
|
||||
tokio-stream = "0.1.15"
|
||||
atty = "0.2.14"
|
||||
chrono = { workspace = true }
|
||||
chrono = "0.4.19"
|
||||
anyhow = "1"
|
||||
hyper = { workspace = true }
|
||||
hyper-util = "0.1.3"
|
||||
|
@ -42,7 +45,6 @@ console = "0.15.8"
|
|||
ctrlc = "3.2.3"
|
||||
futures-channel = { workspace = true }
|
||||
krates = { version = "0.17.0" }
|
||||
cargo-config2 = { workspace = true, optional = true }
|
||||
regex = "1.10.6"
|
||||
|
||||
axum = { workspace = true, features = ["ws"] }
|
||||
|
@ -52,7 +54,7 @@ tower-http = { workspace = true, features = ["full"] }
|
|||
proc-macro2 = { workspace = true, features = ["span-locations"] }
|
||||
syn = { workspace = true, features = ["full", "extra-traits", "visit", "visit-mut"] }
|
||||
|
||||
headers = "0.4.0"
|
||||
headers = "0.3.7"
|
||||
walkdir = "2"
|
||||
|
||||
# tools download
|
||||
|
@ -82,6 +84,7 @@ tauri-bundler = { workspace = true }
|
|||
prettyplease = { workspace = true }
|
||||
|
||||
# Assets
|
||||
manganis-cli-support = { workspace = true, features = ["html"] }
|
||||
brotli = "6.0.0"
|
||||
|
||||
dioxus-autofmt = { workspace = true }
|
||||
|
@ -92,11 +95,9 @@ dioxus-rsx-hotreload = { workspace = true }
|
|||
dioxus-html = { workspace = true, features = ["hot-reload-context"] }
|
||||
dioxus-core = { workspace = true, features = ["serialize"] }
|
||||
dioxus-core-types = { workspace = true }
|
||||
dioxus-devtools-types = { workspace = true }
|
||||
dioxus-runtime-config = { workspace = true }
|
||||
dioxus-fullstack = { workspace = true }
|
||||
dioxus-hot-reload = { workspace = true, features = ["serve"] }
|
||||
ignore = "0.4.22"
|
||||
env_logger = { workspace = true }
|
||||
env_logger = "0.11.3"
|
||||
|
||||
tracing-subscriber = { version = "0.3.18", features = ["std", "env-filter"] }
|
||||
console-subscriber = { version = "0.3.0", optional = true }
|
||||
|
@ -106,58 +107,10 @@ ratatui = { version = "0.27.0", features = ["crossterm", "unstable"] }
|
|||
crossterm = { version = "0.27.0", features = ["event-stream"] }
|
||||
ansi-to-tui = "=5.0.0-rc.1"
|
||||
ansi-to-html = "0.2.1"
|
||||
manganis-core = { workspace = true }
|
||||
|
||||
# link intercept
|
||||
tempfile = "3.3"
|
||||
|
||||
|
||||
# # just use the manganis crate directly since it has all the types we need
|
||||
# manganis = { workspace = true }
|
||||
# serde = { version = "1.0.183", features = ["derive"] }
|
||||
# serde_json = {version="1.0.116"}
|
||||
# anyhow = "1"
|
||||
# rayon = "1.7.0"
|
||||
# rustc-hash = "1.1.0"
|
||||
|
||||
# # Tailwind
|
||||
# railwind = "0.1.5"
|
||||
|
||||
# # Image compression/conversion
|
||||
# # JPEG
|
||||
# mozjpeg = { version = "0.10.7", default-features = false, features = ["parallel"] }
|
||||
# # PNG
|
||||
# imagequant = "4.2.0"
|
||||
# png = "0.17.9"
|
||||
# # Conversion
|
||||
# image = { version = "0.25" }
|
||||
# ravif = { version = "0.11", default-features = false }
|
||||
|
||||
# # CSS Minification
|
||||
# lightningcss = "1.0.0-alpha.44"
|
||||
|
||||
# # # Js minification
|
||||
# # swc = "=0.283.0"
|
||||
# # swc_common = "=0.37.1"
|
||||
|
||||
|
||||
# Extracting data from an executable
|
||||
object = {version="0.36.0", features=["wasm"]}
|
||||
tokio-util = { version = "0.7.11", features = ["full"] }
|
||||
|
||||
# [dev-dependencies]
|
||||
# tracing-subscriber = "0.3.18"
|
||||
|
||||
# [features]
|
||||
# default = ["html"]
|
||||
# html = []
|
||||
# # html = ["manganis-common/html"]
|
||||
|
||||
# asm = ["ravif/asm", "mozjpeg/nasm_simd"]
|
||||
|
||||
# # Note: this feature now enables nothing and should be removed in the next major version
|
||||
# webp = []
|
||||
# avif = []
|
||||
# on macos, we need to specify the vendored feature on ssl when cross compiling
|
||||
# [target.'cfg(target_os = "macos")'.dependencies]
|
||||
# openssl = { version = "0.10", features = ["vendored"] }
|
||||
|
||||
[build-dependencies]
|
||||
built = { version = "=0.7.4", features = ["git2"] }
|
||||
|
@ -168,20 +121,23 @@ plugin = []
|
|||
tokio-console = ["dep:console-subscriber"]
|
||||
|
||||
# when releasing dioxus, we want to enable wasm-opt
|
||||
# and then also maybe developing it too.
|
||||
# making this optional cuts workspace deps down from 1000 to 500, so it's very nice for workspace adev
|
||||
optimization = ["wasm-opt", "asset-opt"]
|
||||
asset-opt = []
|
||||
wasm-opt = ["dep:wasm-opt"]
|
||||
|
||||
[[bin]]
|
||||
path = "src/main.rs"
|
||||
name = "dx"
|
||||
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.3"
|
||||
|
||||
[package.metadata.binstall]
|
||||
# temporarily, we're going to use the 0.5.0 download page for all binaries
|
||||
pkg-url = "{ repo }/releases/download/v{ version }/dx-{ target }-v{ version }{ archive-suffix }"
|
||||
|
||||
# the old one...
|
||||
# pkg-url = "{ repo }/releases/download/v0.5.0/dx-{ target }-v{ version }{ archive-suffix }"
|
||||
|
||||
# pkg-url = "{ repo }/releases/download/v{ version }/dx-{ target }{ archive-suffix }"
|
||||
pkg-fmt = "tgz"
|
||||
|
||||
[package.metadata.binstall.overrides.x86_64-pc-windows-msvc]
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,149 +0,0 @@
|
|||
use crate::builder::*;
|
||||
use crate::dioxus_crate::DioxusCrate;
|
||||
use crate::Result;
|
||||
use crate::{build::BuildArgs, bundler::AppBundle};
|
||||
use futures_util::StreamExt;
|
||||
use progress::{BuildUpdateProgress, ProgressRx, ProgressTx};
|
||||
use tokio::task::JoinSet;
|
||||
|
||||
/// A handle to ongoing builds and then the spawned tasks themselves
|
||||
pub(crate) struct Builder {
|
||||
/// The application we are building
|
||||
krate: DioxusCrate,
|
||||
|
||||
/// Ongoing builds
|
||||
building: JoinSet<(Platform, Result<AppBundle>)>,
|
||||
|
||||
/// Messages from the build engine will be sent to this channel
|
||||
channel: (ProgressTx, ProgressRx),
|
||||
}
|
||||
|
||||
pub(crate) enum BuildUpdate {
|
||||
Progress(BuildUpdateProgress),
|
||||
|
||||
BuildReady {
|
||||
target: Platform,
|
||||
result: AppBundle,
|
||||
},
|
||||
|
||||
BuildFailed {
|
||||
target: Platform,
|
||||
err: crate::Error,
|
||||
},
|
||||
|
||||
/// All builds have finished and there's nothing left to do
|
||||
AllFinished,
|
||||
}
|
||||
|
||||
impl Builder {
|
||||
/// Create a new builder that can accept multiple simultaneous builds
|
||||
pub(crate) fn new(krate: &DioxusCrate) -> Self {
|
||||
Self {
|
||||
channel: futures_channel::mpsc::unbounded(),
|
||||
krate: krate.clone(),
|
||||
building: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new builder and immediately start a build
|
||||
pub(crate) fn start(krate: &DioxusCrate, args: BuildArgs) -> Result<Self> {
|
||||
let mut builder = Self::new(krate);
|
||||
builder.build(args)?;
|
||||
Ok(builder)
|
||||
}
|
||||
|
||||
/// Start a new build - killing the current one if it exists
|
||||
pub(crate) fn build(&mut self, args: BuildArgs) -> Result<()> {
|
||||
self.abort_all();
|
||||
|
||||
super::profiles::initialize_profiles(&self.krate)?;
|
||||
|
||||
let mut requests = vec![
|
||||
// At least one request for the target app
|
||||
BuildRequest::new_client(&self.krate, args.clone(), self.channel.0.clone()),
|
||||
];
|
||||
|
||||
// And then the fullstack app if we're building a fullstack app
|
||||
if args.fullstack {
|
||||
let server = BuildRequest::new_server(&self.krate, args.clone(), self.tx());
|
||||
requests.push(server);
|
||||
}
|
||||
|
||||
// Queue the builds on the joinset, being careful to not panic, so we can unwrap
|
||||
for build_request in requests {
|
||||
let platform = build_request.platform();
|
||||
tracing::info!("Spawning build request for {platform:?}");
|
||||
self.building.spawn(async move {
|
||||
// Run the build, but in a protected spawn, ensuring we can't produce panics and thus, joinerrors
|
||||
let res = tokio::spawn(build_request.build())
|
||||
.await
|
||||
.unwrap_or_else(|err| {
|
||||
Err(crate::Error::Unique(format!(
|
||||
"Panic while building project: {err:?}"
|
||||
)))
|
||||
});
|
||||
|
||||
(platform, res)
|
||||
});
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Wait for the build to finish
|
||||
pub(crate) async fn wait_for_finish(&mut self) -> Result<Vec<AppBundle>> {
|
||||
let mut results = vec![];
|
||||
|
||||
loop {
|
||||
let next = self.wait().await;
|
||||
|
||||
match next {
|
||||
BuildUpdate::Progress(_) => {}
|
||||
BuildUpdate::BuildReady { target, result } => {
|
||||
results.push(result);
|
||||
tracing::info!("Build ready for target {target:?}");
|
||||
}
|
||||
BuildUpdate::BuildFailed { target, err } => {
|
||||
tracing::error!("Build failed for target {target:?}: {err}");
|
||||
return Err(err);
|
||||
}
|
||||
BuildUpdate::AllFinished => {
|
||||
tracing::info!("All builds finished!");
|
||||
return Ok(results);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Wait for any new updates to the builder - either it completed or gave us a message etc
|
||||
///
|
||||
/// Also listen for any input from the app's handle
|
||||
///
|
||||
/// Returns immediately with `Finished` if there are no more builds to run - don't poll-loop this!
|
||||
pub(crate) async fn wait(&mut self) -> BuildUpdate {
|
||||
if self.building.is_empty() {
|
||||
return BuildUpdate::AllFinished;
|
||||
}
|
||||
|
||||
tokio::select! {
|
||||
Some(update) = self.channel.1.next() => BuildUpdate::Progress(update),
|
||||
Some(Ok((target, result))) = self.building.join_next() => {
|
||||
match result {
|
||||
Ok(result) => BuildUpdate::BuildReady { target, result },
|
||||
Err(err) => BuildUpdate::BuildFailed { target, err },
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Shutdown the current build process
|
||||
///
|
||||
/// todo: might want to use a cancellation token here to allow cleaner shutdowns
|
||||
pub(crate) fn abort_all(&mut self) {
|
||||
self.building.abort_all();
|
||||
}
|
||||
|
||||
fn tx(&self) -> ProgressTx {
|
||||
self.channel.0.clone()
|
||||
}
|
||||
}
|
|
@ -1,222 +1,48 @@
|
|||
use super::web::install_web_build_tooling;
|
||||
use super::BuildRequest;
|
||||
use crate::{assets::AssetManifest, link::LINK_OUTPUT_ENV_VAR};
|
||||
use crate::{builder::Platform, bundler::AppBundle};
|
||||
use crate::{link::InterceptedArgs, Result};
|
||||
use super::BuildResult;
|
||||
use super::TargetPlatform;
|
||||
use crate::assets::copy_dir_to;
|
||||
use crate::assets::create_assets_head;
|
||||
use crate::assets::{asset_manifest, process_assets, AssetConfigDropGuard};
|
||||
use crate::builder::progress::build_cargo;
|
||||
use crate::builder::progress::CargoBuildResult;
|
||||
use crate::builder::progress::Stage;
|
||||
use crate::builder::progress::UpdateBuildProgress;
|
||||
use crate::builder::progress::UpdateStage;
|
||||
use crate::link::LinkCommand;
|
||||
use crate::Result;
|
||||
use crate::TraceSrc;
|
||||
use anyhow::Context;
|
||||
use serde::Deserialize;
|
||||
use std::{path::PathBuf, process::Stdio};
|
||||
use tokio::{io::AsyncBufReadExt, process::Command};
|
||||
use dioxus_cli_config::Platform;
|
||||
use futures_channel::mpsc::UnboundedSender;
|
||||
use manganis_cli_support::AssetManifest;
|
||||
use manganis_cli_support::ManganisSupportGuard;
|
||||
use std::fs::create_dir_all;
|
||||
use std::path::PathBuf;
|
||||
use tracing::error;
|
||||
|
||||
impl BuildRequest {
|
||||
pub(crate) async fn build(self) -> Result<AppBundle> {
|
||||
tracing::info!("🚅 Running build command...");
|
||||
|
||||
// Install any tooling that might be required for this build.
|
||||
self.verify_tooling().await?;
|
||||
|
||||
// Run the build command with a pretty loader, returning the executable output location
|
||||
let executable = self.build_cargo().await?;
|
||||
|
||||
// Extract out the asset manifest from the executable using our linker tricks
|
||||
let assets = self.collect_assets().await?;
|
||||
|
||||
// Assemble a bundle from everything
|
||||
AppBundle::new(self, assets, executable).await
|
||||
}
|
||||
|
||||
pub(crate) async fn verify_tooling(&self) -> Result<()> {
|
||||
match self.platform() {
|
||||
// If this is a web, build make sure we have the web build tooling set up
|
||||
Platform::Web => {}
|
||||
|
||||
// Make sure we have mobile tooling if need be
|
||||
Platform::Ios => {}
|
||||
Platform::Android => {}
|
||||
|
||||
// Make sure we have the required deps for desktop. More important for linux
|
||||
Platform::Desktop => {}
|
||||
|
||||
// Generally nothing for the server, pretty simple
|
||||
Platform::Server => {}
|
||||
Platform::Liveview => {}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Run `cargo`, returning the location of the final exectuable
|
||||
///
|
||||
/// todo: add some stats here, like timing reports, crate-graph optimizations, etc
|
||||
pub(crate) async fn build_cargo(&self) -> anyhow::Result<PathBuf> {
|
||||
// Extract the unit count of the crate graph so build_cargo has more accurate data
|
||||
let crate_count = self.get_unit_count_estimate().await;
|
||||
|
||||
self.status_starting_build();
|
||||
|
||||
let mut child = Command::new("cargo")
|
||||
.arg("rustc")
|
||||
.envs(
|
||||
self.custom_target_dir
|
||||
.as_ref()
|
||||
.map(|dir| ("CARGO_TARGET_DIR", dir)),
|
||||
)
|
||||
.current_dir(self.krate.crate_dir())
|
||||
.arg("--message-format")
|
||||
.arg("json-diagnostic-rendered-ansi")
|
||||
.args(&self.build_arguments())
|
||||
.arg("--")
|
||||
.args(self.rust_flags.clone())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()
|
||||
.context("Failed to spawn cargo build")?;
|
||||
|
||||
let stdout = tokio::io::BufReader::new(child.stdout.take().unwrap());
|
||||
let stderr = tokio::io::BufReader::new(child.stderr.take().unwrap());
|
||||
|
||||
let mut output_location = None;
|
||||
let mut stdout = stdout.lines();
|
||||
let mut stderr = stderr.lines();
|
||||
let mut units_compiled = 0;
|
||||
let mut errors = Vec::new();
|
||||
|
||||
loop {
|
||||
use cargo_metadata::Message;
|
||||
|
||||
let line = tokio::select! {
|
||||
Ok(Some(line)) = stdout.next_line() => line,
|
||||
Ok(Some(line)) = stderr.next_line() => line,
|
||||
else => break,
|
||||
};
|
||||
|
||||
let mut deserializer = serde_json::Deserializer::from_str(line.trim());
|
||||
deserializer.disable_recursion_limit();
|
||||
|
||||
let message =
|
||||
Message::deserialize(&mut deserializer).unwrap_or(Message::TextLine(line));
|
||||
|
||||
match message {
|
||||
Message::BuildScriptExecuted(_) => units_compiled += 1,
|
||||
Message::TextLine(line) => self.status_build_message(line),
|
||||
Message::CompilerMessage(msg) => {
|
||||
let message = msg.message;
|
||||
self.status_build_diagnostic(&message);
|
||||
const WARNING_LEVELS: &[cargo_metadata::diagnostic::DiagnosticLevel] = &[
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Help,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Note,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Warning,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Error,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::FailureNote,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Ice,
|
||||
];
|
||||
const FATAL_LEVELS: &[cargo_metadata::diagnostic::DiagnosticLevel] = &[
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Error,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::FailureNote,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Ice,
|
||||
];
|
||||
if WARNING_LEVELS.contains(&message.level) {
|
||||
if let Some(rendered) = message.rendered {
|
||||
errors.push(rendered);
|
||||
}
|
||||
}
|
||||
if FATAL_LEVELS.contains(&message.level) {
|
||||
return Err(anyhow::anyhow!(errors.join("\n")));
|
||||
}
|
||||
}
|
||||
Message::CompilerArtifact(artifact) => {
|
||||
units_compiled += 1;
|
||||
match artifact.executable {
|
||||
Some(executable) => output_location = Some(executable.into()),
|
||||
None => {
|
||||
self.status_build_progress(units_compiled as f64 / crate_count as f64)
|
||||
}
|
||||
}
|
||||
}
|
||||
Message::BuildFinished(finished) => {
|
||||
if !finished.success {
|
||||
return Err(anyhow::anyhow!("Build failed"));
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
output_location.context("Build did not return an executable")
|
||||
}
|
||||
|
||||
/// Run the linker intercept and then fill in our AssetManifest from the incremental artifacts
|
||||
///
|
||||
/// This will execute `dx` with an env var set to force `dx` to operate as a linker, and then
|
||||
/// traverse the .o and .rlib files rustc passes that new `dx` instance, collecting the link
|
||||
/// tables marked by manganis and parsing them as a ResourceAsset.
|
||||
pub(crate) async fn collect_assets(&self) -> anyhow::Result<AssetManifest> {
|
||||
// If this is the server build, the client build already copied any assets we need
|
||||
if self.platform() == Platform::Server {
|
||||
return Ok(AssetManifest::default());
|
||||
}
|
||||
|
||||
// If assets are skipped, we don't need to collect them
|
||||
if self.build.skip_assets {
|
||||
return Ok(AssetManifest::default());
|
||||
}
|
||||
|
||||
// Create a temp file to put the output of the args
|
||||
// We need to do this since rustc won't actually print the link args to stdout, so we need to
|
||||
// give `dx` a file to dump its env::args into
|
||||
let tmp_file = tempfile::NamedTempFile::new()?;
|
||||
|
||||
// Run `cargo rustc` again, but this time with a custom linker (dx) and an env var to force
|
||||
// `dx` to act as a linker
|
||||
//
|
||||
// Pass in the tmp_file as the env var itself
|
||||
//
|
||||
// NOTE: that -Csave-temps=y is needed to prevent rustc from deleting the incremental cache...
|
||||
// This might not be a "stable" way of keeping artifacts around, but it's in stable rustc, so we use it
|
||||
Command::new("cargo")
|
||||
.arg("rustc")
|
||||
.args(self.build_arguments())
|
||||
.arg("--offline") /* don't use the network, should already be resolved */
|
||||
.arg("--")
|
||||
.arg(format!("-Clinker={}", std::env::current_exe().unwrap().display())) /* pass ourselves in */
|
||||
.env(LINK_OUTPUT_ENV_VAR, tmp_file.path()) /* but with the env var pointing to the temp file */
|
||||
.arg("-Csave-temps=y") /* don't delete the incremental cache */
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.output()
|
||||
.await?;
|
||||
|
||||
// Read the contents of the temp file
|
||||
let args =
|
||||
std::fs::read_to_string(tmp_file.path()).context("Failed to read linker output")?;
|
||||
|
||||
// Parse them as a Vec<String> which is just our informal format for link args in the cli
|
||||
// Todo: this might be wrong-ish on windows? The format is weird
|
||||
let args = serde_json::from_str::<InterceptedArgs>(&args)
|
||||
.context("Failed to parse linker output")?;
|
||||
|
||||
Ok(AssetManifest::new_from_linker_intercept(args))
|
||||
}
|
||||
|
||||
/// Create a list of arguments for cargo builds
|
||||
pub(crate) fn build_arguments(&self) -> Vec<String> {
|
||||
let mut cargo_args = Vec::new();
|
||||
|
||||
if self.build.release {
|
||||
if self.build_arguments.release {
|
||||
cargo_args.push("--release".to_string());
|
||||
}
|
||||
if self.build.verbose {
|
||||
if self.build_arguments.verbose {
|
||||
cargo_args.push("--verbose".to_string());
|
||||
} else {
|
||||
cargo_args.push("--quiet".to_string());
|
||||
}
|
||||
|
||||
if let Some(custom_profile) = &self.build.profile {
|
||||
if let Some(custom_profile) = &self.build_arguments.profile {
|
||||
cargo_args.push("--profile".to_string());
|
||||
cargo_args.push(custom_profile.to_string());
|
||||
}
|
||||
|
||||
if !self.build.target_args.features.is_empty() {
|
||||
let features_str = self.build.target_args.features.join(" ");
|
||||
if !self.build_arguments.target_args.features.is_empty() {
|
||||
let features_str = self.build_arguments.target_args.features.join(" ");
|
||||
cargo_args.push("--features".to_string());
|
||||
cargo_args.push(features_str);
|
||||
}
|
||||
|
@ -224,20 +50,20 @@ impl BuildRequest {
|
|||
if let Some(target) = self
|
||||
.targeting_web()
|
||||
.then_some("wasm32-unknown-unknown")
|
||||
.or(self.build.target_args.target.as_deref())
|
||||
.or(self.build_arguments.target_args.target.as_deref())
|
||||
{
|
||||
cargo_args.push("--target".to_string());
|
||||
cargo_args.push(target.to_string());
|
||||
}
|
||||
|
||||
if let Some(ref platform) = self.build.target_args.package {
|
||||
if let Some(ref platform) = self.build_arguments.target_args.package {
|
||||
cargo_args.push(String::from("-p"));
|
||||
cargo_args.push(platform.clone());
|
||||
}
|
||||
|
||||
cargo_args.append(&mut self.build.cargo_args.clone());
|
||||
cargo_args.append(&mut self.build_arguments.cargo_args.clone());
|
||||
|
||||
match self.krate.executable_type() {
|
||||
match self.dioxus_crate.executable_type() {
|
||||
krates::cm::TargetKind::Bin => {
|
||||
cargo_args.push("--bin".to_string());
|
||||
}
|
||||
|
@ -249,9 +75,202 @@ impl BuildRequest {
|
|||
}
|
||||
_ => {}
|
||||
};
|
||||
|
||||
cargo_args.push(self.krate.executable_name().to_string());
|
||||
cargo_args.push(self.dioxus_crate.executable_name().to_string());
|
||||
|
||||
cargo_args
|
||||
}
|
||||
|
||||
/// Create a build command for cargo
|
||||
fn prepare_build_command(&self) -> Result<(tokio::process::Command, Vec<String>)> {
|
||||
let mut cmd = tokio::process::Command::new("cargo");
|
||||
cmd.arg("rustc");
|
||||
if let Some(target_dir) = &self.target_dir {
|
||||
cmd.env("CARGO_TARGET_DIR", target_dir);
|
||||
}
|
||||
cmd.current_dir(self.dioxus_crate.crate_dir())
|
||||
.arg("--message-format")
|
||||
.arg("json-diagnostic-rendered-ansi");
|
||||
|
||||
let cargo_args = self.build_arguments();
|
||||
cmd.args(&cargo_args);
|
||||
|
||||
cmd.arg("--").args(self.rust_flags.clone());
|
||||
|
||||
Ok((cmd, cargo_args))
|
||||
}
|
||||
|
||||
pub(crate) async fn build(
|
||||
&self,
|
||||
mut progress: UnboundedSender<UpdateBuildProgress>,
|
||||
) -> Result<BuildResult> {
|
||||
tracing::info!(
|
||||
dx_src = ?TraceSrc::Build,
|
||||
"Running build [{}] command...",
|
||||
self.target_platform,
|
||||
);
|
||||
|
||||
// Set up runtime guards
|
||||
let mut dioxus_version = crate::dx_build_info::PKG_VERSION.to_string();
|
||||
if let Some(hash) = crate::dx_build_info::GIT_COMMIT_HASH_SHORT {
|
||||
let hash = &hash.trim_start_matches('g')[..4];
|
||||
dioxus_version.push_str(&format!("-{hash}"));
|
||||
}
|
||||
let _guard = dioxus_cli_config::__private::save_config(
|
||||
&self.dioxus_crate.dioxus_config,
|
||||
&dioxus_version,
|
||||
);
|
||||
let _manganis_support = ManganisSupportGuard::default();
|
||||
let _asset_guard =
|
||||
AssetConfigDropGuard::new(self.dioxus_crate.dioxus_config.web.app.base_path.as_deref());
|
||||
|
||||
// If this is a web, build make sure we have the web build tooling set up
|
||||
if self.targeting_web() {
|
||||
install_web_build_tooling(&mut progress).await?;
|
||||
}
|
||||
|
||||
// Create the build command
|
||||
let (cmd, cargo_args) = self.prepare_build_command()?;
|
||||
|
||||
// Run the build command with a pretty loader
|
||||
let crate_count = self.get_unit_count_estimate().await;
|
||||
let cargo_result = build_cargo(crate_count, cmd, &mut progress).await?;
|
||||
|
||||
// Post process the build result
|
||||
let build_result = self
|
||||
.post_process_build(cargo_args, &cargo_result, &mut progress)
|
||||
.await
|
||||
.context("Failed to post process build")?;
|
||||
|
||||
tracing::info!(
|
||||
dx_src = ?TraceSrc::Build,
|
||||
"Build completed: [{}]",
|
||||
self.dioxus_crate.out_dir().display(),
|
||||
);
|
||||
|
||||
_ = progress.start_send(UpdateBuildProgress {
|
||||
stage: Stage::Finished,
|
||||
update: UpdateStage::Start,
|
||||
});
|
||||
|
||||
Ok(build_result)
|
||||
}
|
||||
|
||||
async fn post_process_build(
|
||||
&self,
|
||||
cargo_args: Vec<String>,
|
||||
cargo_build_result: &CargoBuildResult,
|
||||
progress: &mut UnboundedSender<UpdateBuildProgress>,
|
||||
) -> Result<BuildResult> {
|
||||
_ = progress.start_send(UpdateBuildProgress {
|
||||
stage: Stage::OptimizingAssets,
|
||||
update: UpdateStage::Start,
|
||||
});
|
||||
|
||||
let assets = self.collect_assets(cargo_args, progress).await?;
|
||||
|
||||
let file_name = self.dioxus_crate.executable_name();
|
||||
|
||||
// Move the final output executable into the dist folder
|
||||
let out_dir = self.target_out_dir();
|
||||
if !out_dir.is_dir() {
|
||||
create_dir_all(&out_dir)?;
|
||||
}
|
||||
let mut output_path = out_dir.join(file_name);
|
||||
if self.targeting_web() {
|
||||
output_path.set_extension("wasm");
|
||||
} else if cfg!(windows) {
|
||||
output_path.set_extension("exe");
|
||||
}
|
||||
if let Some(res_path) = &cargo_build_result.output_location {
|
||||
std::fs::copy(res_path, &output_path)?;
|
||||
}
|
||||
|
||||
self.copy_assets_dir()?;
|
||||
|
||||
// Create the build result
|
||||
let build_result = BuildResult {
|
||||
executable: output_path,
|
||||
target_platform: self.target_platform,
|
||||
};
|
||||
|
||||
// If this is a web build, run web post processing steps
|
||||
if self.targeting_web() {
|
||||
self.post_process_web_build(&build_result, assets.as_ref(), progress)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(build_result)
|
||||
}
|
||||
|
||||
async fn collect_assets(
|
||||
&self,
|
||||
cargo_args: Vec<String>,
|
||||
progress: &mut UnboundedSender<UpdateBuildProgress>,
|
||||
) -> anyhow::Result<Option<AssetManifest>> {
|
||||
// If this is the server build, the client build already copied any assets we need
|
||||
if self.target_platform == TargetPlatform::Server {
|
||||
return Ok(None);
|
||||
}
|
||||
// If assets are skipped, we don't need to collect them
|
||||
if self.build_arguments.skip_assets {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// Start Manganis linker intercept.
|
||||
let linker_args = vec![format!("{}", self.target_out_dir().display())];
|
||||
|
||||
// Don't block the main thread - manganis should not be running its own std process but it's
|
||||
// fine to wrap it here at the top
|
||||
let build = self.clone();
|
||||
let mut progress = progress.clone();
|
||||
tokio::task::spawn_blocking(move || {
|
||||
manganis_cli_support::start_linker_intercept(
|
||||
&LinkCommand::command_name(),
|
||||
cargo_args,
|
||||
Some(linker_args),
|
||||
)?;
|
||||
let Some(assets) = asset_manifest(&build) else {
|
||||
error!(dx_src = ?TraceSrc::Build, "the asset manifest was not provided by manganis and we were not able to collect assets");
|
||||
return Err(anyhow::anyhow!("asset manifest was not provided by manganis"));
|
||||
};
|
||||
// Collect assets from the asset manifest the linker intercept created
|
||||
process_assets(&build, &assets, &mut progress)?;
|
||||
// Create the __assets_head.html file for bundling
|
||||
create_assets_head(&build, &assets)?;
|
||||
|
||||
Ok(Some(assets))
|
||||
})
|
||||
.await
|
||||
.map_err(|e| anyhow::anyhow!(e))?
|
||||
}
|
||||
|
||||
pub fn copy_assets_dir(&self) -> anyhow::Result<()> {
|
||||
tracing::info!(dx_src = ?TraceSrc::Build, "Copying public assets to the output directory...");
|
||||
let out_dir = self.target_out_dir();
|
||||
let asset_dir = self.dioxus_crate.asset_dir();
|
||||
|
||||
if asset_dir.is_dir() {
|
||||
// Only pre-compress the assets from the web build. Desktop assets are not served, so they don't need to be pre_compressed
|
||||
let pre_compress = self.targeting_web()
|
||||
&& self
|
||||
.dioxus_crate
|
||||
.should_pre_compress_web_assets(self.build_arguments.release);
|
||||
|
||||
copy_dir_to(asset_dir, out_dir, pre_compress)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get the output directory for a specific built target
|
||||
pub fn target_out_dir(&self) -> PathBuf {
|
||||
let out_dir = self.dioxus_crate.out_dir();
|
||||
match self.build_arguments.platform {
|
||||
Some(Platform::Fullstack | Platform::StaticGeneration) => match self.target_platform {
|
||||
TargetPlatform::Web => out_dir.join("public"),
|
||||
TargetPlatform::Desktop => out_dir.join("desktop"),
|
||||
_ => out_dir,
|
||||
},
|
||||
_ => out_dir,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
128
packages/cli/src/builder/fullstack.rs
Normal file
128
packages/cli/src/builder/fullstack.rs
Normal file
|
@ -0,0 +1,128 @@
|
|||
use toml_edit::Item;
|
||||
|
||||
use crate::builder::Build;
|
||||
use crate::dioxus_crate::DioxusCrate;
|
||||
|
||||
use crate::builder::BuildRequest;
|
||||
use std::io::Write;
|
||||
|
||||
use super::TargetPlatform;
|
||||
|
||||
static CLIENT_PROFILE: &str = "dioxus-client";
|
||||
static SERVER_PROFILE: &str = "dioxus-server";
|
||||
|
||||
// The `opt-level=2` increases build times, but can noticeably decrease time
|
||||
// between saving changes and being able to interact with an app. The "overall"
|
||||
// time difference (between having and not having the optimization) can be
|
||||
// almost imperceptible (~1 s) but also can be very noticeable (~6 s) — depends
|
||||
// on setup (hardware, OS, browser, idle load).
|
||||
// Find or create the client and server profiles in the .cargo/config.toml file
|
||||
fn initialize_profiles(config: &DioxusCrate) -> crate::Result<()> {
|
||||
let config_path = config.workspace_dir().join(".cargo/config.toml");
|
||||
let mut config = match std::fs::read_to_string(&config_path) {
|
||||
Ok(config) => config.parse::<toml_edit::DocumentMut>().map_err(|e| {
|
||||
crate::Error::Other(anyhow::anyhow!("Failed to parse .cargo/config.toml: {}", e))
|
||||
})?,
|
||||
Err(_) => Default::default(),
|
||||
};
|
||||
|
||||
if let Item::Table(table) = config
|
||||
.as_table_mut()
|
||||
.entry("profile")
|
||||
.or_insert(Item::Table(Default::default()))
|
||||
{
|
||||
if let toml_edit::Entry::Vacant(entry) = table.entry(CLIENT_PROFILE) {
|
||||
let mut client = toml_edit::Table::new();
|
||||
client.insert("inherits", Item::Value("dev".into()));
|
||||
client.insert("opt-level", Item::Value(2.into()));
|
||||
entry.insert(Item::Table(client));
|
||||
}
|
||||
|
||||
if let toml_edit::Entry::Vacant(entry) = table.entry(SERVER_PROFILE) {
|
||||
let mut server = toml_edit::Table::new();
|
||||
server.insert("inherits", Item::Value("dev".into()));
|
||||
server.insert("opt-level", Item::Value(2.into()));
|
||||
entry.insert(Item::Table(server));
|
||||
}
|
||||
}
|
||||
|
||||
// Write the config back to the file
|
||||
if let Some(parent) = config_path.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
let file = std::fs::File::create(config_path)?;
|
||||
let mut buf_writer = std::io::BufWriter::new(file);
|
||||
write!(buf_writer, "{}", config)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
impl BuildRequest {
|
||||
pub(crate) fn new_fullstack(
|
||||
config: DioxusCrate,
|
||||
build_arguments: Build,
|
||||
serve: bool,
|
||||
) -> Result<Vec<Self>, crate::Error> {
|
||||
initialize_profiles(&config)?;
|
||||
|
||||
Ok(vec![
|
||||
Self::new_client(serve, &config, &build_arguments),
|
||||
Self::new_server(serve, &config, &build_arguments),
|
||||
])
|
||||
}
|
||||
|
||||
fn new_with_target_directory_rust_flags_and_features(
|
||||
serve: bool,
|
||||
config: &DioxusCrate,
|
||||
build: &Build,
|
||||
feature: Option<String>,
|
||||
target_platform: TargetPlatform,
|
||||
) -> Self {
|
||||
let config = config.clone();
|
||||
let mut build = build.clone();
|
||||
// Add the server feature to the features we pass to the build
|
||||
if let Some(feature) = feature {
|
||||
build.target_args.features.push(feature);
|
||||
}
|
||||
|
||||
// Add the server flags to the build arguments
|
||||
Self {
|
||||
serve,
|
||||
build_arguments: build.clone(),
|
||||
dioxus_crate: config,
|
||||
rust_flags: Default::default(),
|
||||
target_dir: None,
|
||||
target_platform,
|
||||
}
|
||||
}
|
||||
|
||||
fn new_server(serve: bool, config: &DioxusCrate, build: &Build) -> Self {
|
||||
let mut build = build.clone();
|
||||
if build.profile.is_none() {
|
||||
build.profile = Some(CLIENT_PROFILE.to_string());
|
||||
}
|
||||
let client_feature = build.auto_detect_server_feature(config);
|
||||
Self::new_with_target_directory_rust_flags_and_features(
|
||||
serve,
|
||||
config,
|
||||
&build,
|
||||
build.target_args.server_feature.clone().or(client_feature),
|
||||
TargetPlatform::Server,
|
||||
)
|
||||
}
|
||||
|
||||
fn new_client(serve: bool, config: &DioxusCrate, build: &Build) -> Self {
|
||||
let mut build = build.clone();
|
||||
if build.profile.is_none() {
|
||||
build.profile = Some(SERVER_PROFILE.to_string());
|
||||
}
|
||||
let (client_feature, client_platform) = build.auto_detect_client_platform(config);
|
||||
Self::new_with_target_directory_rust_flags_and_features(
|
||||
serve,
|
||||
config,
|
||||
&build,
|
||||
build.target_args.client_feature.clone().or(client_feature),
|
||||
client_platform,
|
||||
)
|
||||
}
|
||||
}
|
|
@ -1,18 +1,213 @@
|
|||
/// The primary entrypoint for our build + optimize + bundle engine
|
||||
///
|
||||
/// Handles multiple ongoing tasks and allows you to queue up builds from interactive and non-interactive contexts
|
||||
///
|
||||
/// Uses a request -> response architecture that allows you to monitor the progress with an optional message
|
||||
/// receiver.
|
||||
mod builder;
|
||||
mod cargo;
|
||||
mod platform;
|
||||
mod profiles;
|
||||
mod progress;
|
||||
mod request;
|
||||
mod web;
|
||||
use crate::cli::serve::ServeArguments;
|
||||
use crate::dioxus_crate::DioxusCrate;
|
||||
use crate::Result;
|
||||
use crate::{build::Build, TraceSrc};
|
||||
use dioxus_cli_config::{Platform, RuntimeCLIArguments};
|
||||
use futures_util::stream::select_all;
|
||||
use futures_util::StreamExt;
|
||||
use std::net::SocketAddr;
|
||||
use std::str::FromStr;
|
||||
use std::{path::PathBuf, process::Stdio};
|
||||
use tokio::process::{Child, Command};
|
||||
|
||||
pub(crate) use builder::*;
|
||||
pub(crate) use platform::*;
|
||||
pub(crate) use progress::*;
|
||||
pub(crate) use request::*;
|
||||
mod cargo;
|
||||
mod fullstack;
|
||||
mod prepare_html;
|
||||
mod progress;
|
||||
mod web;
|
||||
pub use progress::{Stage, UpdateBuildProgress, UpdateStage};
|
||||
|
||||
/// The target platform for the build
|
||||
/// This is very similar to the Platform enum, but we need to be able to differentiate between the
|
||||
/// server and web targets for the fullstack platform
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub enum TargetPlatform {
|
||||
Web,
|
||||
Desktop,
|
||||
Server,
|
||||
Liveview,
|
||||
}
|
||||
|
||||
impl FromStr for TargetPlatform {
|
||||
type Err = ();
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s {
|
||||
"web" => Ok(Self::Web),
|
||||
"desktop" => Ok(Self::Desktop),
|
||||
"axum" | "server" => Ok(Self::Server),
|
||||
"liveview" => Ok(Self::Liveview),
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for TargetPlatform {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
TargetPlatform::Web => write!(f, "web"),
|
||||
TargetPlatform::Desktop => write!(f, "desktop"),
|
||||
TargetPlatform::Server => write!(f, "server"),
|
||||
TargetPlatform::Liveview => write!(f, "liveview"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A request for a project to be built
|
||||
#[derive(Clone)]
|
||||
pub struct BuildRequest {
|
||||
/// Whether the build is for serving the application
|
||||
pub serve: bool,
|
||||
/// The configuration for the crate we are building
|
||||
pub dioxus_crate: DioxusCrate,
|
||||
/// The target platform for the build
|
||||
pub target_platform: TargetPlatform,
|
||||
/// The arguments for the build
|
||||
pub build_arguments: Build,
|
||||
/// The rustc flags to pass to the build
|
||||
pub rust_flags: Vec<String>,
|
||||
/// The target directory for the build
|
||||
pub target_dir: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl BuildRequest {
|
||||
pub fn create(
|
||||
serve: bool,
|
||||
dioxus_crate: &DioxusCrate,
|
||||
build_arguments: impl Into<Build>,
|
||||
) -> crate::Result<Vec<Self>> {
|
||||
let build_arguments = build_arguments.into();
|
||||
let platform = build_arguments.platform();
|
||||
let single_platform = |platform| {
|
||||
let dioxus_crate = dioxus_crate.clone();
|
||||
vec![Self {
|
||||
serve,
|
||||
dioxus_crate,
|
||||
build_arguments: build_arguments.clone(),
|
||||
target_platform: platform,
|
||||
rust_flags: Default::default(),
|
||||
target_dir: Default::default(),
|
||||
}]
|
||||
};
|
||||
Ok(match platform {
|
||||
Platform::Liveview => single_platform(TargetPlatform::Liveview),
|
||||
Platform::Web => single_platform(TargetPlatform::Web),
|
||||
Platform::Desktop => single_platform(TargetPlatform::Desktop),
|
||||
Platform::StaticGeneration | Platform::Fullstack => {
|
||||
Self::new_fullstack(dioxus_crate.clone(), build_arguments, serve)?
|
||||
}
|
||||
_ => unimplemented!("Unknown platform: {platform:?}"),
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) async fn build_all_parallel(
|
||||
build_requests: Vec<BuildRequest>,
|
||||
) -> Result<Vec<BuildResult>> {
|
||||
let multi_platform_build = build_requests.len() > 1;
|
||||
let mut build_progress = Vec::new();
|
||||
let mut set = tokio::task::JoinSet::new();
|
||||
for build_request in build_requests {
|
||||
let (tx, rx) = futures_channel::mpsc::unbounded();
|
||||
build_progress.push((build_request.build_arguments.platform(), rx));
|
||||
set.spawn(async move { build_request.build(tx).await });
|
||||
}
|
||||
|
||||
// Watch the build progress as it comes in
|
||||
loop {
|
||||
let mut next = select_all(
|
||||
build_progress
|
||||
.iter_mut()
|
||||
.map(|(platform, rx)| rx.map(move |update| (*platform, update))),
|
||||
);
|
||||
match next.next().await {
|
||||
Some((platform, update)) => {
|
||||
if multi_platform_build {
|
||||
print!("{platform} build: ");
|
||||
update.to_std_out();
|
||||
} else {
|
||||
update.to_std_out();
|
||||
}
|
||||
}
|
||||
None => {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut all_results = Vec::new();
|
||||
|
||||
while let Some(result) = set.join_next().await {
|
||||
let result = result
|
||||
.map_err(|_| crate::Error::Unique("Failed to build project".to_owned()))??;
|
||||
all_results.push(result);
|
||||
}
|
||||
|
||||
Ok(all_results)
|
||||
}
|
||||
|
||||
/// Check if the build is targeting the web platform
|
||||
pub fn targeting_web(&self) -> bool {
|
||||
self.target_platform == TargetPlatform::Web
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct BuildResult {
|
||||
pub executable: PathBuf,
|
||||
pub target_platform: TargetPlatform,
|
||||
}
|
||||
|
||||
impl BuildResult {
|
||||
/// Open the executable if this is a native build
|
||||
pub fn open(
|
||||
&self,
|
||||
serve: &ServeArguments,
|
||||
fullstack_address: Option<SocketAddr>,
|
||||
workspace: &std::path::Path,
|
||||
) -> std::io::Result<Option<Child>> {
|
||||
match self.target_platform {
|
||||
TargetPlatform::Web => {
|
||||
tracing::info!(dx_src = ?TraceSrc::Dev, "Serving web app on http://{} 🎉", serve.address.address());
|
||||
return Ok(None);
|
||||
}
|
||||
TargetPlatform::Desktop => {
|
||||
tracing::info!(dx_src = ?TraceSrc::Dev, "Launching desktop app at {} 🎉", self.executable.display());
|
||||
}
|
||||
TargetPlatform::Server => {
|
||||
if let Some(fullstack_address) = fullstack_address {
|
||||
tracing::info!(
|
||||
dx_src = ?TraceSrc::Dev,
|
||||
"Launching fullstack server on http://{:?} 🎉",
|
||||
fullstack_address
|
||||
);
|
||||
}
|
||||
}
|
||||
TargetPlatform::Liveview => {
|
||||
if let Some(fullstack_address) = fullstack_address {
|
||||
tracing::info!(
|
||||
dx_src = ?TraceSrc::Dev,
|
||||
"Launching liveview server on http://{:?} 🎉",
|
||||
fullstack_address
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tracing::info!(dx_src = ?TraceSrc::Dev, "Press [o] to open the app manually.");
|
||||
|
||||
let arguments = RuntimeCLIArguments::new(serve.address.address(), fullstack_address);
|
||||
let executable = self.executable.canonicalize()?;
|
||||
let mut cmd = Command::new(executable);
|
||||
cmd
|
||||
// When building the fullstack server, we need to forward the serve arguments (like port) to the fullstack server through env vars
|
||||
.env(
|
||||
dioxus_cli_config::__private::SERVE_ENV,
|
||||
serde_json::to_string(&arguments).unwrap(),
|
||||
)
|
||||
.stderr(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.kill_on_drop(true)
|
||||
.current_dir(workspace);
|
||||
Ok(Some(cmd.spawn()?))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,105 +0,0 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt::Display;
|
||||
use std::str::FromStr;
|
||||
|
||||
#[derive(
|
||||
Copy,
|
||||
Clone,
|
||||
Hash,
|
||||
PartialEq,
|
||||
Eq,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
Serialize,
|
||||
Deserialize,
|
||||
Debug,
|
||||
Default,
|
||||
clap::ValueEnum,
|
||||
)]
|
||||
#[non_exhaustive]
|
||||
pub(crate) enum Platform {
|
||||
/// Targeting the web platform using WASM
|
||||
#[clap(name = "web")]
|
||||
#[serde(rename = "web")]
|
||||
#[default]
|
||||
Web,
|
||||
|
||||
/// Targeting the desktop platform using Tao/Wry-based webview
|
||||
///
|
||||
/// Will only build for your native architecture - to do cross builds you need to use a VM.
|
||||
/// Read more about cross-builds on the Dioxus Website.
|
||||
#[clap(name = "desktop")]
|
||||
#[serde(rename = "desktop")]
|
||||
Desktop,
|
||||
|
||||
/// Targeting the ios platform
|
||||
///
|
||||
/// Can't work properly if you're not building from an Apple device.
|
||||
#[clap(name = "ios")]
|
||||
#[serde(rename = "ios")]
|
||||
Ios,
|
||||
|
||||
/// Targeting the android platform
|
||||
#[clap(name = "android")]
|
||||
#[serde(rename = "android")]
|
||||
Android,
|
||||
|
||||
/// Targetting the server platform using Axum and Dioxus-Fullstack
|
||||
///
|
||||
/// This is implicitly passed if `fullstack` is enabled as a feature. Using this variant simply
|
||||
/// means you're only building the server variant without the `.wasm` to serve.
|
||||
#[clap(name = "server")]
|
||||
#[serde(rename = "server")]
|
||||
Server,
|
||||
|
||||
/// Targeting the static generation platform using SSR and Dioxus-Fullstack
|
||||
#[clap(name = "liveview")]
|
||||
#[serde(rename = "liveview")]
|
||||
Liveview,
|
||||
}
|
||||
|
||||
/// An error that occurs when a platform is not recognized
|
||||
pub(crate) struct UnknownPlatformError;
|
||||
|
||||
impl std::fmt::Display for UnknownPlatformError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "Unknown platform")
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for Platform {
|
||||
type Err = UnknownPlatformError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s {
|
||||
"web" => Ok(Self::Web),
|
||||
"desktop" => Ok(Self::Desktop),
|
||||
"liveview" => Ok(Self::Liveview),
|
||||
"server" => Ok(Self::Server),
|
||||
"ios" => Ok(Self::Ios),
|
||||
"android" => Ok(Self::Android),
|
||||
_ => Err(UnknownPlatformError),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Platform {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let feature = self.feature_name();
|
||||
f.write_str(feature)
|
||||
}
|
||||
}
|
||||
|
||||
impl Platform {
|
||||
/// Get the feature name for the platform in the dioxus crate
|
||||
pub(crate) fn feature_name(&self) -> &str {
|
||||
match self {
|
||||
Platform::Web => "web",
|
||||
Platform::Desktop => "desktop",
|
||||
Platform::Liveview => "liveview",
|
||||
Platform::Ios => "ios",
|
||||
Platform::Android => "android",
|
||||
Platform::Server => "server",
|
||||
}
|
||||
}
|
||||
}
|
205
packages/cli/src/builder/prepare_html.rs
Normal file
205
packages/cli/src/builder/prepare_html.rs
Normal file
|
@ -0,0 +1,205 @@
|
|||
//! Build the HTML file to load a web application. The index.html file may be created from scratch or modified from the `index.html` file in the crate root.
|
||||
|
||||
use super::{BuildRequest, UpdateBuildProgress};
|
||||
use crate::Result;
|
||||
use crate::TraceSrc;
|
||||
use futures_channel::mpsc::UnboundedSender;
|
||||
use manganis_cli_support::AssetManifest;
|
||||
use std::fmt::Write;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
const DEFAULT_HTML: &str = include_str!("../../assets/index.html");
|
||||
const TOAST_HTML: &str = include_str!("../../assets/toast.html");
|
||||
|
||||
impl BuildRequest {
|
||||
pub(crate) fn prepare_html(
|
||||
&self,
|
||||
assets: Option<&AssetManifest>,
|
||||
_progress: &mut UnboundedSender<UpdateBuildProgress>,
|
||||
) -> Result<String> {
|
||||
let mut html = html_or_default(&self.dioxus_crate.crate_dir());
|
||||
|
||||
// Inject any resources from the config into the html
|
||||
self.inject_resources(&mut html, assets)?;
|
||||
|
||||
// Inject loading scripts if they are not already present
|
||||
self.inject_loading_scripts(&mut html);
|
||||
|
||||
// Replace any special placeholders in the HTML with resolved values
|
||||
self.replace_template_placeholders(&mut html);
|
||||
|
||||
let title = self.dioxus_crate.dioxus_config.web.app.title.clone();
|
||||
|
||||
replace_or_insert_before("{app_title}", "</title", &title, &mut html);
|
||||
|
||||
Ok(html)
|
||||
}
|
||||
|
||||
// Inject any resources from the config into the html
|
||||
fn inject_resources(&self, html: &mut String, assets: Option<&AssetManifest>) -> Result<()> {
|
||||
// Collect all resources into a list of styles and scripts
|
||||
let resources = &self.dioxus_crate.dioxus_config.web.resource;
|
||||
let mut style_list = resources.style.clone().unwrap_or_default();
|
||||
let mut script_list = resources.script.clone().unwrap_or_default();
|
||||
|
||||
if self.serve {
|
||||
style_list.extend(resources.dev.style.iter().cloned());
|
||||
script_list.extend(resources.dev.script.iter().cloned());
|
||||
}
|
||||
|
||||
let mut head_resources = String::new();
|
||||
// Add all styles to the head
|
||||
for style in &style_list {
|
||||
writeln!(
|
||||
&mut head_resources,
|
||||
"<link rel=\"stylesheet\" href=\"{}\">",
|
||||
&style.to_str().unwrap(),
|
||||
)?;
|
||||
}
|
||||
|
||||
if !style_list.is_empty() {
|
||||
self.send_resource_deprecation_warning(style_list, ResourceType::Style);
|
||||
}
|
||||
|
||||
// Add all scripts to the head
|
||||
for script in &script_list {
|
||||
writeln!(
|
||||
&mut head_resources,
|
||||
"<script src=\"{}\"></script>",
|
||||
&script.to_str().unwrap(),
|
||||
)?;
|
||||
}
|
||||
|
||||
if !script_list.is_empty() {
|
||||
self.send_resource_deprecation_warning(script_list, ResourceType::Script);
|
||||
}
|
||||
|
||||
// Inject any resources from manganis into the head
|
||||
if let Some(assets) = assets {
|
||||
head_resources.push_str(&assets.head());
|
||||
}
|
||||
|
||||
replace_or_insert_before("{style_include}", "</head", &head_resources, html);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Inject loading scripts if they are not already present
|
||||
fn inject_loading_scripts(&self, html: &mut String) {
|
||||
// If it looks like we are already loading wasm or the current build opted out of injecting loading scripts, don't inject anything
|
||||
if !self.build_arguments.inject_loading_scripts || html.contains("__wbindgen_start") {
|
||||
return;
|
||||
}
|
||||
|
||||
// If not, insert the script
|
||||
*html = html.replace(
|
||||
"</body",
|
||||
r#"<script>
|
||||
// We can't use a module script here because we need to start the script immediately when streaming
|
||||
import("/{base_path}/assets/dioxus/{app_name}.js").then(
|
||||
({ default: init }) => {
|
||||
init("/{base_path}/assets/dioxus/{app_name}_bg.wasm").then((wasm) => {
|
||||
if (wasm.__wbindgen_start == undefined) {
|
||||
wasm.main();
|
||||
}
|
||||
});
|
||||
}
|
||||
);
|
||||
</script>
|
||||
{DX_TOAST_UTILITIES}
|
||||
</body"#,
|
||||
);
|
||||
|
||||
*html = match self.serve && !self.build_arguments.release {
|
||||
true => html.replace("{DX_TOAST_UTILITIES}", TOAST_HTML),
|
||||
false => html.replace("{DX_TOAST_UTILITIES}", ""),
|
||||
};
|
||||
|
||||
// And try to insert preload links for the wasm and js files
|
||||
*html = html.replace(
|
||||
"</head",
|
||||
r#"<link rel="preload" href="/{base_path}/assets/dioxus/{app_name}_bg.wasm" as="fetch" type="application/wasm" crossorigin="">
|
||||
<link rel="preload" href="/{base_path}/assets/dioxus/{app_name}.js" as="script">
|
||||
</head"#);
|
||||
}
|
||||
|
||||
/// Replace any special placeholders in the HTML with resolved values
|
||||
fn replace_template_placeholders(&self, html: &mut String) {
|
||||
let base_path = self.dioxus_crate.dioxus_config.web.app.base_path();
|
||||
*html = html.replace("{base_path}", base_path);
|
||||
|
||||
let app_name = &self.dioxus_crate.dioxus_config.application.name;
|
||||
*html = html.replace("{app_name}", app_name);
|
||||
}
|
||||
|
||||
fn send_resource_deprecation_warning(&self, paths: Vec<PathBuf>, variant: ResourceType) {
|
||||
const RESOURCE_DEPRECATION_MESSAGE: &str = r#"The `web.resource` config has been deprecated in favor of head components and will be removed in a future release."#;
|
||||
|
||||
let replacement_components = paths
|
||||
.iter()
|
||||
.map(|path| {
|
||||
let path = if path.exists() {
|
||||
path.to_path_buf()
|
||||
} else {
|
||||
// If the path is absolute, make it relative to the current directory before we join it
|
||||
// The path is actually a web path which is relative to the root of the website
|
||||
let path = path.strip_prefix("/").unwrap_or(path);
|
||||
let asset_dir_path = self.dioxus_crate.asset_dir().join(path);
|
||||
if let Ok(absolute_path) = asset_dir_path.canonicalize() {
|
||||
let absolute_crate_root =
|
||||
self.dioxus_crate.crate_dir().canonicalize().unwrap();
|
||||
PathBuf::from("./")
|
||||
.join(absolute_path.strip_prefix(absolute_crate_root).unwrap())
|
||||
} else {
|
||||
path.to_path_buf()
|
||||
}
|
||||
};
|
||||
match variant {
|
||||
ResourceType::Style => format!(
|
||||
" head::Link {{ rel: \"stylesheet\", href: asset!(css(\"{}\")) }}",
|
||||
path.display()
|
||||
),
|
||||
ResourceType::Script => {
|
||||
format!(" Script {{ src: asset!(file(\"{}\")) }}", path.display())
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let replacement_components = format!("rsx! {{\n{}\n}}", replacement_components.join("\n"));
|
||||
let section_name = match variant {
|
||||
ResourceType::Style => "web.resource.style",
|
||||
ResourceType::Script => "web.resource.script",
|
||||
};
|
||||
|
||||
let message = format!(
|
||||
"{RESOURCE_DEPRECATION_MESSAGE}\nTo migrate to head components, remove `{section_name}` and include the following rsx in your root component:\n```rust\n{replacement_components}\n```"
|
||||
);
|
||||
|
||||
tracing::warn!(dx_src = ?TraceSrc::Build, "{}", message);
|
||||
}
|
||||
}
|
||||
|
||||
enum ResourceType {
|
||||
Style,
|
||||
Script,
|
||||
}
|
||||
|
||||
/// Read the html file from the crate root or use the default html file
|
||||
fn html_or_default(crate_root: &Path) -> String {
|
||||
let custom_html_file = crate_root.join("index.html");
|
||||
std::fs::read_to_string(custom_html_file).unwrap_or_else(|_| String::from(DEFAULT_HTML))
|
||||
}
|
||||
|
||||
/// Replace a string or insert the new contents before a marker
|
||||
fn replace_or_insert_before(
|
||||
replace: &str,
|
||||
or_insert_before: &str,
|
||||
with: &str,
|
||||
content: &mut String,
|
||||
) {
|
||||
if content.contains(replace) {
|
||||
*content = content.replace(replace, with);
|
||||
} else if let Some(pos) = content.find(or_insert_before) {
|
||||
content.insert_str(pos, with);
|
||||
}
|
||||
}
|
|
@ -1,52 +0,0 @@
|
|||
use crate::dioxus_crate::DioxusCrate;
|
||||
use std::io::Write;
|
||||
use toml_edit::Item;
|
||||
|
||||
pub(crate) static CLIENT_PROFILE: &str = "dioxus-client";
|
||||
pub(crate) static SERVER_PROFILE: &str = "dioxus-server";
|
||||
|
||||
// The `opt-level=2` increases build times, but can noticeably decrease time
|
||||
// between saving changes and being able to interact with an app. The "overall"
|
||||
// time difference (between having and not having the optimization) can be
|
||||
// almost imperceptible (~1 s) but also can be very noticeable (~6 s) — depends
|
||||
// on setup (hardware, OS, browser, idle load).
|
||||
// Find or create the client and server profiles in the .cargo/config.toml file
|
||||
pub(crate) fn initialize_profiles(krate: &DioxusCrate) -> crate::Result<()> {
|
||||
let config_path = krate.workspace_dir().join(".cargo/config.toml");
|
||||
let mut config = match std::fs::read_to_string(&config_path) {
|
||||
Ok(config) => config.parse::<toml_edit::DocumentMut>().map_err(|e| {
|
||||
crate::Error::Other(anyhow::anyhow!("Failed to parse .cargo/config.toml: {}", e))
|
||||
})?,
|
||||
Err(_) => Default::default(),
|
||||
};
|
||||
|
||||
if let Item::Table(table) = config
|
||||
.as_table_mut()
|
||||
.entry("profile")
|
||||
.or_insert(Item::Table(Default::default()))
|
||||
{
|
||||
if let toml_edit::Entry::Vacant(entry) = table.entry(CLIENT_PROFILE) {
|
||||
let mut client = toml_edit::Table::new();
|
||||
client.insert("inherits", Item::Value("dev".into()));
|
||||
client.insert("opt-level", Item::Value(2.into()));
|
||||
entry.insert(Item::Table(client));
|
||||
}
|
||||
|
||||
if let toml_edit::Entry::Vacant(entry) = table.entry(SERVER_PROFILE) {
|
||||
let mut server = toml_edit::Table::new();
|
||||
server.insert("inherits", Item::Value("dev".into()));
|
||||
server.insert("opt-level", Item::Value(2.into()));
|
||||
entry.insert(Item::Table(server));
|
||||
}
|
||||
}
|
||||
|
||||
// Write the config back to the file
|
||||
if let Some(parent) = config_path.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
let file = std::fs::File::create(config_path)?;
|
||||
let mut buf_writer = std::io::BufWriter::new(file);
|
||||
write!(buf_writer, "{}", config)?;
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -1,141 +1,18 @@
|
|||
//! Report progress about the build to the user. We use channels to report progress back to the CLI.
|
||||
use super::{BuildRequest, Platform};
|
||||
use crate::TraceSrc;
|
||||
|
||||
use super::BuildRequest;
|
||||
use anyhow::Context;
|
||||
use cargo_metadata::{diagnostic::Diagnostic, Message};
|
||||
use futures_channel::mpsc::{UnboundedReceiver, UnboundedSender};
|
||||
use cargo_metadata::Message;
|
||||
use futures_channel::mpsc::UnboundedSender;
|
||||
use serde::Deserialize;
|
||||
use std::ops::Deref;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Stdio;
|
||||
use std::{fmt::Display, path::Path};
|
||||
use tokio::{io::AsyncBufReadExt, process::Command};
|
||||
use tracing::Level;
|
||||
|
||||
impl BuildRequest {
|
||||
pub(crate) fn status_build_diagnostic(&self, message: &Diagnostic) {
|
||||
// _ = self.progress.unbounded_send(BuildUpdateProgress {
|
||||
// stage: Stage::Compiling,
|
||||
// update: UpdateStage::AddMessage(message.clone().into()),
|
||||
// platform: self.platform(),
|
||||
// });
|
||||
}
|
||||
|
||||
pub(crate) fn status_build_message(&self, line: String) {
|
||||
// _ = self.progress.unbounded_send(BuildUpdateProgress {
|
||||
// platform: self.platform(),
|
||||
// stage: Stage::Compiling,
|
||||
// update: UpdateStage::AddMessage(BuildMessage {
|
||||
// level: Level::DEBUG,
|
||||
// message: MessageType::Text(line),
|
||||
// source: MessageSource::Build,
|
||||
// }),
|
||||
// });
|
||||
}
|
||||
|
||||
pub(crate) fn status_build_progress(&self, build_progress: f64) {
|
||||
_ = self.progress.unbounded_send(BuildUpdateProgress {
|
||||
platform: self.platform(),
|
||||
stage: Stage::Compiling,
|
||||
update: UpdateStage::SetProgress((build_progress).clamp(0.0, 1.00)),
|
||||
});
|
||||
}
|
||||
|
||||
pub(crate) fn status_starting_build(&self) {
|
||||
_ = self.progress.unbounded_send(BuildUpdateProgress {
|
||||
stage: Stage::Compiling,
|
||||
update: UpdateStage::Start,
|
||||
platform: self.platform(),
|
||||
});
|
||||
}
|
||||
|
||||
/// Try to get the unit graph for the crate. This is a nightly only feature which may not be available with the current version of rustc the user has installed.
|
||||
pub(crate) async fn get_unit_count(&self) -> Option<usize> {
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct UnitGraph {
|
||||
units: Vec<serde_json::Value>,
|
||||
}
|
||||
|
||||
let output = tokio::process::Command::new("cargo")
|
||||
.arg("+nightly")
|
||||
.arg("build")
|
||||
.arg("--unit-graph")
|
||||
.arg("-Z")
|
||||
.arg("unstable-options")
|
||||
.args(self.build_arguments())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.output()
|
||||
.await
|
||||
.ok()?;
|
||||
|
||||
if !output.status.success() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let output_text = String::from_utf8(output.stdout).ok()?;
|
||||
let graph: UnitGraph = serde_json::from_str(&output_text).ok()?;
|
||||
|
||||
Some(graph.units.len())
|
||||
}
|
||||
|
||||
/// Get an estimate of the number of units in the crate. If nightly rustc is not available, this will return an estimate of the number of units in the crate based on cargo metadata.
|
||||
/// TODO: always use https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#unit-graph once it is stable
|
||||
pub(crate) async fn get_unit_count_estimate(&self) -> usize {
|
||||
// Try to get it from nightly
|
||||
self.get_unit_count().await.unwrap_or_else(|| {
|
||||
// Otherwise, use cargo metadata
|
||||
(self
|
||||
.krate
|
||||
.krates
|
||||
.krates_filtered(krates::DepKind::Dev)
|
||||
.iter()
|
||||
.map(|k| k.targets.len())
|
||||
.sum::<usize>() as f64
|
||||
/ 3.5) as usize
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn status_build_finished(&self) {
|
||||
tracing::info!("🚩 Build completed: [{}]", self.krate.out_dir().display());
|
||||
|
||||
_ = self.progress.unbounded_send(BuildUpdateProgress {
|
||||
platform: self.platform(),
|
||||
stage: Stage::Finished,
|
||||
update: UpdateStage::Start,
|
||||
});
|
||||
}
|
||||
|
||||
pub(crate) fn status_copying_asset(&self, cur: usize, total: usize, asset: &Path) {
|
||||
// Update the progress
|
||||
// _ = self.progress.unbounded_send(UpdateBuildProgress {
|
||||
// stage: Stage::OptimizingAssets,
|
||||
// update: UpdateStage::AddMessage(BuildMessage {
|
||||
// level: Level::INFO,
|
||||
// message: MessageType::Text(format!(
|
||||
// "Optimized static asset {}",
|
||||
// asset.display()
|
||||
// )),
|
||||
// source: MessageSource::Build,
|
||||
// }),
|
||||
// platform: self.target_platform,
|
||||
// });
|
||||
}
|
||||
|
||||
pub(crate) fn status_finished_asset(&self, idx: usize, total: usize, asset: &Path) {
|
||||
// Update the progress
|
||||
// _ = self.progress.unbounded_send(UpdateBuildProgress {
|
||||
// stage: Stage::OptimizingAssets,
|
||||
// update: UpdateStage::SetProgress(finished as f64 / asset_count as f64),
|
||||
// platform: self.target_platform,
|
||||
// });
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) type ProgressTx = UnboundedSender<BuildUpdateProgress>;
|
||||
pub(crate) type ProgressRx = UnboundedReceiver<BuildUpdateProgress>;
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
|
||||
#[derive(Default, Debug, PartialOrd, Ord, PartialEq, Eq, Clone, Copy)]
|
||||
pub(crate) enum Stage {
|
||||
pub enum Stage {
|
||||
#[default]
|
||||
Initializing = 0,
|
||||
InstallingWasmTooling = 1,
|
||||
|
@ -167,14 +44,13 @@ impl std::fmt::Display for Stage {
|
|||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct BuildUpdateProgress {
|
||||
pub(crate) stage: Stage,
|
||||
pub(crate) update: UpdateStage,
|
||||
pub(crate) platform: Platform,
|
||||
pub struct UpdateBuildProgress {
|
||||
pub stage: Stage,
|
||||
pub update: UpdateStage,
|
||||
}
|
||||
|
||||
impl BuildUpdateProgress {
|
||||
pub(crate) fn to_std_out(&self) {
|
||||
impl UpdateBuildProgress {
|
||||
pub fn to_std_out(&self) {
|
||||
match &self.update {
|
||||
UpdateStage::Start => println!("--- {} ---", self.stage),
|
||||
UpdateStage::SetProgress(progress) => {
|
||||
|
@ -188,69 +64,162 @@ impl BuildUpdateProgress {
|
|||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub(crate) enum UpdateStage {
|
||||
pub enum UpdateStage {
|
||||
Start,
|
||||
SetProgress(f64),
|
||||
Failed(String),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub(crate) struct BuildMessage {
|
||||
pub(crate) level: Level,
|
||||
pub(crate) message: MessageType,
|
||||
pub(crate) source: MessageSource,
|
||||
}
|
||||
pub(crate) async fn build_cargo(
|
||||
crate_count: usize,
|
||||
mut cmd: tokio::process::Command,
|
||||
progress: &mut UnboundedSender<UpdateBuildProgress>,
|
||||
) -> anyhow::Result<CargoBuildResult> {
|
||||
_ = progress.start_send(UpdateBuildProgress {
|
||||
stage: Stage::Compiling,
|
||||
update: UpdateStage::Start,
|
||||
});
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub(crate) enum MessageType {
|
||||
Cargo(Diagnostic),
|
||||
Text(String),
|
||||
}
|
||||
let mut child = cmd
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()
|
||||
.context("Failed to spawn cargo build")?;
|
||||
let stdout = child.stdout.take().unwrap();
|
||||
let stderr = child.stderr.take().unwrap();
|
||||
let stdout = tokio::io::BufReader::new(stdout);
|
||||
let stderr = tokio::io::BufReader::new(stderr);
|
||||
let mut output_location = None;
|
||||
|
||||
/// Represents the source of where a message came from.
|
||||
///
|
||||
/// The CLI will render a prefix according to the message type
|
||||
/// but this prefix, [`MessageSource::to_string()`] shouldn't be used if a strict message source is required.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub(crate) enum MessageSource {
|
||||
/// Represents any message from the running application. Renders `[app]`
|
||||
App,
|
||||
let mut stdout = stdout.lines();
|
||||
let mut stderr = stderr.lines();
|
||||
let mut units_compiled = 0;
|
||||
let mut errors = Vec::new();
|
||||
loop {
|
||||
let line = tokio::select! {
|
||||
line = stdout.next_line() => {
|
||||
line
|
||||
}
|
||||
line = stderr.next_line() => {
|
||||
line
|
||||
}
|
||||
};
|
||||
let Some(line) = line? else {
|
||||
break;
|
||||
};
|
||||
let mut deserializer = serde_json::Deserializer::from_str(line.trim());
|
||||
deserializer.disable_recursion_limit();
|
||||
|
||||
/// Represents any generic message from the CLI. Renders `[dev]`
|
||||
///
|
||||
/// Usage of Tracing inside of the CLI will be routed to this type.
|
||||
Dev,
|
||||
let message = Message::deserialize(&mut deserializer).unwrap_or(Message::TextLine(line));
|
||||
match message {
|
||||
Message::CompilerMessage(msg) => {
|
||||
let message = msg.message;
|
||||
tracing::info!(dx_src = ?TraceSrc::Cargo, dx_no_fmt = true, "{}", message.to_string());
|
||||
|
||||
/// Represents a message from the build process. Renders `[bld]`
|
||||
///
|
||||
/// This is anything emitted from a build process such as cargo and optimizations.
|
||||
Build,
|
||||
}
|
||||
|
||||
impl Display for MessageSource {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::App => write!(f, "app"),
|
||||
Self::Dev => write!(f, "dev"),
|
||||
Self::Build => write!(f, "bld"),
|
||||
const WARNING_LEVELS: &[cargo_metadata::diagnostic::DiagnosticLevel] = &[
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Help,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Note,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Warning,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Error,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::FailureNote,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Ice,
|
||||
];
|
||||
const FATAL_LEVELS: &[cargo_metadata::diagnostic::DiagnosticLevel] = &[
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Error,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::FailureNote,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Ice,
|
||||
];
|
||||
if WARNING_LEVELS.contains(&message.level) {
|
||||
if let Some(rendered) = message.rendered {
|
||||
errors.push(rendered);
|
||||
}
|
||||
}
|
||||
if FATAL_LEVELS.contains(&message.level) {
|
||||
return Err(anyhow::anyhow!(errors.join("\n")));
|
||||
}
|
||||
}
|
||||
Message::CompilerArtifact(artifact) => {
|
||||
units_compiled += 1;
|
||||
if let Some(executable) = artifact.executable {
|
||||
output_location = Some(executable.into());
|
||||
} else {
|
||||
let build_progress = units_compiled as f64 / crate_count as f64;
|
||||
_ = progress.start_send(UpdateBuildProgress {
|
||||
stage: Stage::Compiling,
|
||||
update: UpdateStage::SetProgress((build_progress).clamp(0.0, 1.00)),
|
||||
});
|
||||
}
|
||||
}
|
||||
Message::BuildScriptExecuted(_) => {
|
||||
units_compiled += 1;
|
||||
}
|
||||
Message::BuildFinished(finished) => {
|
||||
if !finished.success {
|
||||
return Err(anyhow::anyhow!("Build failed"));
|
||||
}
|
||||
}
|
||||
Message::TextLine(line) => {
|
||||
tracing::info!(dx_src = ?TraceSrc::Cargo, dx_no_fmt = true, "{}", line);
|
||||
}
|
||||
_ => {
|
||||
// Unknown message
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(CargoBuildResult { output_location })
|
||||
}
|
||||
|
||||
impl From<Diagnostic> for BuildMessage {
|
||||
fn from(message: Diagnostic) -> Self {
|
||||
Self {
|
||||
level: match message.level {
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Ice
|
||||
| cargo_metadata::diagnostic::DiagnosticLevel::FailureNote
|
||||
| cargo_metadata::diagnostic::DiagnosticLevel::Error => Level::ERROR,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Warning => Level::WARN,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Note => Level::INFO,
|
||||
cargo_metadata::diagnostic::DiagnosticLevel::Help => Level::DEBUG,
|
||||
_ => Level::DEBUG,
|
||||
},
|
||||
source: MessageSource::Build,
|
||||
message: MessageType::Cargo(message),
|
||||
pub(crate) struct CargoBuildResult {
|
||||
pub(crate) output_location: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl BuildRequest {
|
||||
/// Try to get the unit graph for the crate. This is a nightly only feature which may not be available with the current version of rustc the user has installed.
|
||||
async fn get_unit_count(&self) -> Option<usize> {
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct UnitGraph {
|
||||
units: Vec<serde_json::Value>,
|
||||
}
|
||||
|
||||
let mut cmd = tokio::process::Command::new("cargo");
|
||||
cmd.arg("+nightly");
|
||||
cmd.arg("build");
|
||||
cmd.arg("--unit-graph");
|
||||
cmd.arg("-Z").arg("unstable-options");
|
||||
|
||||
cmd.args(self.build_arguments());
|
||||
|
||||
let output = cmd
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.output()
|
||||
.await
|
||||
.ok()?;
|
||||
if !output.status.success() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let output_text = String::from_utf8(output.stdout).ok()?;
|
||||
let graph: UnitGraph = serde_json::from_str(&output_text).ok()?;
|
||||
|
||||
Some(graph.units.len())
|
||||
}
|
||||
|
||||
/// Get an estimate of the number of units in the crate. If nightly rustc is not available, this will return an estimate of the number of units in the crate based on cargo metadata.
|
||||
/// TODO: always use https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#unit-graph once it is stable
|
||||
pub(crate) async fn get_unit_count_estimate(&self) -> usize {
|
||||
// Try to get it from nightly
|
||||
self.get_unit_count().await.unwrap_or_else(|| {
|
||||
// Otherwise, use cargo metadata
|
||||
(self
|
||||
.dioxus_crate
|
||||
.krates
|
||||
.krates_filtered(krates::DepKind::Dev)
|
||||
.iter()
|
||||
.map(|k| k.targets.len())
|
||||
.sum::<usize>() as f64
|
||||
/ 3.5) as usize
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,129 +0,0 @@
|
|||
use super::progress::ProgressTx;
|
||||
use super::{platform, profiles::*};
|
||||
use crate::build::BuildArgs;
|
||||
use crate::builder::Platform;
|
||||
use crate::dioxus_crate::DioxusCrate;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// An app that's built, bundled, processed, and a handle to its running app, if it exists
|
||||
///
|
||||
/// As the build progresses, we'll fill in fields like assets, executable, entitlements, etc
|
||||
///
|
||||
/// If the app needs to be bundled, we'll add the bundle info here too
|
||||
pub(crate) struct BuildRequest {
|
||||
/// The configuration for the crate we are building
|
||||
pub(crate) krate: DioxusCrate,
|
||||
|
||||
/// The arguments for the build
|
||||
pub(crate) build: BuildArgs,
|
||||
|
||||
/// The rustc flags to pass to the build
|
||||
pub(crate) rust_flags: Vec<String>,
|
||||
|
||||
/// The target directory for the build
|
||||
pub(crate) custom_target_dir: Option<PathBuf>,
|
||||
|
||||
/// Status channel to send our progress updates to
|
||||
pub(crate) progress: ProgressTx,
|
||||
}
|
||||
|
||||
impl BuildRequest {
|
||||
pub(crate) fn new_client(
|
||||
krate: &DioxusCrate,
|
||||
mut build: BuildArgs,
|
||||
progress: ProgressTx,
|
||||
) -> Self {
|
||||
if build.profile.is_none() {
|
||||
build.profile = Some(CLIENT_PROFILE.to_string());
|
||||
}
|
||||
|
||||
let (client_feature, client_platform) = build.auto_detect_client_platform(krate);
|
||||
|
||||
let client_feature = match build.platform {
|
||||
Some(platform::Platform::Ios) => Some("mobile".to_string()),
|
||||
Some(platform::Platform::Android) => Some("android".to_string()),
|
||||
Some(plat) => Some(plat.to_string()),
|
||||
None => client_feature,
|
||||
};
|
||||
|
||||
let features = build.target_args.client_feature.clone().or(client_feature);
|
||||
|
||||
tracing::info!("Client feature: {features:?}");
|
||||
|
||||
let mut build = Self::new_with_target_directory_rust_flags_and_features(
|
||||
krate, &build, features, progress,
|
||||
);
|
||||
|
||||
build.build.platform = build.build.platform.or(Some(client_platform));
|
||||
build
|
||||
}
|
||||
|
||||
pub(crate) fn new_server(
|
||||
krate: &DioxusCrate,
|
||||
mut build: BuildArgs,
|
||||
progress: ProgressTx,
|
||||
) -> Self {
|
||||
if build.profile.is_none() {
|
||||
build.profile = Some(SERVER_PROFILE.to_string());
|
||||
}
|
||||
|
||||
let client_feature = build.auto_detect_server_feature(krate);
|
||||
let features = build.target_args.server_feature.clone().or(client_feature);
|
||||
tracing::info!("Server feature: {features:?}");
|
||||
let mut build = Self::new_with_target_directory_rust_flags_and_features(
|
||||
krate, &build, features, progress,
|
||||
);
|
||||
|
||||
build.build.platform = Some(platform::Platform::Server);
|
||||
build
|
||||
}
|
||||
|
||||
fn new_with_target_directory_rust_flags_and_features(
|
||||
krate: &DioxusCrate,
|
||||
build: &BuildArgs,
|
||||
feature: Option<String>,
|
||||
progress: ProgressTx,
|
||||
) -> Self {
|
||||
let config = krate.clone();
|
||||
let mut build = build.clone();
|
||||
|
||||
// Add the server feature to the features we pass to the build
|
||||
if let Some(feature) = feature {
|
||||
build.target_args.features.push(feature);
|
||||
}
|
||||
|
||||
// Add the server flags to the build arguments
|
||||
Self {
|
||||
build: build.clone(),
|
||||
krate: config,
|
||||
rust_flags: Default::default(),
|
||||
custom_target_dir: None,
|
||||
progress,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the platform for this build
|
||||
pub(crate) fn platform(&self) -> Platform {
|
||||
self.build
|
||||
.platform
|
||||
.unwrap_or_else(|| self.krate.dioxus_config.application.default_platform)
|
||||
}
|
||||
|
||||
/// The final output name of the app, primarly to be used when bundled
|
||||
///
|
||||
/// Needs to be very disambiguated
|
||||
/// Eg: my-app-web-macos-x86_64.app
|
||||
/// {app_name}-{platform}-{arch}
|
||||
///
|
||||
/// Does not include the extension
|
||||
pub(crate) fn app_name(&self) -> String {
|
||||
match self.platform() {
|
||||
Platform::Web => "web".to_string(),
|
||||
Platform::Desktop => todo!(),
|
||||
Platform::Ios => todo!(),
|
||||
Platform::Server => "server".to_string(),
|
||||
Platform::Android => todo!(),
|
||||
Platform::Liveview => todo!(),
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,37 +1,87 @@
|
|||
use super::{BuildRequest, Platform};
|
||||
use super::BuildRequest;
|
||||
use super::BuildResult;
|
||||
use crate::assets::pre_compress_folder;
|
||||
use crate::builder::progress::Stage;
|
||||
use crate::builder::progress::UpdateBuildProgress;
|
||||
use crate::builder::progress::UpdateStage;
|
||||
use crate::error::{Error, Result};
|
||||
use crate::{
|
||||
builder::progress::{
|
||||
BuildMessage, BuildUpdateProgress, MessageSource, MessageType, Stage, UpdateStage,
|
||||
},
|
||||
TraceSrc,
|
||||
};
|
||||
use anyhow::Context;
|
||||
use std::fmt::Write;
|
||||
use std::path::{Path, PathBuf};
|
||||
use crate::TraceSrc;
|
||||
use futures_channel::mpsc::UnboundedSender;
|
||||
use manganis_cli_support::AssetManifest;
|
||||
use std::path::Path;
|
||||
use tokio::process::Command;
|
||||
use tracing::Level;
|
||||
use wasm_bindgen_cli_support::Bindgen;
|
||||
|
||||
const DEFAULT_HTML: &str = include_str!("../../assets/index.html");
|
||||
const TOAST_HTML: &str = include_str!("../../assets/toast.html");
|
||||
// Attempt to automatically recover from a bindgen failure by updating the wasm-bindgen version
|
||||
async fn update_wasm_bindgen_version() -> Result<()> {
|
||||
let cli_bindgen_version = wasm_bindgen_shared::version();
|
||||
tracing::info!(dx_src = ?TraceSrc::Build, "Attempting to recover from bindgen failure by setting the wasm-bindgen version to {cli_bindgen_version}...");
|
||||
|
||||
let output = Command::new("cargo")
|
||||
.args([
|
||||
"update",
|
||||
"-p",
|
||||
"wasm-bindgen",
|
||||
"--precise",
|
||||
&cli_bindgen_version,
|
||||
])
|
||||
.output()
|
||||
.await;
|
||||
let mut error_message = None;
|
||||
if let Ok(output) = output {
|
||||
if output.status.success() {
|
||||
tracing::info!(dx_src = ?TraceSrc::Dev, "Successfully updated wasm-bindgen to {cli_bindgen_version}");
|
||||
return Ok(());
|
||||
} else {
|
||||
error_message = Some(output);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(output) = error_message {
|
||||
tracing::error!(dx_src = ?TraceSrc::Dev, "Failed to update wasm-bindgen: {:#?}", output);
|
||||
}
|
||||
|
||||
Err(Error::BuildFailed(format!("WASM bindgen build failed!\nThis is probably due to the Bindgen version, dioxus-cli is using `{cli_bindgen_version}` which is not compatible with your crate.\nPlease reinstall the dioxus cli to fix this issue.\nYou can reinstall the dioxus cli by running `cargo install dioxus-cli --force` and then rebuild your project")))
|
||||
}
|
||||
|
||||
/// Check if the wasm32-unknown-unknown target is installed and try to install it if not
|
||||
pub(crate) async fn install_web_build_tooling(
|
||||
progress: &mut UnboundedSender<UpdateBuildProgress>,
|
||||
) -> Result<()> {
|
||||
// If the user has rustup, we can check if the wasm32-unknown-unknown target is installed
|
||||
// Otherwise we can just assume it is installed - which is not great...
|
||||
// Eventually we can poke at the errors and let the user know they need to install the target
|
||||
if let Ok(wasm_check_command) = Command::new("rustup").args(["show"]).output().await {
|
||||
let wasm_check_output = String::from_utf8(wasm_check_command.stdout).unwrap();
|
||||
if !wasm_check_output.contains("wasm32-unknown-unknown") {
|
||||
_ = progress.start_send(UpdateBuildProgress {
|
||||
stage: Stage::InstallingWasmTooling,
|
||||
update: UpdateStage::Start,
|
||||
});
|
||||
tracing::info!(dx_src = ?TraceSrc::Build, "`wasm32-unknown-unknown` target not detected, installing..");
|
||||
let _ = Command::new("rustup")
|
||||
.args(["target", "add", "wasm32-unknown-unknown"])
|
||||
.output()
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
impl BuildRequest {
|
||||
pub(crate) async fn run_wasm_bindgen(
|
||||
&self,
|
||||
input_path: &Path,
|
||||
bindgen_outdir: &Path,
|
||||
) -> Result<()> {
|
||||
tracing::info!("Running wasm-bindgen");
|
||||
|
||||
async fn run_wasm_bindgen(&self, input_path: &Path, bindgen_outdir: &Path) -> Result<()> {
|
||||
tracing::info!(dx_src = ?TraceSrc::Build, "Running wasm-bindgen");
|
||||
let input_path = input_path.to_path_buf();
|
||||
let bindgen_outdir = bindgen_outdir.to_path_buf();
|
||||
let name = self.krate.dioxus_config.application.name.clone();
|
||||
let keep_debug = self.krate.dioxus_config.web.wasm_opt.debug || (!self.build.release);
|
||||
let keep_debug =
|
||||
self.dioxus_crate.dioxus_config.web.wasm_opt.debug || (!self.build_arguments.release);
|
||||
let name = self.dioxus_crate.dioxus_config.application.name.clone();
|
||||
let run_wasm_bindgen = move || {
|
||||
// [3] Bindgen the final binary for use easy linking
|
||||
let mut bindgen_builder = Bindgen::new();
|
||||
|
||||
let start = std::time::Instant::now();
|
||||
tokio::task::spawn_blocking(move || {
|
||||
Bindgen::new()
|
||||
bindgen_builder
|
||||
.input_path(&input_path)
|
||||
.web(true)
|
||||
.unwrap()
|
||||
|
@ -43,263 +93,101 @@ impl BuildRequest {
|
|||
.remove_producers_section(!keep_debug)
|
||||
.out_name(&name)
|
||||
.generate(&bindgen_outdir)
|
||||
})
|
||||
.await
|
||||
.context("Wasm-bindgen crashed while optimizing the wasm binary")?
|
||||
.context("Failed to generate wasm-bindgen bindings")?;
|
||||
.unwrap();
|
||||
};
|
||||
let bindgen_result = tokio::task::spawn_blocking(run_wasm_bindgen.clone()).await;
|
||||
|
||||
tracing::info!(dx_src = ?TraceSrc::Build, "wasm-bindgen complete in {:?}", start.elapsed());
|
||||
// WASM bindgen requires the exact version of the bindgen schema to match the version the CLI was built with
|
||||
// If we get an error, we can try to recover by pinning the user's wasm-bindgen version to the version we used
|
||||
if let Err(err) = bindgen_result {
|
||||
tracing::error!(dx_src = ?TraceSrc::Build, "Bindgen build failed: {:?}", err);
|
||||
update_wasm_bindgen_version().await?;
|
||||
run_wasm_bindgen();
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub(crate) fn run_wasm_opt(&self, bindgen_outdir: &std::path::PathBuf) -> Result<(), Error> {
|
||||
if !self.build.release {
|
||||
return Ok(());
|
||||
};
|
||||
/// Post process the WASM build artifacts
|
||||
pub(crate) async fn post_process_web_build(
|
||||
&self,
|
||||
build_result: &BuildResult,
|
||||
assets: Option<&AssetManifest>,
|
||||
progress: &mut UnboundedSender<UpdateBuildProgress>,
|
||||
) -> Result<()> {
|
||||
_ = progress.start_send(UpdateBuildProgress {
|
||||
stage: Stage::OptimizingWasm,
|
||||
update: UpdateStage::Start,
|
||||
});
|
||||
|
||||
// Find the wasm file
|
||||
let output_location = build_result.executable.clone();
|
||||
let input_path = output_location.with_extension("wasm");
|
||||
|
||||
// Create the directory where the bindgen output will be placed
|
||||
let bindgen_outdir = self.target_out_dir().join("assets").join("dioxus");
|
||||
|
||||
// Run wasm-bindgen
|
||||
self.run_wasm_bindgen(&input_path, &bindgen_outdir).await?;
|
||||
|
||||
// Only run wasm-opt if the feature is enabled
|
||||
// Wasm-opt has an expensive build script that makes it annoying to keep enabled for iterative dev
|
||||
#[cfg(feature = "wasm-opt")]
|
||||
{
|
||||
use crate::config::WasmOptLevel;
|
||||
// Run wasm-opt if this is a release build
|
||||
if self.build_arguments.release {
|
||||
use dioxus_cli_config::WasmOptLevel;
|
||||
|
||||
tracing::info!(dx_src = ?TraceSrc::Build, "Running optimization with wasm-opt...");
|
||||
|
||||
let mut options = match self.dioxus_crate.dioxus_config.web.wasm_opt.level {
|
||||
WasmOptLevel::Z => {
|
||||
wasm_opt::OptimizationOptions::new_optimize_for_size_aggressively()
|
||||
}
|
||||
WasmOptLevel::S => wasm_opt::OptimizationOptions::new_optimize_for_size(),
|
||||
WasmOptLevel::Zero => wasm_opt::OptimizationOptions::new_opt_level_0(),
|
||||
WasmOptLevel::One => wasm_opt::OptimizationOptions::new_opt_level_1(),
|
||||
WasmOptLevel::Two => wasm_opt::OptimizationOptions::new_opt_level_2(),
|
||||
WasmOptLevel::Three => wasm_opt::OptimizationOptions::new_opt_level_3(),
|
||||
WasmOptLevel::Four => wasm_opt::OptimizationOptions::new_opt_level_4(),
|
||||
};
|
||||
let wasm_file = bindgen_outdir.join(format!(
|
||||
"{}_bg.wasm",
|
||||
self.dioxus_crate.dioxus_config.application.name
|
||||
));
|
||||
let old_size = wasm_file.metadata()?.len();
|
||||
options
|
||||
// WASM bindgen relies on reference types
|
||||
.enable_feature(wasm_opt::Feature::ReferenceTypes)
|
||||
.debug_info(self.dioxus_crate.dioxus_config.web.wasm_opt.debug)
|
||||
.run(&wasm_file, &wasm_file)
|
||||
.map_err(|err| Error::Other(anyhow::anyhow!(err)))?;
|
||||
|
||||
let new_size = wasm_file.metadata()?.len();
|
||||
tracing::info!(
|
||||
dx_src = ?TraceSrc::Build,
|
||||
"wasm-opt reduced WASM size from {} to {} ({:2}%)",
|
||||
old_size,
|
||||
new_size,
|
||||
(new_size as f64 - old_size as f64) / old_size as f64 * 100.0
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn prepare_html(&self) -> Result<String> {
|
||||
let mut html = {
|
||||
let crate_root: &Path = &self.krate.crate_dir();
|
||||
let custom_html_file = crate_root.join("index.html");
|
||||
std::fs::read_to_string(custom_html_file).unwrap_or_else(|_| String::from(DEFAULT_HTML))
|
||||
};
|
||||
|
||||
// Inject any resources from the config into the html
|
||||
self.inject_resources(&mut html)?;
|
||||
|
||||
// Inject loading scripts if they are not already present
|
||||
self.inject_loading_scripts(&mut html);
|
||||
|
||||
// Replace any special placeholders in the HTML with resolved values
|
||||
self.replace_template_placeholders(&mut html);
|
||||
|
||||
let title = self.krate.dioxus_config.web.app.title.clone();
|
||||
|
||||
replace_or_insert_before("{app_title}", "</title", &title, &mut html);
|
||||
|
||||
Ok(html)
|
||||
}
|
||||
|
||||
fn is_dev_build(&self) -> bool {
|
||||
!self.build.release
|
||||
}
|
||||
|
||||
// Inject any resources from the config into the html
|
||||
fn inject_resources(&self, html: &mut String) -> Result<()> {
|
||||
// Collect all resources into a list of styles and scripts
|
||||
let resources = &self.krate.dioxus_config.web.resource;
|
||||
let mut style_list = resources.style.clone().unwrap_or_default();
|
||||
let mut script_list = resources.script.clone().unwrap_or_default();
|
||||
|
||||
if self.is_dev_build() {
|
||||
style_list.extend(resources.dev.style.iter().cloned());
|
||||
script_list.extend(resources.dev.script.iter().cloned());
|
||||
}
|
||||
|
||||
let mut head_resources = String::new();
|
||||
|
||||
// Add all styles to the head
|
||||
for style in &style_list {
|
||||
writeln!(
|
||||
&mut head_resources,
|
||||
"<link rel=\"stylesheet\" href=\"{}\">",
|
||||
&style.to_str().unwrap(),
|
||||
)?;
|
||||
}
|
||||
|
||||
// Add all scripts to the head
|
||||
for script in &script_list {
|
||||
writeln!(
|
||||
&mut head_resources,
|
||||
"<script src=\"{}\"></script>",
|
||||
&script.to_str().unwrap(),
|
||||
)?;
|
||||
}
|
||||
|
||||
if !style_list.is_empty() {
|
||||
self.send_resource_deprecation_warning(style_list, ResourceType::Style);
|
||||
}
|
||||
if !script_list.is_empty() {
|
||||
self.send_resource_deprecation_warning(script_list, ResourceType::Script);
|
||||
}
|
||||
|
||||
// Inject any resources from manganis into the head
|
||||
// if let Some(assets) = assets {
|
||||
// head_resources.push_str(&assets.head());
|
||||
// }
|
||||
|
||||
replace_or_insert_before("{style_include}", "</head", &head_resources, html);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Inject loading scripts if they are not already present
|
||||
fn inject_loading_scripts(&self, html: &mut String) {
|
||||
// If it looks like we are already loading wasm or the current build opted out of injecting loading scripts, don't inject anything
|
||||
if !self.build.inject_loading_scripts || html.contains("__wbindgen_start") {
|
||||
return;
|
||||
}
|
||||
|
||||
// If not, insert the script
|
||||
*html = html.replace(
|
||||
"</body",
|
||||
r#"<script>
|
||||
// We can't use a module script here because we need to start the script immediately when streaming
|
||||
import("/{base_path}/wasm/{app_name}.js").then(
|
||||
({ default: init }) => {
|
||||
init("/{base_path}/wasm/{app_name}_bg.wasm").then((wasm) => {
|
||||
if (wasm.__wbindgen_start == undefined) {
|
||||
wasm.main();
|
||||
}
|
||||
});
|
||||
}
|
||||
);
|
||||
</script>
|
||||
{DX_TOAST_UTILITIES}
|
||||
</body"#,
|
||||
);
|
||||
|
||||
// Trim out the toasts if we're in release, or add them if we're serving
|
||||
*html = match self.is_dev_build() {
|
||||
true => html.replace("{DX_TOAST_UTILITIES}", TOAST_HTML),
|
||||
false => html.replace("{DX_TOAST_UTILITIES}", ""),
|
||||
};
|
||||
|
||||
// And try to insert preload links for the wasm and js files
|
||||
*html = html.replace(
|
||||
"</head",
|
||||
r#"<link rel="preload" href="/{base_path}/wasm/{app_name}_bg.wasm" as="fetch" type="application/wasm" crossorigin="">
|
||||
<link rel="preload" href="/{base_path}/wasm/{app_name}.js" as="script">
|
||||
</head"#
|
||||
);
|
||||
}
|
||||
|
||||
/// Replace any special placeholders in the HTML with resolved values
|
||||
fn replace_template_placeholders(&self, html: &mut String) {
|
||||
let base_path = self.krate.dioxus_config.web.app.base_path();
|
||||
*html = html.replace("{base_path}", base_path);
|
||||
|
||||
let app_name = &self.krate.dioxus_config.application.name;
|
||||
*html = html.replace("{app_name}", app_name);
|
||||
}
|
||||
|
||||
fn send_resource_deprecation_warning(&self, paths: Vec<PathBuf>, variant: ResourceType) {
|
||||
const RESOURCE_DEPRECATION_MESSAGE: &str = r#"The `web.resource` config has been deprecated in favor of head components and will be removed in a future release. Instead of including assets in the config, you can include assets with the `asset!` macro and add them to the head with `document::Link` and `Script` components."#;
|
||||
|
||||
let replacement_components = paths
|
||||
.iter()
|
||||
.map(|path| {
|
||||
let path = if path.exists() {
|
||||
path.to_path_buf()
|
||||
} else {
|
||||
// If the path is absolute, make it relative to the current directory before we join it
|
||||
// The path is actually a web path which is relative to the root of the website
|
||||
let path = path.strip_prefix("/").unwrap_or(path);
|
||||
let asset_dir_path = self.krate.legacy_asset_dir().join(path);
|
||||
if let Ok(absolute_path) = asset_dir_path.canonicalize() {
|
||||
let absolute_crate_root = self.krate.crate_dir().canonicalize().unwrap();
|
||||
PathBuf::from("./")
|
||||
.join(absolute_path.strip_prefix(absolute_crate_root).unwrap())
|
||||
} else {
|
||||
path.to_path_buf()
|
||||
tracing::info!(dx_src = ?TraceSrc::Build, "Running optimization with wasm-opt...");
|
||||
let mut options = match self.dioxus_crate.dioxus_config.web.wasm_opt.level {
|
||||
WasmOptLevel::Z => {
|
||||
wasm_opt::OptimizationOptions::new_optimize_for_size_aggressively()
|
||||
}
|
||||
WasmOptLevel::S => wasm_opt::OptimizationOptions::new_optimize_for_size(),
|
||||
WasmOptLevel::Zero => wasm_opt::OptimizationOptions::new_opt_level_0(),
|
||||
WasmOptLevel::One => wasm_opt::OptimizationOptions::new_opt_level_1(),
|
||||
WasmOptLevel::Two => wasm_opt::OptimizationOptions::new_opt_level_2(),
|
||||
WasmOptLevel::Three => wasm_opt::OptimizationOptions::new_opt_level_3(),
|
||||
WasmOptLevel::Four => wasm_opt::OptimizationOptions::new_opt_level_4(),
|
||||
};
|
||||
match variant {
|
||||
ResourceType::Style => {
|
||||
format!(" Stylesheet {{ href: asset!(\"{}\") }}", path.display())
|
||||
}
|
||||
ResourceType::Script => {
|
||||
format!(" Script {{ src: asset!(\"{}\") }}", path.display())
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let replacement_components = format!("rsx! {{\n{}\n}}", replacement_components.join("\n"));
|
||||
let section_name = match variant {
|
||||
ResourceType::Style => "web.resource.style",
|
||||
ResourceType::Script => "web.resource.script",
|
||||
};
|
||||
let wasm_file = bindgen_outdir.join(format!(
|
||||
"{}_bg.wasm",
|
||||
self.dioxus_crate.dioxus_config.application.name
|
||||
));
|
||||
let old_size = wasm_file.metadata()?.len();
|
||||
options
|
||||
// WASM bindgen relies on reference types
|
||||
.enable_feature(wasm_opt::Feature::ReferenceTypes)
|
||||
.debug_info(self.dioxus_crate.dioxus_config.web.wasm_opt.debug)
|
||||
.run(&wasm_file, &wasm_file)
|
||||
.map_err(|err| Error::Other(anyhow::anyhow!(err)))?;
|
||||
let new_size = wasm_file.metadata()?.len();
|
||||
tracing::info!(
|
||||
dx_src = ?TraceSrc::Build,
|
||||
"wasm-opt reduced WASM size from {} to {} ({:2}%)",
|
||||
old_size,
|
||||
new_size,
|
||||
(new_size as f64 - old_size as f64) / old_size as f64 * 100.0
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let message = format!(
|
||||
"{RESOURCE_DEPRECATION_MESSAGE}\nTo migrate to head components, remove `{section_name}` and include the following rsx in your root component:\n```rust\n{replacement_components}\n```"
|
||||
);
|
||||
// If pre-compressing is enabled, we can pre_compress the wasm-bindgen output
|
||||
let pre_compress = self
|
||||
.dioxus_crate
|
||||
.should_pre_compress_web_assets(self.build_arguments.release);
|
||||
tokio::task::spawn_blocking(move || pre_compress_folder(&bindgen_outdir, pre_compress))
|
||||
.await
|
||||
.unwrap()?;
|
||||
|
||||
// _ = self.progress.unbounded_send(BuildUpdateProgress {
|
||||
// platform: self.platform(),
|
||||
// stage: Stage::OptimizingWasm,
|
||||
// update: UpdateStage::AddMessage(BuildMessage {
|
||||
// level: Level::WARN,
|
||||
// message: MessageType::Text(message),
|
||||
// source: MessageSource::Build,
|
||||
// }),
|
||||
// });
|
||||
}
|
||||
// Create the index.html file
|
||||
// Note that we do this last since the webserver will attempt to serve the index.html file
|
||||
// If we do this too early, the wasm won't be ready but the index.html will be served, leading
|
||||
// to test failures and broken pages.
|
||||
let html = self.prepare_html(assets, progress)?;
|
||||
let html_path = self.target_out_dir().join("index.html");
|
||||
std::fs::write(html_path, html)?;
|
||||
|
||||
/// Check if the build is targeting the web platform
|
||||
pub(crate) fn targeting_web(&self) -> bool {
|
||||
self.platform() == Platform::Web
|
||||
}
|
||||
}
|
||||
|
||||
enum ResourceType {
|
||||
Style,
|
||||
Script,
|
||||
}
|
||||
|
||||
/// Replace a string or insert the new contents before a marker
|
||||
fn replace_or_insert_before(
|
||||
replace: &str,
|
||||
or_insert_before: &str,
|
||||
with: &str,
|
||||
content: &mut String,
|
||||
) {
|
||||
if content.contains(replace) {
|
||||
*content = content.replace(replace, with);
|
||||
} else if let Some(pos) = content.find(or_insert_before) {
|
||||
content.insert_str(pos, with);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,165 +0,0 @@
|
|||
// use dioxus_cli_config::BundleConfig;
|
||||
|
||||
use crate::config::BundleConfig;
|
||||
|
||||
pub(crate) fn make_tauri_bundler_settings(bundle_config: BundleConfig) -> tauri_bundler::BundleSettings {
|
||||
todo!()
|
||||
}
|
||||
|
||||
// impl From<NsisSettings> for tauri_bundler::NsisSettings {
|
||||
// fn from(val: NsisSettings) -> Self {
|
||||
// tauri_bundler::NsisSettings {
|
||||
// header_image: val.header_image,
|
||||
// sidebar_image: val.sidebar_image,
|
||||
// installer_icon: val.installer_icon,
|
||||
// install_mode: val.install_mode.into(),
|
||||
// languages: val.languages,
|
||||
// display_language_selector: val.display_language_selector,
|
||||
// custom_language_files: None,
|
||||
// template: None,
|
||||
// compression: tauri_utils::config::NsisCompression::None,
|
||||
// start_menu_folder: todo!(),
|
||||
// installer_hooks: todo!(),
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// impl From<BundleConfig> for tauri_bundler::BundleSettings {
|
||||
// fn from(val: BundleConfig) -> Self {
|
||||
// tauri_bundler::BundleSettings {
|
||||
// identifier: val.identifier,
|
||||
// publisher: val.publisher,
|
||||
// icon: val.icon,
|
||||
// resources: val.resources,
|
||||
// copyright: val.copyright,
|
||||
// category: val.category.and_then(|c| c.parse().ok()),
|
||||
// short_description: val.short_description,
|
||||
// long_description: val.long_description,
|
||||
// external_bin: val.external_bin,
|
||||
// deb: val.deb.map(Into::into).unwrap_or_default(),
|
||||
// macos: val.macos.map(Into::into).unwrap_or_default(),
|
||||
// windows: val.windows.map(Into::into).unwrap_or_default(),
|
||||
// ..Default::default()
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// impl From<DebianSettings> for tauri_bundler::DebianSettings {
|
||||
// fn from(val: DebianSettings) -> Self {
|
||||
// tauri_bundler::DebianSettings {
|
||||
// depends: val.depends,
|
||||
// files: val.files,
|
||||
// desktop_template: None,
|
||||
// provides: todo!(),
|
||||
// conflicts: todo!(),
|
||||
// replaces: todo!(),
|
||||
// section: todo!(),
|
||||
// priority: todo!(),
|
||||
// changelog: todo!(),
|
||||
// pre_install_script: todo!(),
|
||||
// post_install_script: todo!(),
|
||||
// pre_remove_script: todo!(),
|
||||
// post_remove_script: todo!(),
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// impl From<WixSettings> for tauri_bundler::WixSettings {
|
||||
// fn from(val: WixSettings) -> Self {
|
||||
// tauri_bundler::WixSettings {
|
||||
// language: tauri_bundler::bundle::WixLanguage({
|
||||
// let mut languages: Vec<_> = val
|
||||
// .language
|
||||
// .iter()
|
||||
// .map(|l| {
|
||||
// (
|
||||
// l.0.clone(),
|
||||
// tauri_bundler::bundle::WixLanguageConfig {
|
||||
// locale_path: l.1.clone(),
|
||||
// },
|
||||
// )
|
||||
// })
|
||||
// .collect();
|
||||
// if languages.is_empty() {
|
||||
// languages.push(("en-US".into(), Default::default()));
|
||||
// }
|
||||
// languages
|
||||
// }),
|
||||
// template: val.template,
|
||||
// fragment_paths: val.fragment_paths,
|
||||
// component_group_refs: val.component_group_refs,
|
||||
// component_refs: val.component_refs,
|
||||
// feature_group_refs: val.feature_group_refs,
|
||||
// feature_refs: val.feature_refs,
|
||||
// merge_refs: val.merge_refs,
|
||||
// enable_elevated_update_task: val.enable_elevated_update_task,
|
||||
// banner_path: val.banner_path,
|
||||
// dialog_image_path: val.dialog_image_path,
|
||||
// fips_compliant: val.fips_compliant,
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// impl From<MacOsSettings> for tauri_bundler::MacOsSettings {
|
||||
// fn from(val: MacOsSettings) -> Self {
|
||||
// tauri_bundler::MacOsSettings {
|
||||
// frameworks: val.frameworks,
|
||||
// minimum_system_version: val.minimum_system_version,
|
||||
// exception_domain: val.exception_domain,
|
||||
// signing_identity: val.signing_identity,
|
||||
// provider_short_name: val.provider_short_name,
|
||||
// entitlements: val.entitlements,
|
||||
// info_plist_path: val.info_plist_path,
|
||||
// files: todo!(),
|
||||
// hardened_runtime: todo!(),
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// impl From<WindowsSettings> for tauri_bundler::WindowsSettings {
|
||||
// fn from(val: WindowsSettings) -> Self {
|
||||
// tauri_bundler::WindowsSettings {
|
||||
// digest_algorithm: val.digest_algorithm,
|
||||
// certificate_thumbprint: val.certificate_thumbprint,
|
||||
// timestamp_url: val.timestamp_url,
|
||||
// tsp: val.tsp,
|
||||
// wix: val.wix.map(Into::into),
|
||||
// icon_path: val.icon_path.unwrap_or("icons/icon.ico".into()),
|
||||
// webview_install_mode: val.webview_install_mode.into(),
|
||||
// webview_fixed_runtime_path: val.webview_fixed_runtime_path,
|
||||
// allow_downgrades: val.allow_downgrades,
|
||||
// nsis: val.nsis.map(Into::into),
|
||||
// sign_command: todo!(),
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// impl From<NSISInstallerMode> for tauri_utils::config::NSISInstallerMode {
|
||||
// fn from(val: NSISInstallerMode) -> Self {
|
||||
// match val {
|
||||
// NSISInstallerMode::CurrentUser => tauri_utils::config::NSISInstallerMode::CurrentUser,
|
||||
// NSISInstallerMode::PerMachine => tauri_utils::config::NSISInstallerMode::PerMachine,
|
||||
// NSISInstallerMode::Both => tauri_utils::config::NSISInstallerMode::Both,
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// impl WebviewInstallMode {
|
||||
// fn into(self) -> tauri_utils::config::WebviewInstallMode {
|
||||
// match self {
|
||||
// Self::Skip => tauri_utils::config::WebviewInstallMode::Skip,
|
||||
// Self::DownloadBootstrapper { silent } => {
|
||||
// tauri_utils::config::WebviewInstallMode::DownloadBootstrapper { silent }
|
||||
// }
|
||||
// Self::EmbedBootstrapper { silent } => {
|
||||
// tauri_utils::config::WebviewInstallMode::EmbedBootstrapper { silent }
|
||||
// }
|
||||
// Self::OfflineInstaller { silent } => {
|
||||
// tauri_utils::config::WebviewInstallMode::OfflineInstaller { silent }
|
||||
// }
|
||||
// Self::FixedRuntime { path } => {
|
||||
// tauri_utils::config::WebviewInstallMode::FixedRuntime { path }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
|
@ -1,246 +0,0 @@
|
|||
use crate::assets::AssetManifest;
|
||||
use crate::builder::{BuildRequest, Platform};
|
||||
use crate::Result;
|
||||
use rayon::prelude::{IntoParallelRefIterator, ParallelIterator};
|
||||
use std::path::PathBuf;
|
||||
use std::sync::atomic::AtomicUsize;
|
||||
|
||||
pub(crate) struct AppBundle {
|
||||
pub(crate) build: BuildRequest,
|
||||
pub(crate) workdir: PathBuf,
|
||||
pub(crate) executable: PathBuf,
|
||||
pub(crate) assets: AssetManifest,
|
||||
}
|
||||
|
||||
impl AppBundle {
|
||||
pub(crate) async fn new(
|
||||
build: BuildRequest,
|
||||
assets: AssetManifest,
|
||||
executable: PathBuf,
|
||||
) -> Result<Self> {
|
||||
let bundle = Self {
|
||||
workdir: build.krate.workdir(build.build.platform()),
|
||||
build,
|
||||
executable,
|
||||
assets,
|
||||
};
|
||||
|
||||
bundle.prepare_workdir()?;
|
||||
bundle.write_main_executable().await?;
|
||||
bundle.write_assets().await?;
|
||||
bundle.write_metadata().await?;
|
||||
bundle.optimize().await?;
|
||||
|
||||
Ok(bundle)
|
||||
}
|
||||
|
||||
// Create the workdir and then clean its contents, in case it already exists
|
||||
fn prepare_workdir(&self) -> Result<()> {
|
||||
// _ = std::fs::remove_dir_all(&self.workdir);
|
||||
_ = std::fs::create_dir_all(&self.workdir);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Take the output of rustc and make it into the main exe of the bundle
|
||||
///
|
||||
/// For wasm, we'll want to run `wasm-bindgen` to make it a wasm binary along with some other optimizations
|
||||
/// Other platforms we might do some stripping or other optimizations
|
||||
async fn write_main_executable(&self) -> Result<()> {
|
||||
match self.build.platform() {
|
||||
// Run wasm-bindgen on the wasm binary and set its output to be in the bundle folder
|
||||
// Also run wasm-opt on the wasm binary, and sets the index.html since that's also the "executable".
|
||||
//
|
||||
// The wasm stuff will be in a folder called "wasm" in the workdir.
|
||||
//
|
||||
// Final output format:
|
||||
// ```
|
||||
// dist/
|
||||
// web
|
||||
// index.html
|
||||
// wasm/
|
||||
// app.wasm
|
||||
// glue.js
|
||||
// snippets/
|
||||
// ...
|
||||
// assets/
|
||||
// logo.png
|
||||
// ```
|
||||
Platform::Web => {
|
||||
// Run wasm-bindgen and drop its output into the assets folder under "dioxus"
|
||||
self.build
|
||||
.run_wasm_bindgen(&self.executable.with_extension("wasm"), &self.bindgen_dir())
|
||||
.await?;
|
||||
|
||||
// Only run wasm-opt if the feature is enabled
|
||||
// Wasm-opt has an expensive build script that makes it annoying to keep enabled for iterative dev
|
||||
// We put it behind the "wasm-opt" feature flag so that it can be disabled when iterating on the cli
|
||||
self.build.run_wasm_opt(&self.bindgen_dir())?;
|
||||
|
||||
// Write the index.html file
|
||||
std::fs::write(self.workdir.join("index.html"), self.build.prepare_html()?)?;
|
||||
}
|
||||
|
||||
// Move the executable to the workdir
|
||||
Platform::Desktop => {
|
||||
std::fs::copy(self.executable.clone(), self.workdir.join("app"))?;
|
||||
}
|
||||
|
||||
Platform::Ios => {}
|
||||
Platform::Server => {}
|
||||
Platform::Liveview => {}
|
||||
Platform::Android => todo!("android not yet supported!"),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Copy the assets out of the manifest and into the target location
|
||||
///
|
||||
/// Should be the same on all platforms - just copy over the assets from the manifest into the output directory
|
||||
async fn write_assets(&self) -> Result<()> {
|
||||
// Server doesn't need assets - web will provide them
|
||||
if self.build.platform() == Platform::Server {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let asset_dir = self.asset_dir();
|
||||
let assets = self.all_source_assets();
|
||||
|
||||
let asset_count = assets.len();
|
||||
let assets_finished = AtomicUsize::new(0);
|
||||
let optimize = false;
|
||||
let pre_compress = false;
|
||||
|
||||
// Parallel Copy over the assets and keep track of progress with an atomic counter
|
||||
assets.par_iter().try_for_each(|asset| {
|
||||
self.build.status_copying_asset(
|
||||
assets_finished.fetch_add(0, std::sync::atomic::Ordering::SeqCst),
|
||||
asset_count,
|
||||
asset,
|
||||
);
|
||||
|
||||
let res = self
|
||||
.assets
|
||||
.copy_asset_to(&asset_dir, asset, optimize, pre_compress);
|
||||
|
||||
if let Err(err) = res {
|
||||
tracing::error!("Failed to copy asset {asset:?}: {err}");
|
||||
}
|
||||
|
||||
self.build.status_finished_asset(
|
||||
assets_finished.fetch_add(1, std::sync::atomic::Ordering::SeqCst),
|
||||
asset_count,
|
||||
asset,
|
||||
);
|
||||
|
||||
Ok(()) as anyhow::Result<()>
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Take the workdir and copy it to the output location, returning the path to final bundle
|
||||
///
|
||||
/// Perform any finishing steps here:
|
||||
/// - Signing the bundle
|
||||
pub(crate) async fn finish(&self, destination: PathBuf) -> Result<PathBuf> {
|
||||
// std::fs::create_dir_all(&destination.join(self.build.app_name()))?;
|
||||
|
||||
match self.build.platform() {
|
||||
// Nothing special to do - just copy the workdir to the output location
|
||||
Platform::Web => {
|
||||
std::fs::create_dir_all(&destination.join("web"))?;
|
||||
crate::fastfs::copy_asset(&self.workdir, &destination.join("web"))?;
|
||||
Ok(destination.join("web"))
|
||||
}
|
||||
|
||||
// Create a final .app/.exe/etc depending on the host platform, not dependent on the host
|
||||
Platform::Desktop => {
|
||||
// for now, until we have bundled hotreload, just copy the executable to the output location
|
||||
// let output_location = destination.join(self.build.app_name());
|
||||
Ok(self.executable.clone())
|
||||
// Ok(output_location)
|
||||
}
|
||||
|
||||
Platform::Server => {
|
||||
std::fs::copy(
|
||||
self.executable.clone(),
|
||||
destination.join(self.build.app_name()),
|
||||
)?;
|
||||
|
||||
Ok(destination.join(self.build.app_name()))
|
||||
}
|
||||
Platform::Liveview => Ok(self.executable.clone()),
|
||||
|
||||
// Create a .ipa, only from macOS
|
||||
Platform::Ios => todo!(),
|
||||
|
||||
// Create a .exe, from linux/mac/windows
|
||||
Platform::Android => todo!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn bindgen_dir(&self) -> PathBuf {
|
||||
self.workdir.join("wasm")
|
||||
}
|
||||
|
||||
pub(crate) fn all_source_assets(&self) -> Vec<PathBuf> {
|
||||
// Merge the legacy asset dir assets with the assets from the manifest
|
||||
// Legacy assets need to retain their name in case they're referenced in the manifest
|
||||
// todo: we should only copy over assets that appear in `img { src: "assets/logo.png" }` to
|
||||
// properly deprecate the legacy asset dir
|
||||
self.assets
|
||||
.assets
|
||||
.keys()
|
||||
.cloned()
|
||||
.chain(self.build.krate.legacy_asset_dir_files())
|
||||
.collect::<Vec<_>>()
|
||||
}
|
||||
|
||||
async fn write_metadata(&self) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn asset_dir(&self) -> PathBuf {
|
||||
let dir: PathBuf = match self.build.platform() {
|
||||
Platform::Web => self.workdir.join("assets"),
|
||||
Platform::Desktop => self.workdir.join("Resources"),
|
||||
Platform::Ios => self.workdir.join("Resources"),
|
||||
Platform::Android => self.workdir.join("assets"),
|
||||
Platform::Server => self.workdir.join("assets"),
|
||||
Platform::Liveview => self.workdir.join("assets"),
|
||||
};
|
||||
|
||||
if !dir.exists() {
|
||||
std::fs::create_dir_all(&dir).expect("Failed to create asset dir in temp dir");
|
||||
}
|
||||
|
||||
dir
|
||||
}
|
||||
|
||||
/// Run the optimizers, obfuscators, minimizers, etc
|
||||
pub(crate) async fn optimize(&self) -> Result<()> {
|
||||
match self.build.platform() {
|
||||
Platform::Web => {
|
||||
// Compress the asset dir
|
||||
// // If pre-compressing is enabled, we can pre_compress the wasm-bindgen output
|
||||
// let pre_compress = self
|
||||
// .krate
|
||||
// .should_pre_compress_web_assets(self.build.release);
|
||||
|
||||
// tokio::task::spawn_blocking(move || {
|
||||
// pre_compress_folder(&bindgen_outdir, pre_compress)
|
||||
// })
|
||||
// .await
|
||||
// .unwrap()?;
|
||||
}
|
||||
Platform::Desktop => {}
|
||||
Platform::Ios => {}
|
||||
Platform::Android => {}
|
||||
Platform::Server => {}
|
||||
Platform::Liveview => {}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
|
@ -1,23 +0,0 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Debug)]
|
||||
pub(crate) enum BundleFormat {
|
||||
// Apple
|
||||
Macos,
|
||||
Ios,
|
||||
|
||||
// wasm
|
||||
Web,
|
||||
|
||||
// Android
|
||||
Android,
|
||||
|
||||
// Linux
|
||||
AppImage,
|
||||
Deb,
|
||||
Rpm,
|
||||
|
||||
// Windows
|
||||
Msi,
|
||||
Wix,
|
||||
}
|
|
@ -1,9 +0,0 @@
|
|||
mod android;
|
||||
mod format;
|
||||
mod ios;
|
||||
mod mac;
|
||||
mod web;
|
||||
mod win;
|
||||
|
||||
mod app;
|
||||
pub(crate) use app::*;
|
|
@ -1,8 +0,0 @@
|
|||
use super::AppBundle;
|
||||
use crate::Result;
|
||||
|
||||
impl AppBundle {
|
||||
pub(crate) fn prepare_html(&self) -> Result<String> {
|
||||
todo!()
|
||||
}
|
||||
}
|
|
@ -10,35 +10,35 @@ use std::{borrow::Cow, fs, path::Path, process::exit};
|
|||
|
||||
/// Format some rsx
|
||||
#[derive(Clone, Debug, Parser)]
|
||||
pub(crate) struct Autoformat {
|
||||
pub struct Autoformat {
|
||||
/// Format rust code before the formatting the rsx macros
|
||||
#[clap(long)]
|
||||
pub(crate) all_code: bool,
|
||||
pub all_code: bool,
|
||||
|
||||
/// Run in 'check' mode. Exits with 0 if input is formatted correctly. Exits
|
||||
/// with 1 and prints a diff if formatting is required.
|
||||
#[clap(short, long)]
|
||||
pub(crate) check: bool,
|
||||
pub check: bool,
|
||||
|
||||
/// Input rsx (selection)
|
||||
#[clap(short, long)]
|
||||
pub(crate) raw: Option<String>,
|
||||
pub raw: Option<String>,
|
||||
|
||||
/// Input file
|
||||
#[clap(short, long)]
|
||||
pub(crate) file: Option<String>,
|
||||
pub file: Option<String>,
|
||||
|
||||
/// Split attributes in lines or not
|
||||
#[clap(short, long, default_value = "false")]
|
||||
pub(crate) split_line_attributes: bool,
|
||||
pub split_line_attributes: bool,
|
||||
|
||||
/// The package to build
|
||||
#[clap(short, long)]
|
||||
pub(crate) package: Option<String>,
|
||||
pub package: Option<String>,
|
||||
}
|
||||
|
||||
impl Autoformat {
|
||||
pub(crate) fn autoformat(self) -> Result<()> {
|
||||
pub fn autoformat(self) -> Result<()> {
|
||||
let Autoformat {
|
||||
check,
|
||||
raw,
|
||||
|
|
|
@ -1,155 +1,107 @@
|
|||
use super::*;
|
||||
use crate::builder::{Builder, Platform};
|
||||
use crate::dioxus_crate::DioxusCrate;
|
||||
use anyhow::Context;
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::Context;
|
||||
use dioxus_cli_config::Platform;
|
||||
|
||||
use crate::{
|
||||
builder::{BuildRequest, TargetPlatform},
|
||||
dioxus_crate::DioxusCrate,
|
||||
};
|
||||
|
||||
use super::*;
|
||||
|
||||
/// Information about the target to build
|
||||
#[derive(Clone, Debug, Default, Deserialize, Parser)]
|
||||
pub struct TargetArgs {
|
||||
/// Build for nightly [default: false]
|
||||
#[clap(long)]
|
||||
pub nightly: bool,
|
||||
|
||||
/// Build a example [default: ""]
|
||||
#[clap(long)]
|
||||
pub example: Option<String>,
|
||||
|
||||
/// Build a binary [default: ""]
|
||||
#[clap(long)]
|
||||
pub bin: Option<String>,
|
||||
|
||||
/// The package to build
|
||||
#[clap(short, long)]
|
||||
pub package: Option<String>,
|
||||
|
||||
/// Space separated list of features to activate
|
||||
#[clap(long)]
|
||||
pub features: Vec<String>,
|
||||
|
||||
/// The feature to use for the client in a fullstack app [default: "web"]
|
||||
#[clap(long)]
|
||||
pub client_feature: Option<String>,
|
||||
|
||||
/// The feature to use for the server in a fullstack app [default: "server"]
|
||||
#[clap(long)]
|
||||
pub server_feature: Option<String>,
|
||||
|
||||
/// Rustc platform triple
|
||||
#[clap(long)]
|
||||
pub target: Option<String>,
|
||||
}
|
||||
|
||||
/// Build the Rust Dioxus app and all of its assets.
|
||||
///
|
||||
/// Produces a final output bundle designed to be run on the target platform.
|
||||
#[derive(Clone, Debug, Default, Deserialize, Parser)]
|
||||
#[clap(name = "build")]
|
||||
pub(crate) struct BuildArgs {
|
||||
pub struct Build {
|
||||
/// Build in release mode [default: false]
|
||||
#[clap(long, short)]
|
||||
#[serde(default)]
|
||||
pub(crate) release: bool,
|
||||
pub release: bool,
|
||||
|
||||
/// This flag only applies to fullstack builds. By default fullstack builds will run with something in between debug and release mode. This flag will force the build to run in debug mode. [default: false]
|
||||
#[clap(long)]
|
||||
#[serde(default)]
|
||||
pub(crate) force_debug: bool,
|
||||
pub force_debug: bool,
|
||||
|
||||
/// This flag only applies to fullstack builds. By default fullstack builds will run the server and client builds in parallel. This flag will force the build to run the server build first, then the client build. [default: false]
|
||||
#[clap(long)]
|
||||
#[serde(default)]
|
||||
pub(crate) force_sequential: bool,
|
||||
pub force_sequential: bool,
|
||||
|
||||
/// Use verbose output [default: false]
|
||||
// Use verbose output [default: false]
|
||||
#[clap(long)]
|
||||
#[serde(default)]
|
||||
pub(crate) verbose: bool,
|
||||
|
||||
/// Pass -Awarnings to the cargo build
|
||||
#[clap(long)]
|
||||
#[serde(default)]
|
||||
pub(crate) silent: bool,
|
||||
pub verbose: bool,
|
||||
|
||||
/// Build with custom profile
|
||||
#[clap(long)]
|
||||
pub(crate) profile: Option<String>,
|
||||
pub profile: Option<String>,
|
||||
|
||||
/// Build platform: support Web & Desktop [default: "default_platform"]
|
||||
#[clap(long, value_enum)]
|
||||
pub(crate) platform: Option<Platform>,
|
||||
|
||||
/// Build the fullstack variant of this app, using that as the fileserver and backend
|
||||
///
|
||||
/// This defaults to `false` but will be overriden to true if the `fullstack` feature is enabled.
|
||||
#[clap(long)]
|
||||
pub(crate) fullstack: bool,
|
||||
|
||||
/// Run the ssg config of the app and generate the files
|
||||
#[clap(long)]
|
||||
pub(crate) ssg: bool,
|
||||
pub platform: Option<Platform>,
|
||||
|
||||
/// Skip collecting assets from dependencies [default: false]
|
||||
#[clap(long)]
|
||||
#[serde(default)]
|
||||
pub(crate) skip_assets: bool,
|
||||
pub skip_assets: bool,
|
||||
|
||||
/// Extra arguments passed to cargo build
|
||||
#[clap(last = true)]
|
||||
pub(crate) cargo_args: Vec<String>,
|
||||
pub cargo_args: Vec<String>,
|
||||
|
||||
/// Inject scripts to load the wasm and js files for your dioxus app if they are not already present [default: true]
|
||||
#[clap(long, default_value_t = true)]
|
||||
pub(crate) inject_loading_scripts: bool,
|
||||
pub inject_loading_scripts: bool,
|
||||
|
||||
/// Information about the target to build
|
||||
#[clap(flatten)]
|
||||
pub(crate) target_args: TargetArgs,
|
||||
pub target_args: TargetArgs,
|
||||
}
|
||||
|
||||
/// Information about the target to build
|
||||
#[derive(Clone, Debug, Default, Deserialize, Parser)]
|
||||
pub(crate) struct TargetArgs {
|
||||
/// Build for nightly [default: false]
|
||||
#[clap(long)]
|
||||
pub(crate) nightly: bool,
|
||||
|
||||
/// Build a example [default: ""]
|
||||
#[clap(long)]
|
||||
pub(crate) example: Option<String>,
|
||||
|
||||
/// Build a binary [default: ""]
|
||||
#[clap(long)]
|
||||
pub(crate) bin: Option<String>,
|
||||
|
||||
/// The package to build
|
||||
#[clap(short, long)]
|
||||
pub(crate) package: Option<String>,
|
||||
|
||||
/// Space separated list of features to activate
|
||||
#[clap(long)]
|
||||
pub(crate) features: Vec<String>,
|
||||
|
||||
/// The feature to use for the client in a fullstack app [default: "web"]
|
||||
#[clap(long)]
|
||||
pub(crate) client_feature: Option<String>,
|
||||
|
||||
/// The feature to use for the server in a fullstack app [default: "server"]
|
||||
#[clap(long)]
|
||||
pub(crate) server_feature: Option<String>,
|
||||
|
||||
/// The architecture to build for [default: "native"]
|
||||
///
|
||||
/// Can either be `arm | arm64 | x86 | x86_64 | native`
|
||||
#[clap(long)]
|
||||
pub(crate) arch: Option<String>,
|
||||
|
||||
/// Rustc platform triple
|
||||
#[clap(long)]
|
||||
pub(crate) target: Option<String>,
|
||||
}
|
||||
|
||||
impl BuildArgs {
|
||||
pub(crate) async fn run(mut self) -> anyhow::Result<()> {
|
||||
let mut dioxus_crate =
|
||||
DioxusCrate::new(&self.target_args).context("Failed to load Dioxus workspace")?;
|
||||
|
||||
self.build(&mut dioxus_crate).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) async fn build(&mut self, dioxus_crate: &mut DioxusCrate) -> Result<()> {
|
||||
self.resolve(dioxus_crate)?;
|
||||
|
||||
// todo: probably want to consume the logs from the builder here, instead of just waiting for it to finish
|
||||
let bundles = Builder::start(dioxus_crate, self.clone())?
|
||||
.wait_for_finish()
|
||||
.await?;
|
||||
|
||||
for bundle in bundles {
|
||||
let destination = dioxus_crate.out_dir();
|
||||
bundle.finish(destination).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Update the arguments of the CLI by inspecting the DioxusCrate itself and learning about how
|
||||
/// the user has configured their app.
|
||||
///
|
||||
/// IE if they've specified "fullstack" as a feature on `dioxus`, then we want to build the
|
||||
/// fullstack variant even if they omitted the `--fullstack` flag.
|
||||
pub(crate) fn resolve(&mut self, dioxus_crate: &mut DioxusCrate) -> Result<()> {
|
||||
impl Build {
|
||||
pub fn resolve(&mut self, dioxus_crate: &mut DioxusCrate) -> Result<()> {
|
||||
// Inherit the platform from the defaults
|
||||
let platform = self
|
||||
.platform
|
||||
.unwrap_or_else(|| self.auto_detect_platform(dioxus_crate));
|
||||
|
||||
self.platform = Some(platform);
|
||||
|
||||
// Add any features required to turn on the platform we are building for
|
||||
|
@ -160,20 +112,36 @@ impl BuildArgs {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn build(&mut self, dioxus_crate: &mut DioxusCrate) -> Result<()> {
|
||||
self.resolve(dioxus_crate)?;
|
||||
let build_requests = BuildRequest::create(false, dioxus_crate, self.clone())?;
|
||||
BuildRequest::build_all_parallel(build_requests).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn run(&mut self) -> anyhow::Result<()> {
|
||||
let mut dioxus_crate =
|
||||
DioxusCrate::new(&self.target_args).context("Failed to load Dioxus workspace")?;
|
||||
self.build(&mut dioxus_crate).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn auto_detect_client_platform(
|
||||
&self,
|
||||
resolved: &DioxusCrate,
|
||||
) -> (Option<String>, Platform) {
|
||||
) -> (Option<String>, TargetPlatform) {
|
||||
self.find_dioxus_feature(resolved, |platform| {
|
||||
matches!(platform, Platform::Web | Platform::Desktop)
|
||||
matches!(platform, TargetPlatform::Web | TargetPlatform::Desktop)
|
||||
})
|
||||
.unwrap_or_else(|| (Some("web".to_string()), Platform::Web))
|
||||
.unwrap_or_else(|| (Some("web".to_string()), TargetPlatform::Web))
|
||||
}
|
||||
|
||||
pub(crate) fn auto_detect_server_feature(&self, resolved: &DioxusCrate) -> Option<String> {
|
||||
self.find_dioxus_feature(resolved, |platform| matches!(platform, Platform::Server))
|
||||
.map(|(feature, _)| feature)
|
||||
.unwrap_or_else(|| Some("server".to_string()))
|
||||
self.find_dioxus_feature(resolved, |platform| {
|
||||
matches!(platform, TargetPlatform::Server)
|
||||
})
|
||||
.map(|(feature, _)| feature)
|
||||
.unwrap_or_else(|| Some("server".to_string()))
|
||||
}
|
||||
|
||||
fn auto_detect_platform(&self, resolved: &DioxusCrate) -> Platform {
|
||||
|
@ -237,7 +205,7 @@ impl BuildArgs {
|
|||
}
|
||||
|
||||
/// Get the platform from the build arguments
|
||||
pub(crate) fn platform(&self) -> Platform {
|
||||
pub fn platform(&self) -> Platform {
|
||||
self.platform.unwrap_or_default()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,10 +1,11 @@
|
|||
use crate::build::Build;
|
||||
use crate::DioxusCrate;
|
||||
use crate::{build::BuildArgs, bundle_utils::make_tauri_bundler_settings};
|
||||
use anyhow::Context;
|
||||
use std::env::current_dir;
|
||||
use std::fs::create_dir_all;
|
||||
use std::ops::Deref;
|
||||
use std::str::FromStr;
|
||||
use tauri_bundler::{PackageSettings, SettingsBuilder};
|
||||
use tauri_bundler::{BundleSettings, PackageSettings, SettingsBuilder};
|
||||
|
||||
use super::*;
|
||||
|
||||
|
@ -15,14 +16,21 @@ pub struct Bundle {
|
|||
/// The package types to bundle
|
||||
#[clap(long)]
|
||||
pub packages: Option<Vec<PackageType>>,
|
||||
|
||||
/// The arguments for the dioxus build
|
||||
#[clap(flatten)]
|
||||
pub(crate) build_arguments: BuildArgs,
|
||||
pub build_arguments: Build,
|
||||
}
|
||||
|
||||
impl Deref for Bundle {
|
||||
type Target = Build;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.build_arguments
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub(crate) enum PackageType {
|
||||
pub enum PackageType {
|
||||
MacOsBundle,
|
||||
IosBundle,
|
||||
WindowsMsi,
|
||||
|
@ -45,7 +53,6 @@ impl FromStr for PackageType {
|
|||
"rpm" => Ok(PackageType::Rpm),
|
||||
"appimage" => Ok(PackageType::AppImage),
|
||||
"dmg" => Ok(PackageType::Dmg),
|
||||
"updater" => Ok(PackageType::Updater),
|
||||
_ => Err(format!("{} is not a valid package type", s)),
|
||||
}
|
||||
}
|
||||
|
@ -67,7 +74,7 @@ impl From<PackageType> for tauri_bundler::PackageType {
|
|||
}
|
||||
|
||||
impl Bundle {
|
||||
pub(crate) async fn bundle(mut self) -> anyhow::Result<()> {
|
||||
pub async fn bundle(mut self) -> anyhow::Result<()> {
|
||||
let mut dioxus_crate = DioxusCrate::new(&self.build_arguments.target_args)
|
||||
.context("Failed to load Dioxus workspace")?;
|
||||
|
||||
|
@ -90,9 +97,7 @@ impl Bundle {
|
|||
.set_src_path(Some(dioxus_crate.workspace_dir().display().to_string())),
|
||||
];
|
||||
|
||||
let bundle_config = dioxus_crate.dioxus_config.bundle.clone();
|
||||
let mut bundle_settings = make_tauri_bundler_settings(bundle_config);
|
||||
|
||||
let mut bundle_settings: BundleSettings = dioxus_crate.dioxus_config.bundle.clone().into();
|
||||
if cfg!(windows) {
|
||||
let windows_icon_override = dioxus_crate
|
||||
.dioxus_config
|
||||
|
@ -124,7 +129,6 @@ impl Bundle {
|
|||
|
||||
// Copy the assets in the dist directory to the bundle
|
||||
let static_asset_output_dir = &dioxus_crate.dioxus_config.application.out_dir;
|
||||
|
||||
// Make sure the dist directory is relative to the crate directory
|
||||
let static_asset_output_dir = static_asset_output_dir
|
||||
.strip_prefix(dioxus_crate.workspace_dir())
|
||||
|
@ -184,7 +188,7 @@ impl Bundle {
|
|||
settings = settings.package_types(packages.iter().map(|p| (*p).into()).collect());
|
||||
}
|
||||
|
||||
if let Some(target) = &self.build_arguments.target_args.target {
|
||||
if let Some(target) = &self.target_args.target {
|
||||
settings = settings.target(target.to_string());
|
||||
}
|
||||
|
||||
|
@ -194,7 +198,7 @@ impl Bundle {
|
|||
#[cfg(target_os = "macos")]
|
||||
std::env::set_var("CI", "true");
|
||||
|
||||
tauri_bundler::bundle::bundle_project(&settings.unwrap()).unwrap_or_else(|err|{
|
||||
tauri_bundler::bundle::bundle_project(settings.unwrap()).unwrap_or_else(|err|{
|
||||
#[cfg(target_os = "macos")]
|
||||
panic!("Failed to bundle project: {:#?}\nMake sure you have automation enabled in your terminal (https://github.com/tauri-apps/tauri/issues/3055#issuecomment-1624389208) and full disk access enabled for your terminal (https://github.com/tauri-apps/tauri/issues/3055#issuecomment-1624389208)", err);
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
|
|
|
@ -11,19 +11,19 @@ use super::*;
|
|||
|
||||
/// Check the Rust files in the project for issues.
|
||||
#[derive(Clone, Debug, Parser)]
|
||||
pub(crate) struct Check {
|
||||
pub struct Check {
|
||||
/// Input file
|
||||
#[clap(short, long)]
|
||||
pub(crate) file: Option<PathBuf>,
|
||||
pub file: Option<PathBuf>,
|
||||
|
||||
/// Information about the target to check
|
||||
#[clap(flatten)]
|
||||
pub(crate) target_args: TargetArgs,
|
||||
pub target_args: TargetArgs,
|
||||
}
|
||||
|
||||
impl Check {
|
||||
// Todo: check the entire crate
|
||||
pub(crate) async fn check(self) -> Result<()> {
|
||||
pub async fn check(self) -> Result<()> {
|
||||
match self.file {
|
||||
// Default to checking the project
|
||||
None => {
|
||||
|
|
|
@ -7,10 +7,10 @@ use super::*;
|
|||
/// Clean build artifacts.
|
||||
#[derive(Clone, Debug, Parser)]
|
||||
#[clap(name = "clean")]
|
||||
pub(crate) struct Clean {}
|
||||
pub struct Clean {}
|
||||
|
||||
impl Clean {
|
||||
pub(crate) fn clean(self) -> anyhow::Result<()> {
|
||||
pub fn clean(self) -> anyhow::Result<()> {
|
||||
let dioxus_crate =
|
||||
DioxusCrate::new(&TargetArgs::default()).context("Failed to load Dioxus workspace")?;
|
||||
|
||||
|
|
|
@ -7,7 +7,7 @@ use super::*;
|
|||
/// Dioxus config file controls
|
||||
#[derive(Clone, Debug, Deserialize, Subcommand)]
|
||||
#[clap(name = "config")]
|
||||
pub(crate) enum Config {
|
||||
pub enum Config {
|
||||
/// Init `Dioxus.toml` for project/folder.
|
||||
Init {
|
||||
/// Init project name
|
||||
|
@ -22,10 +22,8 @@ pub(crate) enum Config {
|
|||
#[clap(long, default_value = "web")]
|
||||
platform: String,
|
||||
},
|
||||
|
||||
/// Format print Dioxus config.
|
||||
FormatPrint {},
|
||||
|
||||
/// Create a custom html file.
|
||||
CustomHtml {},
|
||||
|
||||
|
@ -38,7 +36,7 @@ pub(crate) enum Config {
|
|||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, Deserialize, Subcommand)]
|
||||
pub(crate) enum Setting {
|
||||
pub enum Setting {
|
||||
/// Set the value of the always-hot-reload setting.
|
||||
AlwaysHotReload { value: BoolValue },
|
||||
/// Set the value of the always-open-browser setting.
|
||||
|
@ -63,7 +61,7 @@ impl Display for Setting {
|
|||
// Clap complains if we use a bool directly and I can't find much info about it.
|
||||
// "Argument 'value` is positional and it must take a value but action is SetTrue"
|
||||
#[derive(Debug, Clone, Copy, Deserialize, clap::ValueEnum)]
|
||||
pub(crate) enum BoolValue {
|
||||
pub enum BoolValue {
|
||||
True,
|
||||
False,
|
||||
}
|
||||
|
@ -78,7 +76,7 @@ impl From<BoolValue> for bool {
|
|||
}
|
||||
|
||||
impl Config {
|
||||
pub(crate) fn config(self) -> Result<()> {
|
||||
pub fn config(self) -> Result<()> {
|
||||
let crate_root = crate_root()?;
|
||||
match self {
|
||||
Config::Init {
|
||||
|
|
|
@ -8,7 +8,7 @@ pub(crate) static DEFAULT_TEMPLATE: &str = "gh:dioxuslabs/dioxus-template";
|
|||
|
||||
#[derive(Clone, Debug, Default, Deserialize, Parser)]
|
||||
#[clap(name = "new")]
|
||||
pub(crate) struct Create {
|
||||
pub struct Create {
|
||||
/// Project name (required when `--yes` is used)
|
||||
name: Option<String>,
|
||||
|
||||
|
@ -39,7 +39,7 @@ pub(crate) struct Create {
|
|||
}
|
||||
|
||||
impl Create {
|
||||
pub(crate) fn create(mut self) -> Result<()> {
|
||||
pub fn create(mut self) -> Result<()> {
|
||||
let metadata = cargo_metadata::MetadataCommand::new().exec().ok();
|
||||
|
||||
// If we're getting pass a `.` name, that's actually a path
|
||||
|
@ -112,7 +112,7 @@ impl Create {
|
|||
|
||||
/// Post-creation actions for newly setup crates.
|
||||
// Also used by `init`.
|
||||
pub(crate) fn post_create(path: &Path, metadata: Option<Metadata>) -> Result<()> {
|
||||
pub fn post_create(path: &Path, metadata: Option<Metadata>) -> Result<()> {
|
||||
// 1. Add the new project to the workspace, if it exists.
|
||||
// This must be executed first in order to run `cargo fmt` on the new project.
|
||||
metadata.and_then(|metadata| {
|
||||
|
|
|
@ -1,10 +0,0 @@
|
|||
use clap::Parser;
|
||||
|
||||
#[derive(Clone, Debug, Parser)]
|
||||
pub struct Doctor {}
|
||||
|
||||
impl Doctor {
|
||||
pub async fn run(self) -> anyhow::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
|
@ -1,12 +0,0 @@
|
|||
use super::*;
|
||||
|
||||
/// Translate some source file into Dioxus code
|
||||
#[derive(Clone, Debug, Parser)]
|
||||
#[clap(name = "http-server")]
|
||||
pub(crate) struct Httpserver {}
|
||||
|
||||
impl Httpserver {
|
||||
pub(crate) async fn serve(self) -> Result<()> {
|
||||
todo!()
|
||||
}
|
||||
}
|
|
@ -4,7 +4,7 @@ use cargo_generate::{GenerateArgs, TemplatePath};
|
|||
|
||||
#[derive(Clone, Debug, Default, Deserialize, Parser)]
|
||||
#[clap(name = "init")]
|
||||
pub(crate) struct Init {
|
||||
pub struct Init {
|
||||
/// Template path
|
||||
#[clap(default_value = DEFAULT_TEMPLATE, short, long)]
|
||||
template: String,
|
||||
|
@ -24,21 +24,19 @@ pub(crate) struct Init {
|
|||
}
|
||||
|
||||
impl Init {
|
||||
pub(crate) fn init(self) -> Result<()> {
|
||||
pub fn init(self) -> Result<()> {
|
||||
let metadata = cargo_metadata::MetadataCommand::new().exec().ok();
|
||||
|
||||
// Get directory name.
|
||||
let name = std::env::current_dir()?
|
||||
.file_name()
|
||||
.map(|f| f.to_str().unwrap().to_string());
|
||||
|
||||
// https://github.com/console-rs/dialoguer/issues/294
|
||||
ctrlc::set_handler(move || {
|
||||
let _ = console::Term::stdout().show_cursor();
|
||||
std::process::exit(0);
|
||||
})
|
||||
.expect("ctrlc::set_handler");
|
||||
|
||||
let args = GenerateArgs {
|
||||
define: self.option,
|
||||
init: true,
|
||||
|
|
|
@ -1,45 +1,39 @@
|
|||
use std::{env::current_dir, path::PathBuf};
|
||||
use crate::{assets, error::Result};
|
||||
use clap::Parser;
|
||||
use std::{fs, path::PathBuf};
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// The env var that will be set by the linker intercept cmd to indicate that we should act as a linker
|
||||
pub(crate) const LINK_OUTPUT_ENV_VAR: &str = "dx-magic-link-file";
|
||||
|
||||
/// Should we act as a linker?
|
||||
///
|
||||
/// Just check if the magic env var is set
|
||||
pub(crate) fn should_link() -> bool {
|
||||
std::env::var(LINK_OUTPUT_ENV_VAR).is_ok()
|
||||
#[derive(Clone, Debug, Parser)]
|
||||
#[clap(name = "link", hide = true)]
|
||||
pub struct LinkCommand {
|
||||
// Allow us to accept any argument after `dx link`
|
||||
#[clap(trailing_var_arg = true, allow_hyphen_values = true)]
|
||||
pub args: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub(crate) struct InterceptedArgs {
|
||||
pub(crate) work_dir: PathBuf,
|
||||
pub(crate) args: Vec<String>,
|
||||
}
|
||||
|
||||
/// Write the incoming linker args to a file
|
||||
///
|
||||
/// The file will be given by the dx-magic-link-arg env var itself, so we use
|
||||
/// it both for determining if we should act as a linker and the for the file name itself.
|
||||
///
|
||||
/// This will panic if it fails
|
||||
///
|
||||
/// hmmmmmmmm tbh I'd rather just pass the object files back and do the parsing here, but the interface
|
||||
/// is nicer to just bounce back the args and let the host do the parsing/canonicalization
|
||||
pub(crate) fn dump_link_args() -> anyhow::Result<()> {
|
||||
let output = std::env::var(LINK_OUTPUT_ENV_VAR).expect("Missing env var with target file");
|
||||
|
||||
// get the args and then dump them to the file
|
||||
let args: Vec<_> = std::env::args().collect();
|
||||
let escaped = serde_json::to_string(&InterceptedArgs {
|
||||
args,
|
||||
work_dir: current_dir().unwrap(),
|
||||
})
|
||||
.expect("Failed to escape env args");
|
||||
|
||||
// write the file
|
||||
std::fs::write(output, escaped).expect("Failed to write output file");
|
||||
|
||||
Ok(())
|
||||
impl LinkCommand {
|
||||
pub fn link(self) -> Result<()> {
|
||||
let Some((link_args, object_files)) = manganis_cli_support::linker_intercept(self.args)
|
||||
else {
|
||||
tracing::warn!("Invalid linker arguments.");
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
// Parse object files, deserialize JSON, & create a file to propagate JSON.
|
||||
let json = manganis_cli_support::get_json_from_object_files(object_files);
|
||||
let parsed = serde_json::to_string(&json).unwrap();
|
||||
|
||||
let out_dir = PathBuf::from(link_args.first().unwrap());
|
||||
fs::create_dir_all(&out_dir).unwrap();
|
||||
|
||||
let path = out_dir.join(assets::MG_JSON_OUT);
|
||||
fs::write(path, parsed).unwrap();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// We need to pass the subcommand name to Manganis so this
|
||||
/// helps centralize where we set the subcommand "name".
|
||||
pub fn command_name() -> String {
|
||||
"link".to_string()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,17 +1,14 @@
|
|||
pub(crate) mod autoformat;
|
||||
pub(crate) mod build;
|
||||
pub(crate) mod bundle;
|
||||
pub(crate) mod check;
|
||||
pub(crate) mod clean;
|
||||
pub(crate) mod config;
|
||||
pub(crate) mod create;
|
||||
pub(crate) mod doctor;
|
||||
pub(crate) mod httpserver;
|
||||
pub(crate) mod init;
|
||||
pub(crate) mod link;
|
||||
pub(crate) mod run;
|
||||
pub(crate) mod serve;
|
||||
pub(crate) mod translate;
|
||||
pub mod autoformat;
|
||||
pub mod build;
|
||||
pub mod bundle;
|
||||
pub mod check;
|
||||
pub mod clean;
|
||||
pub mod config;
|
||||
pub mod create;
|
||||
pub mod init;
|
||||
pub mod link;
|
||||
pub mod serve;
|
||||
pub mod translate;
|
||||
|
||||
use crate::{custom_error, error::Result, Error};
|
||||
use clap::{Parser, Subcommand};
|
||||
|
@ -26,32 +23,40 @@ use std::{
|
|||
process::{Command, Stdio},
|
||||
};
|
||||
|
||||
pub static VERSION: Lazy<String> = Lazy::new(|| {
|
||||
format!(
|
||||
"{} ({})",
|
||||
crate::dx_build_info::PKG_VERSION,
|
||||
crate::dx_build_info::GIT_COMMIT_HASH_SHORT.unwrap_or("was built without git repository")
|
||||
)
|
||||
});
|
||||
|
||||
/// Build, Bundle & Ship Dioxus Apps.
|
||||
#[derive(Parser)]
|
||||
#[clap(name = "dioxus", version = VERSION.as_str())]
|
||||
pub(crate) struct Cli {
|
||||
pub struct Cli {
|
||||
#[clap(subcommand)]
|
||||
pub(crate) action: Commands,
|
||||
pub action: Commands,
|
||||
|
||||
/// Enable verbose logging.
|
||||
#[clap(short)]
|
||||
pub(crate) v: bool,
|
||||
pub v: bool,
|
||||
|
||||
/// Specify a binary target.
|
||||
#[clap(global = true, long)]
|
||||
pub(crate) bin: Option<String>,
|
||||
pub bin: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub(crate) enum Commands {
|
||||
pub enum Commands {
|
||||
/// Build the Dioxus project and all of its assets.
|
||||
Build(build::BuildArgs),
|
||||
Build(build::Build),
|
||||
|
||||
/// Translate a source file into Dioxus code.
|
||||
Translate(translate::Translate),
|
||||
|
||||
/// Build, watch & serve the Dioxus project and all of its assets.
|
||||
Serve(serve::ServeArgs),
|
||||
Serve(serve::Serve),
|
||||
|
||||
/// Create a new project for Dioxus.
|
||||
New(create::Create),
|
||||
|
@ -74,21 +79,13 @@ pub(crate) enum Commands {
|
|||
#[clap(name = "check")]
|
||||
Check(check::Check),
|
||||
|
||||
/// Start a local http server, akin to a default fullstack app
|
||||
#[clap(name = "http-server")]
|
||||
HttpServer(httpserver::Httpserver),
|
||||
|
||||
/// Run the project without any hotreloading
|
||||
#[clap(name = "run")]
|
||||
Run(run::RunArgs),
|
||||
|
||||
/// Ensure all the tooling is installed and configured correctly
|
||||
#[clap(name = "doctor")]
|
||||
Doctor(doctor::Doctor),
|
||||
|
||||
/// Dioxus config file controls.
|
||||
#[clap(subcommand)]
|
||||
Config(config::Config),
|
||||
|
||||
/// Handles parsing of linker arguments for linker-based systems
|
||||
/// such as Manganis and binary patching.
|
||||
Link(link::LinkCommand),
|
||||
}
|
||||
|
||||
impl Display for Commands {
|
||||
|
@ -104,17 +101,7 @@ impl Display for Commands {
|
|||
Commands::Autoformat(_) => write!(f, "fmt"),
|
||||
Commands::Check(_) => write!(f, "check"),
|
||||
Commands::Bundle(_) => write!(f, "bundle"),
|
||||
Commands::HttpServer(_) => write!(f, "http-server"),
|
||||
Commands::Run(_) => write!(f, "run"),
|
||||
Commands::Doctor(_) => write!(f, "doctor"),
|
||||
Commands::Link(_) => write!(f, "link"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) static VERSION: Lazy<String> = Lazy::new(|| {
|
||||
format!(
|
||||
"{} ({})",
|
||||
crate::build_info::PKG_VERSION,
|
||||
crate::build_info::GIT_COMMIT_HASH_SHORT.unwrap_or("was built without git repository")
|
||||
)
|
||||
});
|
||||
|
|
|
@ -1,60 +0,0 @@
|
|||
use crate::DioxusCrate;
|
||||
use crate::{builder::Builder, serve::ServeUpdate};
|
||||
use anyhow::Context;
|
||||
use build::BuildArgs;
|
||||
|
||||
use super::*;
|
||||
|
||||
/// Check the Rust files in the project for issues.
|
||||
#[derive(Clone, Debug, Parser)]
|
||||
pub(crate) struct RunArgs {
|
||||
/// Information about the target to check
|
||||
#[clap(flatten)]
|
||||
pub(crate) build_args: BuildArgs,
|
||||
}
|
||||
|
||||
impl RunArgs {
|
||||
pub(crate) async fn run(mut self) -> anyhow::Result<()> {
|
||||
let mut dioxus_crate = DioxusCrate::new(&self.build_args.target_args)
|
||||
.context("Failed to load Dioxus workspace")?;
|
||||
|
||||
self.build_args.resolve(&mut dioxus_crate)?;
|
||||
|
||||
let bundles = Builder::start(&mut dioxus_crate, self.build_args.clone())?
|
||||
.wait_for_finish()
|
||||
.await?;
|
||||
|
||||
let mut runner = crate::serve::AppRunner::start();
|
||||
|
||||
let devserver_ip = "127.0.0.1:8080".parse().unwrap();
|
||||
let fullstack_ip = "127.0.0.1:6955".parse().unwrap();
|
||||
|
||||
for bundle in bundles {
|
||||
runner
|
||||
.open(bundle, devserver_ip, Some(fullstack_ip))
|
||||
.await?;
|
||||
}
|
||||
|
||||
loop {
|
||||
let msg = runner.wait().await;
|
||||
|
||||
match msg {
|
||||
ServeUpdate::StderrReceived { platform, msg } => println!("[{platform}]: {msg}"),
|
||||
ServeUpdate::StdoutReceived { platform, msg } => println!("[{platform}]: {msg}"),
|
||||
ServeUpdate::ProcessExited { platform, status } => {
|
||||
runner.kill(platform).await;
|
||||
eprintln!("[{platform}]: process exited with status: {status:?}")
|
||||
}
|
||||
|
||||
ServeUpdate::TracingLog { log } => todo!(),
|
||||
ServeUpdate::NewConnection => todo!(),
|
||||
ServeUpdate::WsMessage(_) => todo!(),
|
||||
ServeUpdate::BuildUpdate(_) => todo!(),
|
||||
ServeUpdate::FilesChanged { files } => todo!(),
|
||||
ServeUpdate::TuiInput { event } => todo!(),
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
|
@ -1,97 +1,96 @@
|
|||
use super::*;
|
||||
use crate::settings;
|
||||
use crate::DioxusCrate;
|
||||
use crate::{builder::Platform, config::AddressArguments};
|
||||
use crate::{settings, tracer::CLILogControl, DioxusCrate};
|
||||
use anyhow::Context;
|
||||
use build::BuildArgs;
|
||||
use crossterm::tty::IsTty;
|
||||
use build::Build;
|
||||
use dioxus_cli_config::AddressArguments;
|
||||
use std::ops::Deref;
|
||||
|
||||
use super::*;
|
||||
|
||||
/// Arguments for the serve command
|
||||
#[derive(Clone, Debug, Parser, Default)]
|
||||
pub struct ServeArguments {
|
||||
/// The arguments for the address the server will run on
|
||||
#[clap(flatten)]
|
||||
pub address: AddressArguments,
|
||||
|
||||
/// Open the app in the default browser [default: true - unless cli settings are set]
|
||||
#[arg(long, default_missing_value="true", num_args=0..=1)]
|
||||
pub open: Option<bool>,
|
||||
|
||||
/// Enable full hot reloading for the app [default: true - unless cli settings are set]
|
||||
#[clap(long, group = "release-incompatible")]
|
||||
pub hot_reload: Option<bool>,
|
||||
|
||||
/// Configure always-on-top for desktop apps [default: true - unless cli settings are set]
|
||||
#[clap(long, default_missing_value = "true")]
|
||||
pub always_on_top: Option<bool>,
|
||||
|
||||
/// Set cross-origin-policy to same-origin [default: false]
|
||||
#[clap(name = "cross-origin-policy")]
|
||||
#[clap(long)]
|
||||
pub cross_origin_policy: bool,
|
||||
|
||||
/// Additional arguments to pass to the executable
|
||||
#[clap(long)]
|
||||
pub args: Vec<String>,
|
||||
|
||||
/// Sets the interval in seconds that the CLI will poll for file changes on WSL.
|
||||
#[clap(long, default_missing_value = "2")]
|
||||
pub wsl_file_poll_interval: Option<u16>,
|
||||
}
|
||||
|
||||
/// Run the WASM project on dev-server
|
||||
#[derive(Clone, Debug, Default, Parser)]
|
||||
#[command(group = clap::ArgGroup::new("release-incompatible").multiple(true).conflicts_with("release"))]
|
||||
#[clap(name = "serve")]
|
||||
pub(crate) struct ServeArgs {
|
||||
/// The arguments for the address the server will run on
|
||||
pub struct Serve {
|
||||
/// Arguments for the serve command
|
||||
#[clap(flatten)]
|
||||
pub(crate) address: AddressArguments,
|
||||
pub(crate) server_arguments: ServeArguments,
|
||||
|
||||
/// Open the app in the default browser [default: true - unless cli settings are set]
|
||||
#[arg(long, default_missing_value="true", num_args=0..=1)]
|
||||
pub(crate) open: Option<bool>,
|
||||
|
||||
/// Enable full hot reloading for the app [default: true - unless cli settings are set]
|
||||
#[clap(long, group = "release-incompatible")]
|
||||
pub(crate) hot_reload: Option<bool>,
|
||||
|
||||
/// Configure always-on-top for desktop apps [default: true - unless cli settings are set]
|
||||
#[clap(long, default_missing_value = "true")]
|
||||
pub(crate) always_on_top: Option<bool>,
|
||||
|
||||
/// Set cross-origin-policy to same-origin [default: false]
|
||||
#[clap(name = "cross-origin-policy")]
|
||||
#[clap(long)]
|
||||
pub(crate) cross_origin_policy: bool,
|
||||
|
||||
/// Additional arguments to pass to the executable
|
||||
#[clap(long)]
|
||||
pub(crate) args: Vec<String>,
|
||||
|
||||
/// Sets the interval in seconds that the CLI will poll for file changes on WSL.
|
||||
#[clap(long, default_missing_value = "2")]
|
||||
pub(crate) wsl_file_poll_interval: Option<u16>,
|
||||
/// Arguments for the dioxus build
|
||||
#[clap(flatten)]
|
||||
pub(crate) build_arguments: Build,
|
||||
|
||||
/// Run the server in interactive mode
|
||||
#[arg(long, default_missing_value="true", num_args=0..=1, short = 'i')]
|
||||
pub(crate) interactive: Option<bool>,
|
||||
|
||||
/// Arguments for the build itself
|
||||
#[clap(flatten)]
|
||||
pub(crate) build_arguments: BuildArgs,
|
||||
pub interactive: Option<bool>,
|
||||
}
|
||||
|
||||
impl ServeArgs {
|
||||
/// Start the tui, builder, etc by resolving the arguments and then running the actual top-level serve function
|
||||
pub(crate) async fn serve(mut self) -> Result<()> {
|
||||
let mut krate = DioxusCrate::new(&self.build_arguments.target_args)
|
||||
.context("Failed to load Dioxus workspace")?;
|
||||
|
||||
self.resolve(&mut krate)?;
|
||||
|
||||
crate::serve::serve_all(self, krate).await
|
||||
}
|
||||
|
||||
impl Serve {
|
||||
/// Resolve the serve arguments from the arguments or the config
|
||||
fn resolve(&mut self, crate_config: &mut DioxusCrate) -> Result<()> {
|
||||
// Set config settings.
|
||||
let settings = settings::CliSettings::load();
|
||||
|
||||
// Enable hot reload.
|
||||
if self.hot_reload.is_none() {
|
||||
self.hot_reload = Some(settings.always_hot_reload.unwrap_or(true));
|
||||
if self.server_arguments.hot_reload.is_none() {
|
||||
self.server_arguments.hot_reload = Some(settings.always_hot_reload.unwrap_or(true));
|
||||
}
|
||||
|
||||
// Open browser.
|
||||
if self.open.is_none() {
|
||||
self.open = Some(settings.always_open_browser.unwrap_or_default());
|
||||
if self.server_arguments.open.is_none() {
|
||||
self.server_arguments.open = Some(settings.always_open_browser.unwrap_or_default());
|
||||
}
|
||||
|
||||
// Set WSL file poll interval.
|
||||
if self.wsl_file_poll_interval.is_none() {
|
||||
self.wsl_file_poll_interval = Some(settings.wsl_file_poll_interval.unwrap_or(2));
|
||||
if self.server_arguments.wsl_file_poll_interval.is_none() {
|
||||
self.server_arguments.wsl_file_poll_interval =
|
||||
Some(settings.wsl_file_poll_interval.unwrap_or(2));
|
||||
}
|
||||
|
||||
// Set always-on-top for desktop.
|
||||
if self.always_on_top.is_none() {
|
||||
self.always_on_top = Some(settings.always_on_top.unwrap_or(true))
|
||||
if self.server_arguments.always_on_top.is_none() {
|
||||
self.server_arguments.always_on_top = Some(settings.always_on_top.unwrap_or(true))
|
||||
}
|
||||
|
||||
crate_config.dioxus_config.desktop.always_on_top = self.always_on_top.unwrap_or(true);
|
||||
crate_config.dioxus_config.desktop.always_on_top =
|
||||
self.server_arguments.always_on_top.unwrap_or(true);
|
||||
|
||||
// Resolve the build arguments
|
||||
self.build_arguments.resolve(crate_config)?;
|
||||
|
||||
// Since this is a serve, adjust the outdir to be target/dx-dist/<crate name>
|
||||
let mut dist_dir = crate_config.out_dir();
|
||||
let mut dist_dir = crate_config.workspace_dir().join("target").join("dx-dist");
|
||||
|
||||
if crate_config.target.is_example() {
|
||||
dist_dir = dist_dir.join("examples");
|
||||
|
@ -103,31 +102,19 @@ impl ServeArgs {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn should_hotreload(&self) -> bool {
|
||||
self.hot_reload.unwrap_or(true)
|
||||
}
|
||||
pub async fn serve(mut self, log_control: CLILogControl) -> anyhow::Result<()> {
|
||||
let mut dioxus_crate = DioxusCrate::new(&self.build_arguments.target_args)
|
||||
.context("Failed to load Dioxus workspace")?;
|
||||
|
||||
pub(crate) fn build_args(&self) -> BuildArgs {
|
||||
self.build_arguments.clone()
|
||||
}
|
||||
self.resolve(&mut dioxus_crate)?;
|
||||
|
||||
pub(crate) fn interactive_tty(&self) -> bool {
|
||||
std::io::stdout().is_tty() && self.interactive.unwrap_or(true)
|
||||
}
|
||||
|
||||
pub(crate) fn should_proxy_build(&self) -> bool {
|
||||
match self.build_arguments.platform() {
|
||||
Platform::Server => true,
|
||||
Platform::Liveview => true,
|
||||
Platform::Web | Platform::Desktop | Platform::Ios | Platform::Android => {
|
||||
self.build_arguments.fullstack
|
||||
}
|
||||
}
|
||||
crate::serve::serve_all(self, dioxus_crate, log_control).await?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Deref for ServeArgs {
|
||||
type Target = BuildArgs;
|
||||
impl Deref for Serve {
|
||||
type Target = Build;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.build_arguments
|
||||
|
|
|
@ -9,27 +9,27 @@ use super::*;
|
|||
/// Translate some source file into Dioxus code
|
||||
#[derive(Clone, Debug, Parser)]
|
||||
#[clap(name = "translate")]
|
||||
pub(crate) struct Translate {
|
||||
pub struct Translate {
|
||||
/// Activate debug mode
|
||||
// short and long flags (-d, --debug) will be deduced from the field's name
|
||||
#[clap(short, long)]
|
||||
pub(crate) component: bool,
|
||||
pub component: bool,
|
||||
|
||||
/// Input file
|
||||
#[clap(short, long)]
|
||||
pub(crate) file: Option<String>,
|
||||
pub file: Option<String>,
|
||||
|
||||
/// Input file
|
||||
#[clap(short, long)]
|
||||
pub(crate) raw: Option<String>,
|
||||
pub raw: Option<String>,
|
||||
|
||||
/// Output file, stdout if not present
|
||||
#[arg(short, long)]
|
||||
pub(crate) output: Option<PathBuf>,
|
||||
pub output: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl Translate {
|
||||
pub(crate) fn translate(self) -> Result<()> {
|
||||
pub fn translate(self) -> Result<()> {
|
||||
// Get the right input for the translation
|
||||
let contents = determine_input(self.file, self.raw)?;
|
||||
|
||||
|
@ -85,7 +85,7 @@ fn write_svg_section(out: &mut String, svgs: Vec<BodyNode>) {
|
|||
for (idx, icon) in svgs.into_iter().enumerate() {
|
||||
let raw =
|
||||
dioxus_autofmt::write_block_out(&CallBody::new(TemplateBody::new(vec![icon]))).unwrap();
|
||||
out.push_str("\n\n pub(crate) fn icon_");
|
||||
out.push_str("\n\n pub fn icon_");
|
||||
out.push_str(&idx.to_string());
|
||||
out.push_str("() -> Element {\n rsx! {");
|
||||
indent_and_write(&raw, 2, out);
|
||||
|
|
|
@ -1,18 +0,0 @@
|
|||
mod app;
|
||||
mod bundle;
|
||||
mod desktop;
|
||||
mod dioxus_config;
|
||||
mod platform;
|
||||
mod serve;
|
||||
mod web;
|
||||
|
||||
pub(crate) use app::*;
|
||||
pub(crate) use bundle::*;
|
||||
pub(crate) use desktop::*;
|
||||
pub(crate) use dioxus_config::*;
|
||||
pub(crate) use serve::*;
|
||||
pub(crate) use web::*;
|
||||
|
||||
// mod fullstack;
|
||||
// mod liveview;
|
||||
// mod static_generation;
|
|
@ -1,37 +0,0 @@
|
|||
use crate::builder::Platform;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) struct ApplicationConfig {
|
||||
#[serde(default = "default_name")]
|
||||
pub(crate) name: String,
|
||||
|
||||
#[serde(default = "default_platform")]
|
||||
pub(crate) default_platform: Platform,
|
||||
|
||||
#[serde(default = "out_dir_default")]
|
||||
pub(crate) out_dir: PathBuf,
|
||||
|
||||
#[serde(default = "asset_dir_default")]
|
||||
pub(crate) asset_dir: PathBuf,
|
||||
|
||||
#[serde(default)]
|
||||
pub(crate) sub_package: Option<String>,
|
||||
}
|
||||
|
||||
pub(crate) fn default_name() -> String {
|
||||
"dioxus-app".into()
|
||||
}
|
||||
|
||||
pub(crate) fn default_platform() -> Platform {
|
||||
Platform::Web
|
||||
}
|
||||
|
||||
pub(crate) fn asset_dir_default() -> PathBuf {
|
||||
PathBuf::from("assets")
|
||||
}
|
||||
|
||||
pub(crate) fn out_dir_default() -> PathBuf {
|
||||
PathBuf::from("dist")
|
||||
}
|
|
@ -1,105 +0,0 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub(crate) struct BundleConfig {
|
||||
pub(crate) identifier: Option<String>,
|
||||
pub(crate) publisher: Option<String>,
|
||||
pub(crate) icon: Option<Vec<String>>,
|
||||
pub(crate) resources: Option<Vec<String>>,
|
||||
pub(crate) copyright: Option<String>,
|
||||
pub(crate) category: Option<String>,
|
||||
pub(crate) short_description: Option<String>,
|
||||
pub(crate) long_description: Option<String>,
|
||||
pub(crate) external_bin: Option<Vec<String>>,
|
||||
pub(crate) deb: Option<DebianSettings>,
|
||||
pub(crate) macos: Option<MacOsSettings>,
|
||||
pub(crate) windows: Option<WindowsSettings>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub(crate) struct DebianSettings {
|
||||
pub(crate) depends: Option<Vec<String>>,
|
||||
pub(crate) files: HashMap<PathBuf, PathBuf>,
|
||||
pub(crate) nsis: Option<NsisSettings>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub(crate) struct WixSettings {
|
||||
pub(crate) language: Vec<(String, Option<PathBuf>)>,
|
||||
pub(crate) template: Option<PathBuf>,
|
||||
pub(crate) fragment_paths: Vec<PathBuf>,
|
||||
pub(crate) component_group_refs: Vec<String>,
|
||||
pub(crate) component_refs: Vec<String>,
|
||||
pub(crate) feature_group_refs: Vec<String>,
|
||||
pub(crate) feature_refs: Vec<String>,
|
||||
pub(crate) merge_refs: Vec<String>,
|
||||
pub(crate) skip_webview_install: bool,
|
||||
pub(crate) license: Option<PathBuf>,
|
||||
pub(crate) enable_elevated_update_task: bool,
|
||||
pub(crate) banner_path: Option<PathBuf>,
|
||||
pub(crate) dialog_image_path: Option<PathBuf>,
|
||||
pub(crate) fips_compliant: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub(crate) struct MacOsSettings {
|
||||
pub(crate) frameworks: Option<Vec<String>>,
|
||||
pub(crate) minimum_system_version: Option<String>,
|
||||
pub(crate) license: Option<String>,
|
||||
pub(crate) exception_domain: Option<String>,
|
||||
pub(crate) signing_identity: Option<String>,
|
||||
pub(crate) provider_short_name: Option<String>,
|
||||
pub(crate) entitlements: Option<String>,
|
||||
pub(crate) info_plist_path: Option<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) struct WindowsSettings {
|
||||
pub(crate) digest_algorithm: Option<String>,
|
||||
pub(crate) certificate_thumbprint: Option<String>,
|
||||
pub(crate) timestamp_url: Option<String>,
|
||||
pub(crate) tsp: bool,
|
||||
pub(crate) wix: Option<WixSettings>,
|
||||
pub(crate) icon_path: Option<PathBuf>,
|
||||
pub(crate) webview_install_mode: WebviewInstallMode,
|
||||
pub(crate) webview_fixed_runtime_path: Option<PathBuf>,
|
||||
pub(crate) allow_downgrades: bool,
|
||||
pub(crate) nsis: Option<NsisSettings>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) struct NsisSettings {
|
||||
pub(crate) template: Option<PathBuf>,
|
||||
pub(crate) license: Option<PathBuf>,
|
||||
pub(crate) header_image: Option<PathBuf>,
|
||||
pub(crate) sidebar_image: Option<PathBuf>,
|
||||
pub(crate) installer_icon: Option<PathBuf>,
|
||||
pub(crate) install_mode: NSISInstallerMode,
|
||||
pub(crate) languages: Option<Vec<String>>,
|
||||
pub(crate) custom_language_files: Option<HashMap<String, PathBuf>>,
|
||||
pub(crate) display_language_selector: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) enum NSISInstallerMode {
|
||||
CurrentUser,
|
||||
PerMachine,
|
||||
Both,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) enum WebviewInstallMode {
|
||||
Skip,
|
||||
DownloadBootstrapper { silent: bool },
|
||||
EmbedBootstrapper { silent: bool },
|
||||
OfflineInstaller { silent: bool },
|
||||
FixedRuntime { path: PathBuf },
|
||||
}
|
||||
|
||||
impl Default for WebviewInstallMode {
|
||||
fn default() -> Self {
|
||||
Self::OfflineInstaller { silent: false }
|
||||
}
|
||||
}
|
|
@ -1,17 +0,0 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Represents configuration items for the desktop platform.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) struct DesktopConfig {
|
||||
/// Describes whether a debug-mode desktop app should be always-on-top.
|
||||
#[serde(default)]
|
||||
pub(crate) always_on_top: bool,
|
||||
}
|
||||
|
||||
impl Default for DesktopConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
always_on_top: true,
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,62 +0,0 @@
|
|||
use super::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) struct DioxusConfig {
|
||||
pub(crate) application: ApplicationConfig,
|
||||
|
||||
#[serde(default)]
|
||||
pub(crate) web: WebConfig,
|
||||
|
||||
#[serde(default)]
|
||||
pub(crate) desktop: DesktopConfig,
|
||||
|
||||
#[serde(default)]
|
||||
pub(crate) bundle: BundleConfig,
|
||||
}
|
||||
|
||||
impl Default for DioxusConfig {
|
||||
fn default() -> Self {
|
||||
let name = default_name();
|
||||
Self {
|
||||
application: ApplicationConfig {
|
||||
name: name.clone(),
|
||||
default_platform: default_platform(),
|
||||
out_dir: out_dir_default(),
|
||||
asset_dir: asset_dir_default(),
|
||||
|
||||
sub_package: None,
|
||||
},
|
||||
web: WebConfig {
|
||||
app: WebAppConfig {
|
||||
title: default_title(),
|
||||
base_path: None,
|
||||
},
|
||||
proxy: vec![],
|
||||
watcher: Default::default(),
|
||||
resource: WebResourceConfig {
|
||||
dev: WebDevResourceConfig {
|
||||
style: vec![],
|
||||
script: vec![],
|
||||
},
|
||||
style: Some(vec![]),
|
||||
script: Some(vec![]),
|
||||
},
|
||||
https: WebHttpsConfig {
|
||||
enabled: None,
|
||||
mkcert: None,
|
||||
key_path: None,
|
||||
cert_path: None,
|
||||
},
|
||||
pre_compress: true,
|
||||
wasm_opt: Default::default(),
|
||||
},
|
||||
desktop: DesktopConfig::default(),
|
||||
bundle: BundleConfig {
|
||||
identifier: Some(format!("io.github.{name}")),
|
||||
publisher: Some(name),
|
||||
..Default::default()
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,41 +0,0 @@
|
|||
#![allow(unused)] // lots of configs...
|
||||
|
||||
use clap::Parser;
|
||||
use std::net::{IpAddr, Ipv4Addr, SocketAddr, SocketAddrV4};
|
||||
|
||||
/// The arguments for the address the server will run on
|
||||
#[derive(Clone, Debug, Parser)]
|
||||
pub(crate) struct AddressArguments {
|
||||
/// The port the server will run on
|
||||
#[clap(long)]
|
||||
#[clap(default_value_t = default_port())]
|
||||
pub(crate) port: u16,
|
||||
|
||||
/// The address the server will run on
|
||||
#[clap(long, default_value_t = default_address())]
|
||||
pub(crate) addr: std::net::IpAddr,
|
||||
}
|
||||
|
||||
impl Default for AddressArguments {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
port: default_port(),
|
||||
addr: default_address(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AddressArguments {
|
||||
/// Get the address the server should run on
|
||||
pub(crate) fn address(&self) -> SocketAddr {
|
||||
SocketAddr::new(self.addr, self.port)
|
||||
}
|
||||
}
|
||||
|
||||
fn default_port() -> u16 {
|
||||
8080
|
||||
}
|
||||
|
||||
fn default_address() -> IpAddr {
|
||||
IpAddr::V4(std::net::Ipv4Addr::new(127, 0, 0, 1))
|
||||
}
|
|
@ -1,180 +0,0 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) struct WebConfig {
|
||||
#[serde(default)]
|
||||
pub(crate) app: WebAppConfig,
|
||||
|
||||
#[serde(default)]
|
||||
pub(crate) proxy: Vec<WebProxyConfig>,
|
||||
|
||||
#[serde(default)]
|
||||
pub(crate) watcher: WebWatcherConfig,
|
||||
|
||||
#[serde(default)]
|
||||
pub(crate) resource: WebResourceConfig,
|
||||
|
||||
#[serde(default)]
|
||||
pub(crate) https: WebHttpsConfig,
|
||||
|
||||
/// Whether to enable pre-compression of assets and wasm during a web build in release mode
|
||||
#[serde(default = "true_bool")]
|
||||
pub(crate) pre_compress: bool,
|
||||
|
||||
/// The wasm-opt configuration
|
||||
#[serde(default)]
|
||||
pub(crate) wasm_opt: WasmOptConfig,
|
||||
}
|
||||
|
||||
impl Default for WebConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
pre_compress: true_bool(),
|
||||
app: Default::default(),
|
||||
https: Default::default(),
|
||||
wasm_opt: Default::default(),
|
||||
proxy: Default::default(),
|
||||
watcher: Default::default(),
|
||||
resource: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The wasm-opt configuration
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub(crate) struct WasmOptConfig {
|
||||
/// The wasm-opt level to use for release builds [default: s]
|
||||
/// Options:
|
||||
/// - z: optimize aggressively for size
|
||||
/// - s: optimize for size
|
||||
/// - 1: optimize for speed
|
||||
/// - 2: optimize for more for speed
|
||||
/// - 3: optimize for even more for speed
|
||||
/// - 4: optimize aggressively for speed
|
||||
#[serde(default)]
|
||||
pub(crate) level: WasmOptLevel,
|
||||
|
||||
/// Keep debug symbols in the wasm file
|
||||
#[serde(default = "false_bool")]
|
||||
pub(crate) debug: bool,
|
||||
}
|
||||
|
||||
/// The wasm-opt level to use for release web builds [default: 4]
|
||||
#[derive(Default, Debug, Copy, Clone, Serialize, Deserialize)]
|
||||
pub(crate) enum WasmOptLevel {
|
||||
/// Optimize aggressively for size
|
||||
#[serde(rename = "z")]
|
||||
Z,
|
||||
/// Optimize for size
|
||||
#[serde(rename = "s")]
|
||||
S,
|
||||
/// Don't optimize
|
||||
#[serde(rename = "0")]
|
||||
Zero,
|
||||
/// Optimize for speed
|
||||
#[serde(rename = "1")]
|
||||
One,
|
||||
/// Optimize for more for speed
|
||||
#[serde(rename = "2")]
|
||||
Two,
|
||||
/// Optimize for even more for speed
|
||||
#[serde(rename = "3")]
|
||||
Three,
|
||||
/// Optimize aggressively for speed
|
||||
#[serde(rename = "4")]
|
||||
#[default]
|
||||
Four,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) struct WebAppConfig {
|
||||
#[serde(default = "default_title")]
|
||||
pub(crate) title: String,
|
||||
pub(crate) base_path: Option<String>,
|
||||
}
|
||||
|
||||
impl WebAppConfig {
|
||||
/// Get the normalized base path for the application with `/` trimmed from both ends. If the base path is not set, this will return `.`.
|
||||
pub(crate) fn base_path(&self) -> &str {
|
||||
match &self.base_path {
|
||||
Some(path) => path.trim_matches('/'),
|
||||
None => ".",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for WebAppConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
title: default_title(),
|
||||
base_path: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) struct WebProxyConfig {
|
||||
pub(crate) backend: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) struct WebWatcherConfig {
|
||||
#[serde(default = "watch_path_default")]
|
||||
pub(crate) watch_path: Vec<PathBuf>,
|
||||
|
||||
#[serde(default)]
|
||||
pub(crate) reload_html: bool,
|
||||
|
||||
#[serde(default = "true_bool")]
|
||||
pub(crate) index_on_404: bool,
|
||||
}
|
||||
|
||||
impl Default for WebWatcherConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
watch_path: watch_path_default(),
|
||||
reload_html: false,
|
||||
index_on_404: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn watch_path_default() -> Vec<PathBuf> {
|
||||
vec![PathBuf::from("src"), PathBuf::from("examples")]
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) struct WebResourceConfig {
|
||||
pub(crate) dev: WebDevResourceConfig,
|
||||
pub(crate) style: Option<Vec<PathBuf>>,
|
||||
pub(crate) script: Option<Vec<PathBuf>>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) struct WebDevResourceConfig {
|
||||
#[serde(default)]
|
||||
pub(crate) style: Vec<PathBuf>,
|
||||
#[serde(default)]
|
||||
pub(crate) script: Vec<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
|
||||
pub(crate) struct WebHttpsConfig {
|
||||
pub(crate) enabled: Option<bool>,
|
||||
pub(crate) mkcert: Option<bool>,
|
||||
pub(crate) key_path: Option<String>,
|
||||
pub(crate) cert_path: Option<String>,
|
||||
}
|
||||
|
||||
fn true_bool() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn false_bool() -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
pub(crate) fn default_title() -> String {
|
||||
"dioxus | ⛺".into()
|
||||
}
|
|
@ -1,5 +1,5 @@
|
|||
use crate::builder::Platform;
|
||||
use crate::{build::TargetArgs, config::DioxusConfig};
|
||||
use crate::build::TargetArgs;
|
||||
use dioxus_cli_config::{DioxusConfig, Platform};
|
||||
use krates::cm::Target;
|
||||
use krates::{cm::TargetKind, Cmd, Krates, NodeId};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
@ -11,275 +11,6 @@ use std::{
|
|||
|
||||
use crate::metadata::CargoError;
|
||||
|
||||
// Contains information about the crate we are currently in and the dioxus config for that crate
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct DioxusCrate {
|
||||
pub(crate) krates: Arc<Krates>,
|
||||
pub(crate) package: NodeId,
|
||||
pub(crate) dioxus_config: DioxusConfig,
|
||||
pub(crate) target: Target,
|
||||
}
|
||||
|
||||
impl DioxusCrate {
|
||||
pub(crate) fn new(target: &TargetArgs) -> Result<Self, CrateConfigError> {
|
||||
let mut cmd = Cmd::new();
|
||||
cmd.features(target.features.clone());
|
||||
let builder = krates::Builder::new();
|
||||
let krates = builder.build(cmd, |_| {})?;
|
||||
let package = find_main_package(target.package.clone(), &krates)?;
|
||||
|
||||
let dioxus_config = load_dioxus_config(&krates, package)?.unwrap_or_default();
|
||||
|
||||
let package_name = krates[package].name.clone();
|
||||
let target_kind = if target.example.is_some() {
|
||||
TargetKind::Example
|
||||
} else {
|
||||
TargetKind::Bin
|
||||
};
|
||||
let target_name = target
|
||||
.example
|
||||
.clone()
|
||||
.or(target.bin.clone())
|
||||
.unwrap_or(package_name);
|
||||
let main_package = &krates[package];
|
||||
let target = main_package
|
||||
.targets
|
||||
.iter()
|
||||
.find(|target| {
|
||||
target_name == target.name.as_str() && target.kind.contains(&target_kind)
|
||||
})
|
||||
.ok_or(CrateConfigError::TargetNotFound(target_name))?
|
||||
.clone();
|
||||
|
||||
Ok(Self {
|
||||
krates: Arc::new(krates),
|
||||
package,
|
||||
dioxus_config,
|
||||
target,
|
||||
})
|
||||
}
|
||||
|
||||
/// Compose an asset directory. Represents the typical "public" directory
|
||||
/// with publicly available resources (configurable in the `Dioxus.toml`).
|
||||
pub(crate) fn legacy_asset_dir(&self) -> PathBuf {
|
||||
self.crate_dir()
|
||||
.join(&self.dioxus_config.application.asset_dir)
|
||||
}
|
||||
|
||||
/// Get the list of files in the "legacy" asset directory
|
||||
pub(crate) fn legacy_asset_dir_files(&self) -> Vec<PathBuf> {
|
||||
let mut files = vec![];
|
||||
|
||||
let Ok(read_dir) = self.legacy_asset_dir().read_dir() else {
|
||||
return files;
|
||||
};
|
||||
|
||||
for entry in read_dir {
|
||||
if let Ok(entry) = entry {
|
||||
files.push(entry.path());
|
||||
}
|
||||
}
|
||||
|
||||
files
|
||||
}
|
||||
|
||||
pub(crate) fn bundle_out_dir(&self) -> PathBuf {
|
||||
todo!("bundle out dir")
|
||||
}
|
||||
|
||||
/// Compose an out directory. Represents the typical "dist" directory that
|
||||
/// is "distributed" after building an application (configurable in the
|
||||
/// `Dioxus.toml`).
|
||||
pub(crate) fn out_dir(&self) -> PathBuf {
|
||||
let dir = self.workspace_dir().join("target").join("dx-dist");
|
||||
std::fs::create_dir_all(&dir).unwrap();
|
||||
dir
|
||||
}
|
||||
|
||||
/// Create a workdir for the given platform
|
||||
/// This can be used as a temporary directory for the build, but in an observable way such that
|
||||
/// you can see the files in the directory via `target`
|
||||
pub(crate) fn workdir(&self, platform: Platform) -> PathBuf {
|
||||
let plat_name = match platform {
|
||||
Platform::Web => "web",
|
||||
Platform::Desktop => "desktop",
|
||||
Platform::Ios => "ios",
|
||||
Platform::Android => "android",
|
||||
Platform::Server => "server",
|
||||
Platform::Liveview => "liveview",
|
||||
};
|
||||
let dir = self
|
||||
.workspace_dir()
|
||||
.join("target")
|
||||
.join("dx-workdir")
|
||||
.join(self.dioxus_config.application.name.clone())
|
||||
.join(plat_name);
|
||||
std::fs::create_dir_all(&dir).unwrap();
|
||||
dir
|
||||
}
|
||||
|
||||
/// Get the workspace directory for the crate
|
||||
pub(crate) fn workspace_dir(&self) -> PathBuf {
|
||||
self.krates.workspace_root().as_std_path().to_path_buf()
|
||||
}
|
||||
|
||||
/// Get the directory of the crate
|
||||
pub(crate) fn crate_dir(&self) -> PathBuf {
|
||||
self.package()
|
||||
.manifest_path
|
||||
.parent()
|
||||
.unwrap()
|
||||
.as_std_path()
|
||||
.to_path_buf()
|
||||
}
|
||||
|
||||
/// Get the main source file of the target
|
||||
pub(crate) fn main_source_file(&self) -> PathBuf {
|
||||
self.target.src_path.as_std_path().to_path_buf()
|
||||
}
|
||||
|
||||
/// Get the package we are currently in
|
||||
pub(crate) fn package(&self) -> &krates::cm::Package {
|
||||
&self.krates[self.package]
|
||||
}
|
||||
|
||||
/// Get the name of the package we are compiling
|
||||
pub(crate) fn executable_name(&self) -> &str {
|
||||
&self.target.name
|
||||
}
|
||||
|
||||
/// Get the type of executable we are compiling
|
||||
pub(crate) fn executable_type(&self) -> krates::cm::TargetKind {
|
||||
self.target.kind[0].clone()
|
||||
}
|
||||
|
||||
pub(crate) fn features_for_platform(&mut self, platform: Platform) -> Vec<String> {
|
||||
let package = self.package();
|
||||
// Try to find the feature that activates the dioxus feature for the given platform
|
||||
let dioxus_feature = platform.feature_name();
|
||||
let feature = package.features.iter().find_map(|(key, features)| {
|
||||
// Find a feature that starts with dioxus/ or dioxus?/
|
||||
for feature in features {
|
||||
if let Some((_, after_dioxus)) = feature.split_once("dioxus") {
|
||||
if let Some(dioxus_feature_enabled) =
|
||||
after_dioxus.trim_start_matches('?').strip_prefix('/')
|
||||
{
|
||||
// If that enables the feature we are looking for, return that feature
|
||||
if dioxus_feature_enabled == dioxus_feature {
|
||||
return Some(key.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
});
|
||||
|
||||
feature.into_iter().collect()
|
||||
}
|
||||
|
||||
/// Check if assets should be pre_compressed. This will only be true in release mode if the user
|
||||
/// has enabled pre_compress in the web config.
|
||||
pub(crate) fn should_pre_compress_web_assets(&self, release: bool) -> bool {
|
||||
self.dioxus_config.web.pre_compress && release
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) struct Executable {
|
||||
pub(crate) name: String,
|
||||
pub(crate) ty: ExecutableType,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
|
||||
pub(crate) enum ExecutableType {
|
||||
Binary,
|
||||
Lib,
|
||||
Example,
|
||||
}
|
||||
|
||||
impl ExecutableType {
|
||||
/// Get the name of the executable if it is a binary or an example.
|
||||
pub(crate) fn executable(&self) -> bool {
|
||||
matches!(self, Self::Binary | Self::Example)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) struct LoadDioxusConfigError {
|
||||
location: String,
|
||||
error: String,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for LoadDioxusConfigError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{} {}", self.location, self.error)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for LoadDioxusConfigError {}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[non_exhaustive]
|
||||
pub(crate) enum CrateConfigError {
|
||||
Cargo(CargoError),
|
||||
Io(std::io::Error),
|
||||
Toml(toml::de::Error),
|
||||
LoadDioxusConfig(LoadDioxusConfigError),
|
||||
TargetNotFound(String),
|
||||
Krates(krates::Error),
|
||||
PackageNotFound(String),
|
||||
CurrentPackageNotFound,
|
||||
}
|
||||
|
||||
impl From<CargoError> for CrateConfigError {
|
||||
fn from(err: CargoError) -> Self {
|
||||
Self::Cargo(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<std::io::Error> for CrateConfigError {
|
||||
fn from(err: std::io::Error) -> Self {
|
||||
Self::Io(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<toml::de::Error> for CrateConfigError {
|
||||
fn from(err: toml::de::Error) -> Self {
|
||||
Self::Toml(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<LoadDioxusConfigError> for CrateConfigError {
|
||||
fn from(err: LoadDioxusConfigError) -> Self {
|
||||
Self::LoadDioxusConfig(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<krates::Error> for CrateConfigError {
|
||||
fn from(err: krates::Error) -> Self {
|
||||
Self::Krates(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for CrateConfigError {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Cargo(err) => write!(f, "{}", err),
|
||||
Self::Io(err) => write!(f, "{}", err),
|
||||
Self::Toml(err) => write!(f, "{}", err),
|
||||
Self::LoadDioxusConfig(err) => write!(f, "{}", err),
|
||||
Self::TargetNotFound(target) => {
|
||||
write!(f, "Failed to find target with name: {}", target)
|
||||
}
|
||||
Self::Krates(err) => write!(f, "{}", err),
|
||||
Self::PackageNotFound(package) => write!(f, "Package not found: {}", package),
|
||||
Self::CurrentPackageNotFound => write!(f, "Failed to find current package"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for CrateConfigError {}
|
||||
|
||||
/// Load the dioxus config from a path
|
||||
fn load_dioxus_config(
|
||||
krates: &Krates,
|
||||
|
@ -300,7 +31,6 @@ fn load_dioxus_config(
|
|||
.as_std_path()
|
||||
.to_path_buf()
|
||||
.canonicalize()?;
|
||||
|
||||
let workspace_path = krates
|
||||
.workspace_root()
|
||||
.as_std_path()
|
||||
|
@ -398,3 +128,227 @@ fn find_main_package(package: Option<String>, krates: &Krates) -> Result<NodeId,
|
|||
let package = krates.nid_for_kid(kid).unwrap();
|
||||
Ok(package)
|
||||
}
|
||||
|
||||
// Contains information about the crate we are currently in and the dioxus config for that crate
|
||||
#[derive(Clone)]
|
||||
pub struct DioxusCrate {
|
||||
pub krates: Arc<Krates>,
|
||||
pub package: NodeId,
|
||||
pub dioxus_config: DioxusConfig,
|
||||
pub target: Target,
|
||||
}
|
||||
|
||||
impl DioxusCrate {
|
||||
pub fn new(target: &TargetArgs) -> Result<Self, CrateConfigError> {
|
||||
let mut cmd = Cmd::new();
|
||||
cmd.features(target.features.clone());
|
||||
let builder = krates::Builder::new();
|
||||
let krates = builder.build(cmd, |_| {})?;
|
||||
let package = find_main_package(target.package.clone(), &krates)?;
|
||||
|
||||
let dioxus_config = load_dioxus_config(&krates, package)?.unwrap_or_default();
|
||||
|
||||
let package_name = krates[package].name.clone();
|
||||
let target_kind = if target.example.is_some() {
|
||||
TargetKind::Example
|
||||
} else {
|
||||
TargetKind::Bin
|
||||
};
|
||||
let target_name = target
|
||||
.example
|
||||
.clone()
|
||||
.or(target.bin.clone())
|
||||
.unwrap_or(package_name);
|
||||
let main_package = &krates[package];
|
||||
let target = main_package
|
||||
.targets
|
||||
.iter()
|
||||
.find(|target| {
|
||||
target_name == target.name.as_str() && target.kind.contains(&target_kind)
|
||||
})
|
||||
.ok_or(CrateConfigError::TargetNotFound(target_name))?
|
||||
.clone();
|
||||
|
||||
Ok(Self {
|
||||
krates: Arc::new(krates),
|
||||
package,
|
||||
dioxus_config,
|
||||
target,
|
||||
})
|
||||
}
|
||||
|
||||
/// Compose an asset directory. Represents the typical "public" directory
|
||||
/// with publicly available resources (configurable in the `Dioxus.toml`).
|
||||
pub fn asset_dir(&self) -> PathBuf {
|
||||
self.crate_dir()
|
||||
.join(&self.dioxus_config.application.asset_dir)
|
||||
}
|
||||
|
||||
/// Compose an out directory. Represents the typical "dist" directory that
|
||||
/// is "distributed" after building an application (configurable in the
|
||||
/// `Dioxus.toml`).
|
||||
pub fn out_dir(&self) -> PathBuf {
|
||||
self.workspace_dir()
|
||||
.join(&self.dioxus_config.application.out_dir)
|
||||
}
|
||||
|
||||
/// Get the workspace directory for the crate
|
||||
pub fn workspace_dir(&self) -> PathBuf {
|
||||
self.krates.workspace_root().as_std_path().to_path_buf()
|
||||
}
|
||||
|
||||
/// Get the directory of the crate
|
||||
pub fn crate_dir(&self) -> PathBuf {
|
||||
self.package()
|
||||
.manifest_path
|
||||
.parent()
|
||||
.unwrap()
|
||||
.as_std_path()
|
||||
.to_path_buf()
|
||||
}
|
||||
|
||||
/// Get the main source file of the target
|
||||
pub fn main_source_file(&self) -> PathBuf {
|
||||
self.target.src_path.as_std_path().to_path_buf()
|
||||
}
|
||||
|
||||
/// Get the package we are currently in
|
||||
pub fn package(&self) -> &krates::cm::Package {
|
||||
&self.krates[self.package]
|
||||
}
|
||||
|
||||
/// Get the name of the package we are compiling
|
||||
pub fn executable_name(&self) -> &str {
|
||||
&self.target.name
|
||||
}
|
||||
|
||||
/// Get the type of executable we are compiling
|
||||
pub fn executable_type(&self) -> krates::cm::TargetKind {
|
||||
self.target.kind[0].clone()
|
||||
}
|
||||
|
||||
pub fn features_for_platform(&mut self, platform: Platform) -> Vec<String> {
|
||||
let package = self.package();
|
||||
// Try to find the feature that activates the dioxus feature for the given platform
|
||||
let dioxus_feature = platform.feature_name();
|
||||
let feature = package.features.iter().find_map(|(key, features)| {
|
||||
// Find a feature that starts with dioxus/ or dioxus?/
|
||||
for feature in features {
|
||||
if let Some((_, after_dioxus)) = feature.split_once("dioxus") {
|
||||
if let Some(dioxus_feature_enabled) =
|
||||
after_dioxus.trim_start_matches('?').strip_prefix('/')
|
||||
{
|
||||
// If that enables the feature we are looking for, return that feature
|
||||
if dioxus_feature_enabled == dioxus_feature {
|
||||
return Some(key.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
});
|
||||
|
||||
feature.into_iter().collect()
|
||||
}
|
||||
|
||||
/// Check if assets should be pre_compressed. This will only be true in release mode if the user has enabled pre_compress in the web config.
|
||||
pub fn should_pre_compress_web_assets(&self, release: bool) -> bool {
|
||||
self.dioxus_config.web.pre_compress && release
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Executable {
|
||||
pub name: String,
|
||||
pub ty: ExecutableType,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
|
||||
pub enum ExecutableType {
|
||||
Binary,
|
||||
Lib,
|
||||
Example,
|
||||
}
|
||||
|
||||
impl ExecutableType {
|
||||
/// Get the name of the executable if it is a binary or an example.
|
||||
pub fn executable(&self) -> bool {
|
||||
matches!(self, Self::Binary | Self::Example)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct LoadDioxusConfigError {
|
||||
location: String,
|
||||
error: String,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for LoadDioxusConfigError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{} {}", self.location, self.error)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for LoadDioxusConfigError {}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[non_exhaustive]
|
||||
pub enum CrateConfigError {
|
||||
Cargo(CargoError),
|
||||
Io(std::io::Error),
|
||||
Toml(toml::de::Error),
|
||||
LoadDioxusConfig(LoadDioxusConfigError),
|
||||
TargetNotFound(String),
|
||||
Krates(krates::Error),
|
||||
PackageNotFound(String),
|
||||
CurrentPackageNotFound,
|
||||
}
|
||||
|
||||
impl From<CargoError> for CrateConfigError {
|
||||
fn from(err: CargoError) -> Self {
|
||||
Self::Cargo(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<std::io::Error> for CrateConfigError {
|
||||
fn from(err: std::io::Error) -> Self {
|
||||
Self::Io(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<toml::de::Error> for CrateConfigError {
|
||||
fn from(err: toml::de::Error) -> Self {
|
||||
Self::Toml(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<LoadDioxusConfigError> for CrateConfigError {
|
||||
fn from(err: LoadDioxusConfigError) -> Self {
|
||||
Self::LoadDioxusConfig(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<krates::Error> for CrateConfigError {
|
||||
fn from(err: krates::Error) -> Self {
|
||||
Self::Krates(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for CrateConfigError {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Cargo(err) => write!(f, "{}", err),
|
||||
Self::Io(err) => write!(f, "{}", err),
|
||||
Self::Toml(err) => write!(f, "{}", err),
|
||||
Self::LoadDioxusConfig(err) => write!(f, "{}", err),
|
||||
Self::TargetNotFound(target) => {
|
||||
write!(f, "Failed to find target with name: {}", target)
|
||||
}
|
||||
Self::Krates(err) => write!(f, "{}", err),
|
||||
Self::PackageNotFound(package) => write!(f, "Package not found: {}", package),
|
||||
Self::CurrentPackageNotFound => write!(f, "Failed to find current package"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for CrateConfigError {}
|
||||
|
|
|
@ -2,10 +2,10 @@ use thiserror::Error as ThisError;
|
|||
|
||||
use crate::{metadata::CargoError, CrateConfigError, LoadDioxusConfigError};
|
||||
|
||||
pub(crate) type Result<T, E = Error> = std::result::Result<T, E>;
|
||||
pub type Result<T, E = Error> = std::result::Result<T, E>;
|
||||
|
||||
#[derive(ThisError, Debug)]
|
||||
pub(crate) enum Error {
|
||||
pub enum Error {
|
||||
/// Used when errors need to propagate but are too unique to be typed
|
||||
#[error("{0}")]
|
||||
Unique(String),
|
||||
|
|
|
@ -1,126 +0,0 @@
|
|||
//! Methods for working with the filesystem that are faster than the std fs methods
|
||||
//! Uses stuff like rayon, caching, and other optimizations
|
||||
//!
|
||||
//! Allows configuration in case you want to do some work while copying and allows you to track progress
|
||||
|
||||
use std::{
|
||||
ffi::OsString,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use brotli::enc::BrotliEncoderParams;
|
||||
use walkdir::WalkDir;
|
||||
|
||||
pub fn copy_asset(src: &Path, dest: &Path) -> std::io::Result<()> {
|
||||
if src.is_dir() {
|
||||
copy_dir_to(src, dest, false)?;
|
||||
} else {
|
||||
std::fs::copy(src, dest)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn copy_dir_to(
|
||||
src_dir: &Path,
|
||||
dest_dir: &Path,
|
||||
pre_compress: bool,
|
||||
) -> std::io::Result<()> {
|
||||
let entries = std::fs::read_dir(&src_dir)?;
|
||||
let mut children: Vec<std::thread::JoinHandle<std::io::Result<()>>> = Vec::new();
|
||||
|
||||
for entry in entries.flatten() {
|
||||
let entry_path = entry.path();
|
||||
let path_relative_to_src = entry_path.strip_prefix(&src_dir).unwrap();
|
||||
let output_file_location = dest_dir.join(path_relative_to_src);
|
||||
children.push(std::thread::spawn(move || {
|
||||
if entry.file_type()?.is_dir() {
|
||||
// If the file is a directory, recursively copy it into the output directory
|
||||
if let Err(err) = copy_dir_to(&entry_path, &output_file_location, pre_compress) {
|
||||
tracing::error!(
|
||||
"Failed to pre-compress directory {}: {}",
|
||||
entry_path.display(),
|
||||
err
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// Make sure the directory exists
|
||||
std::fs::create_dir_all(output_file_location.parent().unwrap())?;
|
||||
// Copy the file to the output directory
|
||||
std::fs::copy(&entry_path, &output_file_location)?;
|
||||
|
||||
// Then pre-compress the file if needed
|
||||
if pre_compress {
|
||||
if let Err(err) = pre_compress_file(&output_file_location) {
|
||||
tracing::error!(
|
||||
"Failed to pre-compress static assets {}: {}",
|
||||
output_file_location.display(),
|
||||
err
|
||||
);
|
||||
}
|
||||
// If pre-compression isn't enabled, we should remove the old compressed file if it exists
|
||||
} else if let Some(compressed_path) = compressed_path(&output_file_location) {
|
||||
_ = std::fs::remove_file(compressed_path);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}));
|
||||
}
|
||||
for child in children {
|
||||
child.join().unwrap()?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get the path to the compressed version of a file
|
||||
fn compressed_path(path: &Path) -> Option<PathBuf> {
|
||||
let new_extension = match path.extension() {
|
||||
Some(ext) => {
|
||||
if ext.to_string_lossy().to_lowercase().ends_with("br") {
|
||||
return None;
|
||||
}
|
||||
let mut ext = ext.to_os_string();
|
||||
ext.push(".br");
|
||||
ext
|
||||
}
|
||||
None => OsString::from("br"),
|
||||
};
|
||||
|
||||
Some(path.with_extension(new_extension))
|
||||
}
|
||||
|
||||
/// pre-compress a file with brotli
|
||||
pub(crate) fn pre_compress_file(path: &Path) -> std::io::Result<()> {
|
||||
let Some(compressed_path) = compressed_path(path) else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
let file = std::fs::File::open(path)?;
|
||||
let mut stream = std::io::BufReader::new(file);
|
||||
let mut buffer = std::fs::File::create(compressed_path)?;
|
||||
let params = BrotliEncoderParams::default();
|
||||
brotli::BrotliCompress(&mut stream, &mut buffer, ¶ms)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// pre-compress all files in a folder
|
||||
pub(crate) fn pre_compress_folder(path: &Path, pre_compress: bool) -> std::io::Result<()> {
|
||||
let walk_dir = WalkDir::new(path);
|
||||
for entry in walk_dir.into_iter().filter_map(|e| e.ok()) {
|
||||
let entry_path = entry.path();
|
||||
if entry_path.is_file() {
|
||||
if pre_compress {
|
||||
if let Err(err) = pre_compress_file(entry_path) {
|
||||
tracing::error!("Failed to pre-compress file {entry_path:?}: {err}");
|
||||
}
|
||||
}
|
||||
// If pre-compression isn't enabled, we should remove the old compressed file if it exists
|
||||
else if let Some(compressed_path) = compressed_path(entry_path) {
|
||||
_ = std::fs::remove_file(compressed_path);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
|
@ -2,23 +2,17 @@
|
|||
#![doc(html_logo_url = "https://avatars.githubusercontent.com/u/79236386")]
|
||||
#![doc(html_favicon_url = "https://avatars.githubusercontent.com/u/79236386")]
|
||||
|
||||
pub(crate) mod assets;
|
||||
pub(crate) mod build_info;
|
||||
pub(crate) mod builder;
|
||||
pub(crate) mod bundle_utils;
|
||||
pub(crate) mod bundler;
|
||||
pub(crate) mod cli;
|
||||
pub(crate) mod config;
|
||||
pub(crate) mod dioxus_crate;
|
||||
pub(crate) mod error;
|
||||
pub(crate) mod fastfs;
|
||||
pub(crate) mod metadata;
|
||||
pub(crate) mod serve;
|
||||
pub(crate) mod settings;
|
||||
pub(crate) mod tooling;
|
||||
pub(crate) mod tracer;
|
||||
pub mod assets;
|
||||
pub mod builder;
|
||||
pub mod cli;
|
||||
pub mod dioxus_crate;
|
||||
pub mod dx_build_info;
|
||||
pub mod error;
|
||||
pub mod metadata;
|
||||
pub mod serve;
|
||||
pub mod settings;
|
||||
pub mod tracer;
|
||||
|
||||
pub(crate) use builder::Platform;
|
||||
pub(crate) use cli::*;
|
||||
pub(crate) use dioxus_crate::*;
|
||||
pub(crate) use error::*;
|
||||
|
@ -27,45 +21,67 @@ pub(crate) use tracer::{TraceMsg, TraceSrc};
|
|||
|
||||
use anyhow::Context;
|
||||
use clap::Parser;
|
||||
|
||||
use Commands::*;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> anyhow::Result<()> {
|
||||
// If we have a magic env var set, we want to operate as a linker instead.
|
||||
if link::should_link() {
|
||||
return link::dump_link_args();
|
||||
}
|
||||
let args = Cli::parse();
|
||||
|
||||
// Start the tracer so it captures logs from the build engine before we start the builder
|
||||
crate::serve::TraceController::initialize();
|
||||
let log_control = tracer::build_tracing();
|
||||
|
||||
match Cli::parse().action {
|
||||
match args.action {
|
||||
Translate(opts) => opts
|
||||
.translate()
|
||||
.context("⛔️ Translation of HTML into RSX failed:"),
|
||||
.context(error_wrapper("Translation of HTML into RSX failed")),
|
||||
|
||||
New(opts) => opts.create().context("🚫 Creating new project failed:"),
|
||||
New(opts) => opts
|
||||
.create()
|
||||
.context(error_wrapper("Creating new project failed")),
|
||||
|
||||
Init(opts) => opts.init().context("🚫 Initializing a new project failed:"),
|
||||
Init(opts) => opts
|
||||
.init()
|
||||
.context(error_wrapper("Initializing a new project failed")),
|
||||
|
||||
Config(opts) => opts.config().context("🚫 Configuring new project failed:"),
|
||||
Config(opts) => opts
|
||||
.config()
|
||||
.context(error_wrapper("Configuring new project failed")),
|
||||
|
||||
Autoformat(opts) => opts.autoformat().context("🚫 Error autoformatting RSX:"),
|
||||
Autoformat(opts) => opts
|
||||
.autoformat()
|
||||
.context(error_wrapper("Error autoformatting RSX")),
|
||||
|
||||
Check(opts) => opts.check().await.context("🚫 Error checking RSX:"),
|
||||
Check(opts) => opts
|
||||
.check()
|
||||
.await
|
||||
.context(error_wrapper("Error checking RSX")),
|
||||
|
||||
Clean(opts) => opts.clean().context("🚫 Cleaning project failed:"),
|
||||
Link(opts) => opts
|
||||
.link()
|
||||
.context(error_wrapper("Error with linker passthrough")),
|
||||
|
||||
Build(opts) => opts.run().await.context("🚫 Building project failed:"),
|
||||
Build(mut opts) => opts
|
||||
.run()
|
||||
.await
|
||||
.context(error_wrapper("Building project failed")),
|
||||
|
||||
Serve(opts) => opts.serve().await.context("🚫 Serving project failed:"),
|
||||
Clean(opts) => opts
|
||||
.clean()
|
||||
.context(error_wrapper("Cleaning project failed")),
|
||||
|
||||
Bundle(opts) => opts.bundle().await.context("🚫 Bundling project failed:"),
|
||||
Serve(opts) => opts
|
||||
.serve(log_control)
|
||||
.await
|
||||
.context(error_wrapper("Serving project failed")),
|
||||
|
||||
Run(opts) => opts.run().await.context("🚫 Running project failed:"),
|
||||
|
||||
HttpServer(opts) => opts.serve().await.context("🚫 Serving project failed:"),
|
||||
|
||||
Doctor(opts) => opts.run().await.context("🚫 Checking project failed:"),
|
||||
Bundle(opts) => opts
|
||||
.bundle()
|
||||
.await
|
||||
.context(error_wrapper("Bundling project failed")),
|
||||
}
|
||||
}
|
||||
|
||||
/// Simplifies error messages that use the same pattern.
|
||||
fn error_wrapper(message: &str) -> String {
|
||||
format!("🚫 {message}:")
|
||||
}
|
||||
|
|
|
@ -8,12 +8,12 @@ use std::{
|
|||
};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct CargoError {
|
||||
pub struct CargoError {
|
||||
msg: String,
|
||||
}
|
||||
|
||||
impl CargoError {
|
||||
pub(crate) fn new(msg: String) -> Self {
|
||||
pub fn new(msg: String) -> Self {
|
||||
Self { msg }
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1 +1,189 @@
|
|||
use crate::builder::BuildRequest;
|
||||
use crate::builder::BuildResult;
|
||||
use crate::builder::TargetPlatform;
|
||||
use crate::builder::UpdateBuildProgress;
|
||||
use crate::dioxus_crate::DioxusCrate;
|
||||
use crate::serve::next_or_pending;
|
||||
use crate::serve::Serve;
|
||||
use crate::Result;
|
||||
use futures_channel::mpsc::UnboundedReceiver;
|
||||
use futures_util::future::OptionFuture;
|
||||
use futures_util::stream::select_all;
|
||||
use futures_util::StreamExt;
|
||||
use std::process::Stdio;
|
||||
use tokio::{
|
||||
process::{Child, Command},
|
||||
task::JoinHandle,
|
||||
};
|
||||
|
||||
/// A handle to ongoing builds and then the spawned tasks themselves
|
||||
pub struct Builder {
|
||||
/// The results of the build
|
||||
build_results: Option<JoinHandle<Result<Vec<BuildResult>>>>,
|
||||
|
||||
/// The progress of the builds
|
||||
build_progress: Vec<(TargetPlatform, UnboundedReceiver<UpdateBuildProgress>)>,
|
||||
|
||||
/// The application we are building
|
||||
config: DioxusCrate,
|
||||
|
||||
/// The arguments for the build
|
||||
serve: Serve,
|
||||
|
||||
/// The children of the build process
|
||||
pub children: Vec<(TargetPlatform, Child)>,
|
||||
}
|
||||
|
||||
impl Builder {
|
||||
/// Create a new builder
|
||||
pub fn new(config: &DioxusCrate, serve: &Serve) -> Self {
|
||||
let serve = serve.clone();
|
||||
let config = config.clone();
|
||||
Self {
|
||||
build_results: None,
|
||||
build_progress: Vec::new(),
|
||||
config: config.clone(),
|
||||
serve,
|
||||
children: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Start a new build - killing the current one if it exists
|
||||
pub fn build(&mut self) -> Result<()> {
|
||||
self.shutdown();
|
||||
let build_requests =
|
||||
BuildRequest::create(true, &self.config, self.serve.build_arguments.clone())?;
|
||||
|
||||
let mut set = tokio::task::JoinSet::new();
|
||||
|
||||
for build_request in build_requests {
|
||||
let (mut tx, rx) = futures_channel::mpsc::unbounded();
|
||||
self.build_progress
|
||||
.push((build_request.target_platform, rx));
|
||||
set.spawn(async move {
|
||||
let res = build_request.build(tx.clone()).await;
|
||||
|
||||
if let Err(err) = &res {
|
||||
let _ = tx.start_send(UpdateBuildProgress {
|
||||
stage: crate::builder::Stage::Finished,
|
||||
update: crate::builder::UpdateStage::Failed(format!("{err}")),
|
||||
});
|
||||
}
|
||||
|
||||
res
|
||||
});
|
||||
}
|
||||
|
||||
self.build_results = Some(tokio::spawn(async move {
|
||||
let mut all_results = Vec::new();
|
||||
while let Some(result) = set.join_next().await {
|
||||
let res = result.map_err(|err| {
|
||||
crate::Error::Unique(format!("Panic while building project: {err:?}"))
|
||||
})??;
|
||||
|
||||
all_results.push(res);
|
||||
}
|
||||
Ok(all_results)
|
||||
}));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Wait for any new updates to the builder - either it completed or gave us a message etc
|
||||
pub async fn wait(&mut self) -> Result<BuilderUpdate> {
|
||||
// Wait for build progress
|
||||
let mut next = select_all(
|
||||
self.build_progress
|
||||
.iter_mut()
|
||||
.map(|(platform, rx)| rx.map(move |update| (*platform, update))),
|
||||
);
|
||||
let next = next_or_pending(next.next());
|
||||
|
||||
// The ongoing builds directly
|
||||
let results: OptionFuture<_> = self.build_results.as_mut().into();
|
||||
let results = next_or_pending(results);
|
||||
|
||||
// The process exits
|
||||
let children_empty = self.children.is_empty();
|
||||
let process_exited = self
|
||||
.children
|
||||
.iter_mut()
|
||||
.map(|(target, child)| Box::pin(async move { (*target, child.wait().await) }));
|
||||
let process_exited = async move {
|
||||
if children_empty {
|
||||
return futures_util::future::pending().await;
|
||||
}
|
||||
futures_util::future::select_all(process_exited).await
|
||||
};
|
||||
|
||||
// Wait for the next build result
|
||||
tokio::select! {
|
||||
build_results = results => {
|
||||
self.build_results = None;
|
||||
|
||||
// If we have a build result, bubble it up to the main loop
|
||||
let build_results = build_results.map_err(|_| crate::Error::Unique("Build join failed".to_string()))??;
|
||||
|
||||
Ok(BuilderUpdate::Ready { results: build_results })
|
||||
}
|
||||
(platform, update) = next => {
|
||||
// If we have a build progress, send it to the screen
|
||||
Ok(BuilderUpdate::Progress { platform, update })
|
||||
}
|
||||
((target, exit_status), _, _) = process_exited => {
|
||||
Ok(BuilderUpdate::ProcessExited { status: exit_status, target_platform: target })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Shutdown the current build process
|
||||
pub(crate) fn shutdown(&mut self) {
|
||||
for (_, mut child) in self.children.drain(..) {
|
||||
// Gracefully shtudown the desktop app
|
||||
// It might have a receiver to do some cleanup stuff
|
||||
if let Some(pid) = child.id() {
|
||||
// on unix, we can send a signal to the process to shut down
|
||||
#[cfg(unix)]
|
||||
{
|
||||
_ = Command::new("kill")
|
||||
.args(["-s", "TERM", &pid.to_string()])
|
||||
.stderr(Stdio::null())
|
||||
.stdout(Stdio::null())
|
||||
.spawn();
|
||||
}
|
||||
|
||||
// on windows, use the `taskkill` command
|
||||
#[cfg(windows)]
|
||||
{
|
||||
_ = Command::new("taskkill")
|
||||
.args(["/F", "/PID", &pid.to_string()])
|
||||
.stderr(Stdio::null())
|
||||
.stdout(Stdio::null())
|
||||
.spawn();
|
||||
}
|
||||
}
|
||||
|
||||
// Todo: add a timeout here to kill the process if it doesn't shut down within a reasonable time
|
||||
_ = child.start_kill();
|
||||
}
|
||||
|
||||
if let Some(tasks) = self.build_results.take() {
|
||||
tasks.abort();
|
||||
}
|
||||
self.build_progress.clear();
|
||||
}
|
||||
}
|
||||
|
||||
pub enum BuilderUpdate {
|
||||
Progress {
|
||||
platform: TargetPlatform,
|
||||
update: UpdateBuildProgress,
|
||||
},
|
||||
Ready {
|
||||
results: Vec<BuildResult>,
|
||||
},
|
||||
ProcessExited {
|
||||
target_platform: TargetPlatform,
|
||||
status: Result<std::process::ExitStatus, std::io::Error>,
|
||||
},
|
||||
}
|
||||
|
|
|
@ -1,2 +0,0 @@
|
|||
/// Scrollable console widget
|
||||
pub struct ConsoleWidget {}
|
|
@ -1,32 +0,0 @@
|
|||
/// Detects if `dx` is being ran in a WSL environment.
|
||||
///
|
||||
/// We determine this based on whether the keyword `microsoft` or `wsl` is contained within the [`WSL_1`] or [`WSL_2`] files.
|
||||
/// This may fail in the future as it isn't guaranteed by Microsoft.
|
||||
/// See https://github.com/microsoft/WSL/issues/423#issuecomment-221627364
|
||||
pub(crate) fn is_wsl() -> bool {
|
||||
const WSL_1: &str = "/proc/sys/kernel/osrelease";
|
||||
const WSL_2: &str = "/proc/version";
|
||||
const WSL_KEYWORDS: [&str; 2] = ["microsoft", "wsl"];
|
||||
|
||||
// Test 1st File
|
||||
if let Ok(content) = std::fs::read_to_string(WSL_1) {
|
||||
let lowercase = content.to_lowercase();
|
||||
for keyword in WSL_KEYWORDS {
|
||||
if lowercase.contains(keyword) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Test 2nd File
|
||||
if let Ok(content) = std::fs::read_to_string(WSL_2) {
|
||||
let lowercase = content.to_lowercase();
|
||||
for keyword in WSL_KEYWORDS {
|
||||
if lowercase.contains(keyword) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
|
@ -1,189 +0,0 @@
|
|||
use crate::{builder::Platform, bundler::AppBundle};
|
||||
use crate::{Result, TraceSrc};
|
||||
use std::{net::SocketAddr, path::PathBuf, process::Stdio};
|
||||
use tokio::{
|
||||
io::AsyncBufReadExt,
|
||||
process::{Child, Command},
|
||||
};
|
||||
use tokio::{
|
||||
io::{BufReader, Lines},
|
||||
process::{ChildStderr, ChildStdout},
|
||||
};
|
||||
use uuid::Uuid;
|
||||
|
||||
/// A handle to a running app
|
||||
pub(crate) struct AppHandle {
|
||||
pub(crate) _id: Uuid,
|
||||
pub(crate) app: AppBundle,
|
||||
pub(crate) executable: PathBuf,
|
||||
pub(crate) child: Option<Child>,
|
||||
pub(crate) stdout: Option<Lines<BufReader<ChildStdout>>>,
|
||||
pub(crate) stderr: Option<Lines<BufReader<ChildStderr>>>,
|
||||
}
|
||||
|
||||
impl AppHandle {
|
||||
pub async fn start(
|
||||
app: AppBundle,
|
||||
devserver_ip: SocketAddr,
|
||||
fullstack_address: Option<SocketAddr>,
|
||||
) -> Result<Self> {
|
||||
let platform = app.build.platform();
|
||||
let ip = devserver_ip.to_string();
|
||||
|
||||
if platform == Platform::Server || app.build.build.fullstack {
|
||||
tracing::info!(
|
||||
"Proxying fullstack server from port {:?}",
|
||||
fullstack_address
|
||||
);
|
||||
}
|
||||
|
||||
// let work_dir = std::env::temp_dir();
|
||||
let work_dir = app.build.krate.out_dir().join("launch");
|
||||
std::fs::create_dir_all(&work_dir)?;
|
||||
let executable = app.finish(work_dir).await?;
|
||||
|
||||
let mut handle = AppHandle {
|
||||
app,
|
||||
executable,
|
||||
_id: Uuid::new_v4(),
|
||||
child: None,
|
||||
stderr: None,
|
||||
stdout: None,
|
||||
};
|
||||
|
||||
match platform {
|
||||
Platform::Web => {
|
||||
tracing::info!(dx_src = ?TraceSrc::Dev, "Serving web app on http://{} 🎉", ip);
|
||||
}
|
||||
Platform::Desktop => {
|
||||
tracing::info!(dx_src = ?TraceSrc::Dev, "Launching desktop app at {} 🎉", handle.executable.display());
|
||||
}
|
||||
Platform::Server => {
|
||||
if let Some(fullstack_address) = fullstack_address {
|
||||
tracing::info!(
|
||||
dx_src = ?TraceSrc::Dev,
|
||||
"Launching fullstack server on http://{:?} 🎉",
|
||||
fullstack_address
|
||||
);
|
||||
}
|
||||
}
|
||||
Platform::Ios => {}
|
||||
Platform::Android => {}
|
||||
Platform::Liveview => {
|
||||
if let Some(fullstack_address) = fullstack_address {
|
||||
tracing::info!(
|
||||
dx_src = ?TraceSrc::Dev,
|
||||
"Launching liveview server on http://{:?} 🎉",
|
||||
fullstack_address
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// open the exe with some arguments/envvars/etc
|
||||
// we're going to try and configure this binary from the environment, if we can
|
||||
//
|
||||
// web can't be configured like this, so instead, we'll need to plumb a meta tag into the
|
||||
// index.html during dev
|
||||
match handle.app.build.platform() {
|
||||
Platform::Desktop | Platform::Server | Platform::Liveview => {
|
||||
let mut cmd = Command::new(handle.executable.clone());
|
||||
cmd.env(
|
||||
dioxus_runtime_config::FULLSTACK_ADDRESS_ENV,
|
||||
fullstack_address
|
||||
.as_ref()
|
||||
.map(|addr| addr.to_string())
|
||||
.unwrap_or_else(|| "127.0.0.1:8080".to_string()),
|
||||
)
|
||||
.env(
|
||||
dioxus_runtime_config::IOS_DEVSERVER_ADDR_ENV,
|
||||
format!("ws://{}/_dioxus", ip),
|
||||
)
|
||||
.env(
|
||||
dioxus_runtime_config::DEVSERVER_RAW_ADDR_ENV,
|
||||
format!("ws://{}/_dioxus", ip),
|
||||
)
|
||||
.env("CARGO_MANIFEST_DIR", handle.app.build.krate.crate_dir())
|
||||
.env(
|
||||
"SIMCTL_CHILD_CARGO_MANIFEST_DIR",
|
||||
handle.app.build.krate.crate_dir(),
|
||||
)
|
||||
.stderr(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.kill_on_drop(true);
|
||||
|
||||
let mut child = cmd.spawn()?;
|
||||
let stdout = BufReader::new(child.stdout.take().unwrap());
|
||||
let stderr = BufReader::new(child.stderr.take().unwrap());
|
||||
handle.stdout = Some(stdout.lines());
|
||||
handle.stderr = Some(stderr.lines());
|
||||
handle.child = Some(child);
|
||||
}
|
||||
Platform::Web => {}
|
||||
Platform::Ios => {}
|
||||
Platform::Android => {}
|
||||
}
|
||||
|
||||
Ok(handle)
|
||||
}
|
||||
/// Update an asset in the running apps
|
||||
///
|
||||
/// Might need to upload the asset to the simulator or overwrite it within the bundle
|
||||
///
|
||||
/// Returns the name of the asset in the bundle if it exists
|
||||
pub(crate) fn hotreload_asset(&self, path: &PathBuf) -> Option<PathBuf> {
|
||||
let resource = self.app.assets.assets.get(path).cloned()?;
|
||||
|
||||
_ = self
|
||||
.app
|
||||
.assets
|
||||
.copy_asset_to(&self.app.asset_dir(), path, false, false);
|
||||
|
||||
Some(resource.bundled.into())
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
fn open_bundled_ios_app(&self, build: &AppBundle) -> std::io::Result<Option<Child>> {
|
||||
// command = "xcrun"
|
||||
// args = [
|
||||
// "simctl",
|
||||
// "install",
|
||||
// "booted",
|
||||
// "target/aarch64-apple-ios-sim/debug/bundle/ios/DioxusApp.app",
|
||||
// ]
|
||||
|
||||
// [tasks.run_ios_sim]
|
||||
// args = ["simctl", "launch", "--console", "booted", "com.dioxuslabs"]
|
||||
// command = "xcrun"
|
||||
// dependencies = ["build_ios_sim", "install_ios_sim"]
|
||||
|
||||
// [tasks.serve-sim]
|
||||
// dependencies = ["build_ios_sim", "install_ios_sim", "run_ios_sim"]
|
||||
|
||||
// APP_PATH="target/aarch64-apple-ios/debug/bundle/ios/DioxusApp.app"
|
||||
|
||||
// # get the device id by jq-ing the json of the device list
|
||||
// xcrun devicectl list devices --json-output target/deviceid.json
|
||||
// DEVICE_UUID=$(jq -r '.result.devices[0].identifier' target/deviceid.json)
|
||||
|
||||
// xcrun devicectl device install app --device "${DEVICE_UUID}" "${APP_PATH}" --json-output target/xcrun.json
|
||||
|
||||
// # get the installation url by jq-ing the json of the device install
|
||||
// INSTALLATION_URL=$(jq -r '.result.installedApplications[0].installationURL' target/xcrun.json)
|
||||
|
||||
// # launch the app
|
||||
// # todo: we can just background it immediately and then pick it up for loading its logs
|
||||
// xcrun devicectl device process launch --device "${DEVICE_UUID}" "${INSTALLATION_URL}"
|
||||
|
||||
// # # launch the app and put it in background
|
||||
// # xcrun devicectl device process launch --no-activate --verbose --device "${DEVICE_UUID}" "${INSTALLATION_URL}" --json-output "${XCRUN_DEVICE_PROCESS_LAUNCH_LOG_DIR}"
|
||||
|
||||
// # # Extract background PID of status app
|
||||
// # STATUS_PID=$(jq -r '.result.process.processIdentifier' "${XCRUN_DEVICE_PROCESS_LAUNCH_LOG_DIR}")
|
||||
// # "${GIT_ROOT}/scripts/wait-for-metro-port.sh" 2>&1
|
||||
|
||||
// # # now that metro is ready, resume the app from background
|
||||
// # xcrun devicectl device process resume --device "${DEVICE_UUID}" --pid "${STATUS_PID}" > "${XCRUN_DEVICE_PROCESS_RESUME_LOG_DIR}" 2>&1
|
||||
todo!("Open mobile apps")
|
||||
}
|
||||
}
|
|
@ -1,37 +1,37 @@
|
|||
use dioxus_core::internal::{HotReloadTemplateWithLocation, HotReloadedTemplate};
|
||||
use dioxus_core_types::HotReloadingContext;
|
||||
use dioxus_rsx::CallBody;
|
||||
use dioxus_rsx_hotreload::{diff_rsx, ChangedRsx, HotReloadResult};
|
||||
use dioxus_rsx_hotreload::{diff_rsx, ChangedRsx};
|
||||
use krates::cm::MetadataCommand;
|
||||
use krates::Cmd;
|
||||
pub(crate) use std::collections::HashMap;
|
||||
pub use std::collections::HashMap;
|
||||
use std::{ffi::OsStr, path::PathBuf};
|
||||
pub(crate) use std::{fs, io, path::Path};
|
||||
pub(crate) use std::{fs::File, io::Read};
|
||||
pub use std::{fs, io, path::Path};
|
||||
pub use std::{fs::File, io::Read};
|
||||
use syn::spanned::Spanned;
|
||||
|
||||
pub(crate) struct FileMap {
|
||||
pub(crate) map: HashMap<PathBuf, CachedSynFile>,
|
||||
pub struct FileMap {
|
||||
pub map: HashMap<PathBuf, CachedSynFile>,
|
||||
|
||||
/// Any errors that occurred while building the FileMap that were not fatal
|
||||
pub(crate) errors: Vec<io::Error>,
|
||||
pub errors: Vec<io::Error>,
|
||||
|
||||
pub(crate) in_workspace: HashMap<PathBuf, Option<PathBuf>>,
|
||||
pub in_workspace: HashMap<PathBuf, Option<PathBuf>>,
|
||||
}
|
||||
|
||||
/// A cached file that has been parsed
|
||||
///
|
||||
/// We store the templates found in this file
|
||||
pub(crate) struct CachedSynFile {
|
||||
pub(crate) raw: String,
|
||||
pub(crate) templates: HashMap<String, HotReloadedTemplate>,
|
||||
pub struct CachedSynFile {
|
||||
pub raw: String,
|
||||
pub templates: HashMap<String, HotReloadedTemplate>,
|
||||
}
|
||||
|
||||
impl FileMap {
|
||||
/// Create a new FileMap from a crate directory
|
||||
///
|
||||
/// TODO: this should be created with a gitignore filter
|
||||
pub(crate) fn create<Ctx: HotReloadingContext>(path: PathBuf) -> io::Result<FileMap> {
|
||||
pub fn create<Ctx: HotReloadingContext>(path: PathBuf) -> io::Result<FileMap> {
|
||||
Self::create_with_filter::<Ctx>(path, |p| {
|
||||
// skip some stuff we know is large by default
|
||||
p.file_name() == Some(OsStr::new("target"))
|
||||
|
@ -43,7 +43,7 @@ impl FileMap {
|
|||
///
|
||||
/// Takes a filter that when returns true, the file will be filtered out (ie not tracked)
|
||||
/// Note that this is inverted from a typical .filter() method.
|
||||
pub(crate) fn create_with_filter<Ctx: HotReloadingContext>(
|
||||
pub fn create_with_filter<Ctx: HotReloadingContext>(
|
||||
crate_dir: PathBuf,
|
||||
mut filter: impl FnMut(&Path) -> bool,
|
||||
) -> io::Result<FileMap> {
|
||||
|
@ -63,7 +63,7 @@ impl FileMap {
|
|||
/// Start watching assets for changes
|
||||
///
|
||||
/// This just diffs every file against itself and populates the tracked assets as it goes
|
||||
pub(crate) fn load_assets<Ctx: HotReloadingContext>(&mut self, crate_dir: &Path) {
|
||||
pub fn load_assets<Ctx: HotReloadingContext>(&mut self, crate_dir: &Path) {
|
||||
let keys = self.map.keys().cloned().collect::<Vec<_>>();
|
||||
for file in keys {
|
||||
_ = self.update_rsx::<Ctx>(file.as_path(), crate_dir);
|
||||
|
@ -82,7 +82,7 @@ impl FileMap {
|
|||
}
|
||||
|
||||
/// Try to update the rsx in a file
|
||||
pub(crate) fn update_rsx<Ctx: HotReloadingContext>(
|
||||
pub fn update_rsx<Ctx: HotReloadingContext>(
|
||||
&mut self,
|
||||
file_path: &Path,
|
||||
crate_dir: &Path,
|
||||
|
@ -154,7 +154,7 @@ impl FileMap {
|
|||
let template_location = template_location(old_start, file);
|
||||
|
||||
// Returns a list of templates that are hotreloadable
|
||||
let hotreload_result = HotReloadResult::new::<Ctx>(
|
||||
let hotreload_result = dioxus_rsx_hotreload::HotReloadResult::new::<Ctx>(
|
||||
&old_call_body.body,
|
||||
&new_call_body.body,
|
||||
template_location.clone(),
|
||||
|
@ -219,7 +219,7 @@ impl FileMap {
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) fn template_location(old_start: proc_macro2::LineColumn, file: &Path) -> String {
|
||||
pub fn template_location(old_start: proc_macro2::LineColumn, file: &Path) -> String {
|
||||
let line = old_start.line;
|
||||
let column = old_start.column + 1;
|
||||
|
||||
|
@ -233,7 +233,7 @@ pub(crate) fn template_location(old_start: proc_macro2::LineColumn, file: &Path)
|
|||
path + ":" + line.to_string().as_str() + ":" + column.to_string().as_str()
|
||||
}
|
||||
|
||||
pub(crate) fn format_template_name(name: &str, index: usize) -> String {
|
||||
pub fn format_template_name(name: &str, index: usize) -> String {
|
||||
format!("{}:{}", name, index)
|
||||
}
|
||||
|
||||
|
@ -290,7 +290,7 @@ fn find_rs_files(root: PathBuf, filter: &mut impl FnMut(&Path) -> bool) -> FileM
|
|||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum HotreloadError {
|
||||
pub enum HotreloadError {
|
||||
Failure(io::Error),
|
||||
Parse,
|
||||
Notreloadable,
|
||||
|
|
|
@ -1,242 +1,243 @@
|
|||
use crate::builder::{BuildUpdate, BuildUpdateProgress, Builder, Platform, Stage, UpdateStage};
|
||||
use crate::cli::serve::ServeArgs;
|
||||
use crate::DioxusCrate;
|
||||
use crate::Result;
|
||||
use crate::TraceSrc;
|
||||
use std::ops::ControlFlow;
|
||||
use std::future::{poll_fn, Future, IntoFuture};
|
||||
use std::task::Poll;
|
||||
|
||||
mod console_widget;
|
||||
mod detect;
|
||||
mod handle;
|
||||
use crate::cli::serve::Serve;
|
||||
use crate::dioxus_crate::DioxusCrate;
|
||||
use crate::tracer::CLILogControl;
|
||||
use crate::Result;
|
||||
use crate::{
|
||||
builder::{Stage, TargetPlatform, UpdateBuildProgress, UpdateStage},
|
||||
TraceSrc,
|
||||
};
|
||||
use futures_util::FutureExt;
|
||||
use tokio::task::yield_now;
|
||||
|
||||
mod builder;
|
||||
mod hot_reloading_file_map;
|
||||
mod logs_tab;
|
||||
mod output;
|
||||
mod proxy;
|
||||
mod runner;
|
||||
mod server;
|
||||
mod tracer;
|
||||
mod update;
|
||||
mod watcher;
|
||||
|
||||
pub(crate) use handle::*;
|
||||
pub(crate) use output::*;
|
||||
pub(crate) use runner::*;
|
||||
pub(crate) use server::*;
|
||||
pub(crate) use tracer::*;
|
||||
pub(crate) use update::*;
|
||||
pub(crate) use watcher::*;
|
||||
use builder::*;
|
||||
use output::*;
|
||||
use server::*;
|
||||
use watcher::*;
|
||||
|
||||
/// For *all* builds, the CLI spins up a dedicated webserver, file watcher, and build infrastructure to serve the project.
|
||||
/// For *all* builds the CLI spins up a dedicated webserver, file watcher, and build infrastructure to serve the project.
|
||||
///
|
||||
/// This includes web, desktop, mobile, fullstack, etc.
|
||||
///
|
||||
/// Platform specifics:
|
||||
/// -------------------
|
||||
/// - Web: we need to attach a filesystem server to our devtools webserver to serve the project. We
|
||||
/// want to emulate GithubPages here since most folks are deploying there and expect things like
|
||||
/// basepath to match.
|
||||
/// - Desktop: We spin up the dev server but without a filesystem server.
|
||||
/// - Mobile: Basically the same as desktop.
|
||||
/// - Web: we need to attach a filesystem server to our devtools webserver to serve the project. We
|
||||
/// want to emulate GithubPages here since most folks are deploying there and expect things like
|
||||
/// basepath to match.
|
||||
/// - Fullstack: We spin up the same dev server but in this case the fullstack server itself needs to
|
||||
/// proxy all dev requests to our dev server
|
||||
/// - Desktop: We spin up the dev server but without a filesystem server.
|
||||
/// - Mobile: Basically the same as desktop.
|
||||
///
|
||||
/// When fullstack is enabled, we'll also build for the `server` target and then hotreload the server.
|
||||
/// The "server" is special here since "fullstack" is functionaly just an addition to the regular client
|
||||
/// setup.
|
||||
/// Notes:
|
||||
/// - All filesystem changes are tracked here
|
||||
/// - We send all updates to connected websocket connections. Even desktop connects via the websocket
|
||||
/// - Right now desktop compiles tokio-tungstenite to do the connection but we could in theory reuse
|
||||
/// the websocket logic from the webview for thinner builds.
|
||||
///
|
||||
/// Todos(Jon):
|
||||
/// - I'd love to be able to configure the CLI while it's running so we can change settings on the fly.
|
||||
/// - I'd love to be able to configure the CLI while it's running so we can change settingaon the fly.
|
||||
/// This would require some light refactoring and potentially pulling in something like ratatui.
|
||||
/// - Build a custom subscriber for logs by tools within this
|
||||
/// - Handle logs from the build engine separately?
|
||||
/// - Consume logs from the wasm for web/fullstack
|
||||
/// - I want us to be able to detect a `server_fn` in the project and then upgrade from a static server
|
||||
/// to a dynamic one on the fly.
|
||||
pub(crate) async fn serve_all(args: ServeArgs, krate: DioxusCrate) -> Result<()> {
|
||||
let mut tracer = tracer::TraceController::start();
|
||||
pub async fn serve_all(
|
||||
serve: Serve,
|
||||
dioxus_crate: DioxusCrate,
|
||||
log_control: CLILogControl,
|
||||
) -> Result<()> {
|
||||
// Start the screen first so we collect build logs.
|
||||
let mut screen = Output::start(&serve, log_control).expect("Failed to open terminal logger");
|
||||
let mut builder = Builder::new(&dioxus_crate, &serve);
|
||||
|
||||
// Note that starting the builder will queue up a build immediately
|
||||
let mut builder = Builder::start(&krate, args.build_args())?;
|
||||
let mut screen = Output::start(&args).expect("Failed to open terminal logger");
|
||||
let mut devserver = DevServer::start(&args, &krate);
|
||||
let mut watcher = Watcher::start(&args, &krate);
|
||||
let mut runner = AppRunner::start();
|
||||
// Start the first build
|
||||
builder.build()?;
|
||||
|
||||
let mut server = Server::start(&serve, &dioxus_crate);
|
||||
let mut watcher = Watcher::start(&serve, &dioxus_crate);
|
||||
|
||||
let is_hot_reload = serve.server_arguments.hot_reload.unwrap_or(true);
|
||||
|
||||
loop {
|
||||
// Make sure we don't hog the CPU: these loop { select! {} } blocks can starve the executor
|
||||
yield_now().await;
|
||||
|
||||
// Draw the state of the server to the screen
|
||||
screen.render(&args, &krate, &builder, &devserver, &watcher);
|
||||
screen.render(&serve, &dioxus_crate, &builder, &server, &watcher);
|
||||
|
||||
// And then wait for any updates before redrawing
|
||||
let msg = tokio::select! {
|
||||
msg = builder.wait() => ServeUpdate::BuildUpdate(msg),
|
||||
msg = watcher.wait() => msg,
|
||||
msg = devserver.wait() => msg,
|
||||
msg = screen.wait() => msg,
|
||||
msg = runner.wait() => msg,
|
||||
msg = tracer.wait() => msg,
|
||||
};
|
||||
tokio::select! {
|
||||
// rebuild the project or hotreload it
|
||||
_ = watcher.wait(), if is_hot_reload => {
|
||||
if !watcher.pending_changes() {
|
||||
continue
|
||||
}
|
||||
|
||||
let res = handle_update(
|
||||
msg,
|
||||
&args,
|
||||
&mut devserver,
|
||||
&mut screen,
|
||||
&mut builder,
|
||||
&mut runner,
|
||||
&mut watcher,
|
||||
);
|
||||
let changed_files = watcher.dequeue_changed_files(&dioxus_crate);
|
||||
let changed = changed_files.first().cloned();
|
||||
|
||||
match res.await {
|
||||
Ok(ControlFlow::Continue(())) => continue,
|
||||
Ok(ControlFlow::Break(())) => {}
|
||||
Err(e) => tracing::error!("Error in TUI: {}", e),
|
||||
// if change is hotreloadable, hotreload it
|
||||
// and then send that update to all connected clients
|
||||
if let Some(hr) = watcher.attempt_hot_reload(&dioxus_crate, changed_files) {
|
||||
// Only send a hotreload message for templates and assets - otherwise we'll just get a full rebuild
|
||||
if hr.templates.is_empty() && hr.assets.is_empty() && hr.unknown_files.is_empty() {
|
||||
continue
|
||||
}
|
||||
|
||||
if let Some(changed_path) = changed {
|
||||
let path_relative = changed_path.strip_prefix(dioxus_crate.crate_dir()).map(|p| p.display().to_string()).unwrap_or_else(|_| changed_path.display().to_string());
|
||||
tracing::info!(dx_src = ?TraceSrc::Dev, "Hotreloaded {}", path_relative);
|
||||
}
|
||||
|
||||
server.send_hotreload(hr).await;
|
||||
} else {
|
||||
// If the change is not binary patchable, rebuild the project
|
||||
// We're going to kick off a new build, interrupting the current build if it's ongoing
|
||||
builder.build()?;
|
||||
|
||||
// Clear the hot reload changes
|
||||
watcher.clear_hot_reload_changes();
|
||||
|
||||
// Tell the server to show a loading page for any new requests
|
||||
server.start_build().await;
|
||||
}
|
||||
}
|
||||
|
||||
// reload the page
|
||||
msg = server.wait() => {
|
||||
// Run the server in the background
|
||||
// Waiting for updates here lets us tap into when clients are added/removed
|
||||
match msg {
|
||||
Some(ServerUpdate::NewConnection) => {
|
||||
if let Some(msg) = watcher.applied_hot_reload_changes() {
|
||||
server.send_hotreload(msg).await;
|
||||
}
|
||||
}
|
||||
Some(ServerUpdate::Message(msg)) => {
|
||||
screen.new_ws_message(TargetPlatform::Web, msg);
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
}
|
||||
|
||||
// Handle updates from the build engine
|
||||
application = builder.wait() => {
|
||||
// Wait for logs from the build engine
|
||||
// These will cause us to update the screen
|
||||
// We also can check the status of the builds here in case we have multiple ongoing builds
|
||||
match application {
|
||||
Ok(BuilderUpdate::Progress { platform, update }) => {
|
||||
let update_clone = update.clone();
|
||||
screen.new_build_progress(platform, update_clone);
|
||||
server.update_build_status(screen.build_progress.progress(), update.stage.to_string()).await;
|
||||
|
||||
match update {
|
||||
// Send rebuild start message.
|
||||
UpdateBuildProgress { stage: Stage::Compiling, update: UpdateStage::Start } => server.send_reload_start().await,
|
||||
// Send rebuild failed message.
|
||||
UpdateBuildProgress { stage: Stage::Finished, update: UpdateStage::Failed(_) } => server.send_reload_failed().await,
|
||||
_ => {},
|
||||
}
|
||||
}
|
||||
Ok(BuilderUpdate::Ready { results }) => {
|
||||
if !results.is_empty() {
|
||||
builder.children.clear();
|
||||
}
|
||||
|
||||
// If we have a build result, open it
|
||||
for build_result in results.iter() {
|
||||
let child = build_result.open(&serve.server_arguments, server.fullstack_address(), &dioxus_crate.workspace_dir());
|
||||
match child {
|
||||
Ok(Some(child_proc)) => builder.children.push((build_result.target_platform, child_proc)),
|
||||
Err(e) => {
|
||||
tracing::error!(dx_src = ?TraceSrc::Build, "Failed to open build result: {e}");
|
||||
break;
|
||||
},
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
// Make sure we immediately capture the stdout/stderr of the executable -
|
||||
// otherwise it'll clobber our terminal output
|
||||
screen.new_ready_app(&mut builder, results);
|
||||
|
||||
// And then finally tell the server to reload
|
||||
server.send_reload_command().await;
|
||||
},
|
||||
|
||||
// If the process exited *cleanly*, we can exit
|
||||
Ok(BuilderUpdate::ProcessExited { status, target_platform }) => {
|
||||
// Then remove the child process
|
||||
builder.children.retain(|(platform, _)| *platform != target_platform);
|
||||
match status {
|
||||
Ok(status) => {
|
||||
if status.success() {
|
||||
break;
|
||||
}
|
||||
else {
|
||||
tracing::error!(dx_src = ?TraceSrc::Dev, "Application exited with status: {status}");
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
tracing::error!(dx_src = ?TraceSrc::Dev, "Application exited with error: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
server.send_build_error(err).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Handle input from the user using our settings
|
||||
res = screen.wait() => {
|
||||
match res {
|
||||
Ok(false) => {}
|
||||
// Request a rebuild.
|
||||
Ok(true) => {
|
||||
builder.build()?;
|
||||
server.start_build().await
|
||||
},
|
||||
// Shutdown the server.
|
||||
Err(_) => break,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
_ = devserver.shutdown().await;
|
||||
// Run our cleanup logic here - maybe printing as we go?
|
||||
// todo: more printing, logging, error handling in this phase
|
||||
_ = screen.shutdown();
|
||||
_ = builder.abort_all();
|
||||
_ = tracer.shutdown();
|
||||
_ = server.shutdown().await;
|
||||
builder.shutdown();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_update(
|
||||
msg: ServeUpdate,
|
||||
args: &ServeArgs,
|
||||
devserver: &mut DevServer,
|
||||
screen: &mut Output,
|
||||
builder: &mut Builder,
|
||||
runner: &mut AppRunner,
|
||||
watcher: &mut Watcher,
|
||||
) -> Result<ControlFlow<()>> {
|
||||
match msg {
|
||||
ServeUpdate::FilesChanged { files } => {
|
||||
if files.is_empty() || !args.should_hotreload() {
|
||||
return Ok(ControlFlow::Continue(()));
|
||||
}
|
||||
|
||||
// if change is hotreloadable, hotreload it
|
||||
// and then send that update to all connected clients
|
||||
if let Some(hr) = watcher.attempt_hot_reload(files, &runner) {
|
||||
// Only send a hotreload message for templates and assets - otherwise we'll just get a full rebuild
|
||||
if hr.templates.is_empty() && hr.assets.is_empty() {
|
||||
return Ok(ControlFlow::Continue(()));
|
||||
}
|
||||
|
||||
devserver.send_hotreload(hr).await;
|
||||
} else {
|
||||
// We're going to kick off a new build, interrupting the current build if it's ongoing
|
||||
builder.build(args.build_arguments.clone())?;
|
||||
|
||||
// Clear the hot reload changes
|
||||
watcher.clear_hot_reload_changes();
|
||||
|
||||
// Tell the server to show a loading page for any new requests
|
||||
devserver.start_build().await;
|
||||
}
|
||||
// Grab the output of a future that returns an option or wait forever
|
||||
pub(crate) fn next_or_pending<F, T>(f: F) -> impl Future<Output = T>
|
||||
where
|
||||
F: IntoFuture<Output = Option<T>>,
|
||||
{
|
||||
let pinned = f.into_future().fuse();
|
||||
let mut pinned = Box::pin(pinned);
|
||||
poll_fn(move |cx| {
|
||||
let next = pinned.as_mut().poll(cx);
|
||||
match next {
|
||||
Poll::Ready(Some(next)) => Poll::Ready(next),
|
||||
_ => Poll::Pending,
|
||||
}
|
||||
|
||||
// Run the server in the background
|
||||
// Waiting for updates here lets us tap into when clients are added/removed
|
||||
ServeUpdate::NewConnection => {
|
||||
if let Some(msg) = watcher.applied_hot_reload_changes() {
|
||||
devserver.send_hotreload(msg).await;
|
||||
}
|
||||
}
|
||||
|
||||
// Received a message from the devtools server - currently we only use this for
|
||||
// logging, so we just forward it the tui
|
||||
ServeUpdate::WsMessage(msg) => {
|
||||
screen.new_ws_message(Platform::Web, msg);
|
||||
}
|
||||
|
||||
// Wait for logs from the build engine
|
||||
// These will cause us to update the screen
|
||||
// We also can check the status of the builds here in case we have multiple ongoing builds
|
||||
ServeUpdate::BuildUpdate(BuildUpdate::Progress(update)) => {
|
||||
let update_clone = update.clone();
|
||||
screen.new_build_logs(update.platform, update_clone);
|
||||
devserver
|
||||
.update_build_status(screen.build_progress.progress(), update.stage.to_string())
|
||||
.await;
|
||||
|
||||
match update {
|
||||
// Send rebuild start message.
|
||||
BuildUpdateProgress {
|
||||
stage: Stage::Compiling,
|
||||
update: UpdateStage::Start,
|
||||
platform: _,
|
||||
} => devserver.send_reload_start().await,
|
||||
|
||||
// Send rebuild failed message.
|
||||
BuildUpdateProgress {
|
||||
stage: Stage::Finished,
|
||||
update: UpdateStage::Failed(_),
|
||||
platform: _,
|
||||
} => devserver.send_reload_failed().await,
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
ServeUpdate::BuildUpdate(BuildUpdate::BuildFailed { err, .. }) => {
|
||||
devserver.send_build_error(err).await;
|
||||
}
|
||||
|
||||
ServeUpdate::BuildUpdate(BuildUpdate::BuildReady { target, result }) => {
|
||||
tracing::info!(dx_src = ?TraceSrc::Dev, "Opening app for [{}]", target);
|
||||
|
||||
let handle = runner
|
||||
.open(result, devserver.ip, devserver.fullstack_address())
|
||||
.await;
|
||||
|
||||
match handle {
|
||||
Ok(handle) => {
|
||||
// Make sure we immediately capture the stdout/stderr of the executable -
|
||||
// otherwise it'll clobber our terminal output
|
||||
screen.new_ready_app(handle);
|
||||
|
||||
// And then finally tell the server to reload
|
||||
devserver.send_reload_command().await;
|
||||
}
|
||||
|
||||
Err(e) => {
|
||||
tracing::error!("Failed to open app: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// nothing - the builder just signals that there are no more pending builds
|
||||
ServeUpdate::BuildUpdate(BuildUpdate::AllFinished) => {}
|
||||
|
||||
// If the process exited *cleanly*, we can exit
|
||||
ServeUpdate::ProcessExited { status, platform } => {
|
||||
if !status.success() {
|
||||
tracing::error!(dx_src = ?TraceSrc::Dev, "Application [{platform}] exited with status: {status}");
|
||||
return Ok(ControlFlow::Break(()));
|
||||
}
|
||||
|
||||
runner.kill(platform).await;
|
||||
}
|
||||
|
||||
ServeUpdate::StdoutReceived { platform, msg } => {
|
||||
screen.push_stdout(platform, msg);
|
||||
}
|
||||
|
||||
ServeUpdate::StderrReceived { platform, msg } => {
|
||||
screen.push_stderr(platform, msg);
|
||||
}
|
||||
|
||||
ServeUpdate::TracingLog { log } => {
|
||||
screen.push_inner_log(log);
|
||||
}
|
||||
|
||||
ServeUpdate::TuiInput { event } => {
|
||||
let should_rebuild = screen.handle_input(event)?;
|
||||
if should_rebuild {
|
||||
builder.build(args.build_arguments.clone())?;
|
||||
devserver.start_build().await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(ControlFlow::Continue(()))
|
||||
})
|
||||
.fuse()
|
||||
}
|
||||
|
|
|
@ -1,8 +1,10 @@
|
|||
use crate::{
|
||||
builder::{BuildMessage, BuildUpdateProgress, Platform, Stage, UpdateStage},
|
||||
cli::serve::ServeArgs,
|
||||
builder::{BuildResult, UpdateStage},
|
||||
builder::{Stage, TargetPlatform, UpdateBuildProgress},
|
||||
dioxus_crate::DioxusCrate,
|
||||
serve::{Builder, Watcher},
|
||||
serve::next_or_pending,
|
||||
serve::Serve,
|
||||
serve::{Builder, Server, Watcher},
|
||||
tracer::CLILogControl,
|
||||
TraceMsg, TraceSrc,
|
||||
};
|
||||
|
@ -16,29 +18,24 @@ use crossterm::{
|
|||
tty::IsTty,
|
||||
ExecutableCommand,
|
||||
};
|
||||
use dioxus_devtools_types::ClientMsg;
|
||||
use futures_util::{
|
||||
future::{select_all, OptionFuture},
|
||||
Future, FutureExt, StreamExt,
|
||||
};
|
||||
use ratatui::{
|
||||
prelude::*,
|
||||
widgets::{Block, Borders, Paragraph},
|
||||
TerminalOptions, Viewport,
|
||||
};
|
||||
use dioxus_cli_config::{AddressArguments, Platform};
|
||||
use dioxus_hot_reload::ClientMsg;
|
||||
use futures_util::{future::select_all, Future, FutureExt, StreamExt};
|
||||
use ratatui::{prelude::*, TerminalOptions, Viewport};
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
collections::{HashMap, HashSet},
|
||||
fmt::Display,
|
||||
io::{self, stdout},
|
||||
rc::Rc,
|
||||
sync::atomic::Ordering,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
|
||||
use tokio::{
|
||||
io::{AsyncBufReadExt, BufReader, Lines},
|
||||
process::{ChildStderr, ChildStdout},
|
||||
};
|
||||
use tracing::Level;
|
||||
|
||||
use super::{AppHandle, DevServer, ServeUpdate};
|
||||
|
||||
mod render;
|
||||
|
||||
// How many lines should be scroll on each mouse scroll or arrow key input.
|
||||
|
@ -48,14 +45,38 @@ const SCROLL_MODIFIER: u16 = 4;
|
|||
// Scroll modifier key.
|
||||
const SCROLL_MODIFIER_KEY: KeyModifiers = KeyModifiers::SHIFT;
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct BuildProgress {
|
||||
current_builds: HashMap<TargetPlatform, ActiveBuild>,
|
||||
}
|
||||
|
||||
impl BuildProgress {
|
||||
pub fn progress(&self) -> f64 {
|
||||
self.current_builds
|
||||
.values()
|
||||
.min_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal))
|
||||
.map(|build| match build.stage {
|
||||
Stage::Initializing => 0.0,
|
||||
Stage::InstallingWasmTooling => 0.0,
|
||||
Stage::Compiling => build.progress,
|
||||
Stage::OptimizingWasm | Stage::OptimizingAssets | Stage::Finished => 1.0,
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Output {
|
||||
term: Rc<RefCell<Option<TerminalBackend>>>,
|
||||
log_control: CLILogControl,
|
||||
|
||||
// optional since when there's no tty there's no eventstream to read from - just stdin
|
||||
events: Option<EventStream>,
|
||||
|
||||
pub(crate) build_progress: BuildProgress,
|
||||
running_apps: HashMap<TargetPlatform, RunningApp>,
|
||||
|
||||
// A list of all messages from build, dev, app, and more.
|
||||
messages: Vec<ConsoleMessage>,
|
||||
messages: Vec<TraceMsg>,
|
||||
|
||||
num_lines_wrapping: u16,
|
||||
scroll_position: u16,
|
||||
|
@ -66,7 +87,9 @@ pub struct Output {
|
|||
anim_start: Instant,
|
||||
|
||||
interactive: bool,
|
||||
_is_cli_release: bool,
|
||||
platform: Platform,
|
||||
addr: AddressArguments,
|
||||
|
||||
// Filters
|
||||
show_filter_menu: bool,
|
||||
|
@ -75,17 +98,21 @@ pub struct Output {
|
|||
filter_search_mode: bool,
|
||||
filter_search_input: Option<String>,
|
||||
|
||||
dx_version: String,
|
||||
_rustc_version: String,
|
||||
_rustc_nightly: bool,
|
||||
_dx_version: String,
|
||||
}
|
||||
|
||||
type TerminalBackend = Terminal<CrosstermBackend<io::Stdout>>;
|
||||
|
||||
impl Output {
|
||||
pub(crate) fn start(cfg: &ServeArgs) -> io::Result<Self> {
|
||||
let interactive = cfg.interactive_tty();
|
||||
pub fn start(cfg: &Serve, log_control: CLILogControl) -> io::Result<Self> {
|
||||
let interactive = std::io::stdout().is_tty() && cfg.interactive.unwrap_or(true);
|
||||
|
||||
let mut events = None;
|
||||
|
||||
if interactive {
|
||||
// log_control.output_enabled.store(true, Ordering::SeqCst);
|
||||
log_control.output_enabled.store(true, Ordering::SeqCst);
|
||||
enable_raw_mode()?;
|
||||
stdout().execute(EnterAlternateScreen)?.execute(Hide)?;
|
||||
|
||||
|
@ -109,9 +136,20 @@ impl Output {
|
|||
)
|
||||
.ok();
|
||||
|
||||
let mut dx_version = format!("{}", env!("CARGO_PKG_VERSION"));
|
||||
if crate::build_info::PROFILE != "release" {
|
||||
if let Some(hash) = crate::build_info::GIT_COMMIT_HASH_SHORT {
|
||||
// todo: re-enable rustc version
|
||||
// let rustc_version = rustc_version().await;
|
||||
// let rustc_nightly = rustc_version.contains("nightly") || cfg.target_args.nightly;
|
||||
let _rustc_version = String::from("1.0.0");
|
||||
let _rustc_nightly = false;
|
||||
|
||||
let mut dx_version = String::new();
|
||||
|
||||
dx_version.push_str(env!("CARGO_PKG_VERSION"));
|
||||
|
||||
// todo: we want the binstalls / cargo installs to be exempt, but installs from git are not
|
||||
let is_cli_release = crate::dx_build_info::PROFILE == "release";
|
||||
if !is_cli_release {
|
||||
if let Some(hash) = crate::dx_build_info::GIT_COMMIT_HASH_SHORT {
|
||||
let hash = &hash.trim_start_matches('g')[..4];
|
||||
dx_version.push('-');
|
||||
dx_version.push_str(hash);
|
||||
|
@ -122,19 +160,24 @@ impl Output {
|
|||
|
||||
Ok(Self {
|
||||
term: Rc::new(RefCell::new(term)),
|
||||
log_control,
|
||||
events,
|
||||
|
||||
dx_version,
|
||||
_rustc_version,
|
||||
_rustc_nightly,
|
||||
_dx_version: dx_version,
|
||||
interactive,
|
||||
_is_cli_release: is_cli_release,
|
||||
platform,
|
||||
messages: Vec::new(),
|
||||
more_modal_open: false,
|
||||
build_progress: Default::default(),
|
||||
running_apps: HashMap::new(),
|
||||
scroll_position: 0,
|
||||
num_lines_wrapping: 0,
|
||||
console_width: 0,
|
||||
console_height: 0,
|
||||
anim_start: Instant::now(),
|
||||
addr: cfg.server_arguments.address.clone(),
|
||||
|
||||
// Filter
|
||||
show_filter_menu: false,
|
||||
|
@ -146,12 +189,21 @@ impl Output {
|
|||
}
|
||||
|
||||
/// Add a message from stderr to the logs
|
||||
pub fn push_stderr(&mut self, platform: Platform, stderr: String) {
|
||||
self.messages.push(ConsoleMessage::Log(TraceMsg {
|
||||
fn push_stderr(&mut self, platform: TargetPlatform, stderr: String) {
|
||||
self.running_apps
|
||||
.get_mut(&platform)
|
||||
.unwrap()
|
||||
.output
|
||||
.as_mut()
|
||||
.unwrap()
|
||||
.stderr_line
|
||||
.push_str(&stderr);
|
||||
|
||||
self.messages.push(TraceMsg {
|
||||
source: TraceSrc::App(platform),
|
||||
level: Level::ERROR,
|
||||
content: stderr,
|
||||
}));
|
||||
});
|
||||
|
||||
if self.is_snapped() {
|
||||
self.scroll_to_bottom();
|
||||
|
@ -159,12 +211,21 @@ impl Output {
|
|||
}
|
||||
|
||||
/// Add a message from stdout to the logs
|
||||
pub fn push_stdout(&mut self, platform: Platform, stdout: String) {
|
||||
self.messages.push(ConsoleMessage::Log(TraceMsg {
|
||||
fn push_stdout(&mut self, platform: TargetPlatform, stdout: String) {
|
||||
self.running_apps
|
||||
.get_mut(&platform)
|
||||
.unwrap()
|
||||
.output
|
||||
.as_mut()
|
||||
.unwrap()
|
||||
.stdout_line
|
||||
.push_str(&stdout);
|
||||
|
||||
self.messages.push(TraceMsg {
|
||||
source: TraceSrc::App(platform),
|
||||
level: Level::INFO,
|
||||
content: stdout,
|
||||
}));
|
||||
});
|
||||
|
||||
if self.is_snapped() {
|
||||
self.scroll_to_bottom();
|
||||
|
@ -176,21 +237,80 @@ impl Output {
|
|||
/// Why is the ctrl_c handler here?
|
||||
///
|
||||
/// Also tick animations every few ms
|
||||
pub(crate) async fn wait(&mut self) -> ServeUpdate {
|
||||
let event = tokio::select! {
|
||||
Some(Some(Ok(event))) = OptionFuture::from(self.events.as_mut().map(|f| f.next())) => event,
|
||||
else => futures_util::future::pending().await
|
||||
pub async fn wait(&mut self) -> io::Result<bool> {
|
||||
fn ok_and_some<F, T, E>(f: F) -> impl Future<Output = T>
|
||||
where
|
||||
F: Future<Output = Result<Option<T>, E>>,
|
||||
{
|
||||
next_or_pending(async move { f.await.ok().flatten() })
|
||||
}
|
||||
let user_input = async {
|
||||
let events = self.events.as_mut()?;
|
||||
events.next().await
|
||||
};
|
||||
let user_input = ok_and_some(user_input.map(|e| e.transpose()));
|
||||
|
||||
let has_running_apps = !self.running_apps.is_empty();
|
||||
let next_stdout = self.running_apps.values_mut().map(|app| {
|
||||
let future = async move {
|
||||
let (stdout, stderr) = match &mut app.output {
|
||||
Some(out) => (
|
||||
ok_and_some(out.stdout.next_line()),
|
||||
ok_and_some(out.stderr.next_line()),
|
||||
),
|
||||
None => return futures_util::future::pending().await,
|
||||
};
|
||||
|
||||
tokio::select! {
|
||||
line = stdout => (app.result.target_platform, Some(line), None),
|
||||
line = stderr => (app.result.target_platform, None, Some(line)),
|
||||
}
|
||||
};
|
||||
Box::pin(future)
|
||||
});
|
||||
|
||||
let next_stdout = async {
|
||||
if has_running_apps {
|
||||
select_all(next_stdout).await.0
|
||||
} else {
|
||||
futures_util::future::pending().await
|
||||
}
|
||||
};
|
||||
|
||||
ServeUpdate::TuiInput { event }
|
||||
let tui_log_rx = &mut self.log_control.output_rx;
|
||||
let next_tui_log = next_or_pending(tui_log_rx.next());
|
||||
|
||||
tokio::select! {
|
||||
(platform, stdout, stderr) = next_stdout => {
|
||||
if let Some(stdout) = stdout {
|
||||
self.push_stdout(platform, stdout);
|
||||
}
|
||||
if let Some(stderr) = stderr {
|
||||
self.push_stderr(platform, stderr);
|
||||
}
|
||||
},
|
||||
|
||||
// Handle internal CLI tracing logs.
|
||||
log = next_tui_log => {
|
||||
self.push_log(log);
|
||||
}
|
||||
|
||||
event = user_input => {
|
||||
if self.handle_events(event).await? {
|
||||
return Ok(true)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
pub(crate) fn shutdown(&mut self) -> io::Result<()> {
|
||||
pub fn shutdown(&mut self) -> io::Result<()> {
|
||||
// if we're a tty then we need to disable the raw mode
|
||||
if self.interactive {
|
||||
// self.log_control
|
||||
// .output_enabled
|
||||
// .store(false, Ordering::SeqCst);
|
||||
self.log_control
|
||||
.output_enabled
|
||||
.store(false, Ordering::SeqCst);
|
||||
disable_raw_mode()?;
|
||||
stdout().execute(LeaveAlternateScreen)?.execute(Show)?;
|
||||
self.drain_print_logs();
|
||||
|
@ -208,25 +328,17 @@ impl Output {
|
|||
let messages = self.messages.drain(..);
|
||||
|
||||
for msg in messages {
|
||||
match msg {
|
||||
ConsoleMessage::Log(msg) => {
|
||||
if msg.source != TraceSrc::Cargo {
|
||||
println!("[{}] {}: {}", msg.source, msg.level, msg.content);
|
||||
} else {
|
||||
println!("{}", msg.content);
|
||||
}
|
||||
}
|
||||
|
||||
ConsoleMessage::BuildReady => {
|
||||
// TODO: Better formatting for different content lengths.
|
||||
}
|
||||
ConsoleMessage::OnngoingBuild { stage, progress } => todo!(),
|
||||
// TODO: Better formatting for different content lengths.
|
||||
if msg.source != TraceSrc::Cargo {
|
||||
println!("[{}] {}: {}", msg.source, msg.level, msg.content);
|
||||
} else {
|
||||
println!("{}", msg.content);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Handle an input event, returning `true` if the event should cause the program to restart.
|
||||
pub(crate) fn handle_input(&mut self, input: Event) -> io::Result<bool> {
|
||||
pub fn handle_input(&mut self, input: Event) -> io::Result<bool> {
|
||||
// handle ctrlc
|
||||
if let Event::Key(key) = input {
|
||||
if let KeyCode::Char('c') = key.code {
|
||||
|
@ -340,7 +452,7 @@ impl Output {
|
|||
if key.code == KeyCode::Char('o') && key.kind == KeyEventKind::Press =>
|
||||
{
|
||||
// Open the running app.
|
||||
// open::that(format!("http://{}:{}", self.addr, self.port))?;
|
||||
open::that(format!("http://{}:{}", self.addr.addr, self.addr.port))?;
|
||||
}
|
||||
|
||||
Event::Key(key)
|
||||
|
@ -373,9 +485,9 @@ impl Output {
|
|||
Ok(false)
|
||||
}
|
||||
|
||||
pub(crate) fn new_ws_message(
|
||||
pub fn new_ws_message(
|
||||
&mut self,
|
||||
platform: Platform,
|
||||
platform: TargetPlatform,
|
||||
message: axum::extract::ws::Message,
|
||||
) {
|
||||
// Deccode the message and push it to our logs.
|
||||
|
@ -404,29 +516,6 @@ impl Output {
|
|||
}
|
||||
}
|
||||
|
||||
// pub(crate) fn scroll_to_bottom(&mut self) {
|
||||
// self.scroll = (self.num_lines_with_wrapping).saturating_sub(self.term_height);
|
||||
// }
|
||||
|
||||
pub(crate) fn push_inner_log(&mut self, msg: String) {
|
||||
self.push_log(
|
||||
TraceMsg::new(TraceSrc::Build, Level::INFO, msg),
|
||||
// crate::builder::BuildMessage {
|
||||
// level: tracing::Level::INFO,
|
||||
// message: crate::builder::MessageType::Text(msg),
|
||||
// source: crate::builder::MessageSource::Dev,
|
||||
// },
|
||||
);
|
||||
}
|
||||
|
||||
pub(crate) fn new_build_logs(&mut self, platform: Platform, update: BuildUpdateProgress) {
|
||||
self.push_log(TraceMsg::new(
|
||||
TraceSrc::Build,
|
||||
Level::INFO,
|
||||
format!("Build progress for {platform:?}: {update:?}"),
|
||||
))
|
||||
}
|
||||
|
||||
fn is_snapped(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
@ -436,30 +525,69 @@ impl Output {
|
|||
}
|
||||
|
||||
pub fn push_log(&mut self, message: TraceMsg) {
|
||||
self.messages.push(ConsoleMessage::Log(message));
|
||||
self.messages.push(message);
|
||||
|
||||
if self.is_snapped() {
|
||||
self.scroll_to_bottom();
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn new_ready_app(&mut self, handle: &AppHandle) {
|
||||
// Finish the build progress for the platform that just finished building
|
||||
if let Some(build) = self
|
||||
.build_progress
|
||||
pub fn new_build_progress(&mut self, platform: TargetPlatform, update: UpdateBuildProgress) {
|
||||
self.build_progress
|
||||
.current_builds
|
||||
.get_mut(&handle.app.build.platform())
|
||||
{
|
||||
build.stage = Stage::Finished;
|
||||
.entry(platform)
|
||||
.or_default()
|
||||
.update(update);
|
||||
|
||||
if self.is_snapped() {
|
||||
self.scroll_to_bottom();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_ready_app(&mut self, build_engine: &mut Builder, results: Vec<BuildResult>) {
|
||||
for result in results {
|
||||
let out = build_engine
|
||||
.children
|
||||
.iter_mut()
|
||||
.find_map(|(platform, child)| {
|
||||
if platform == &result.target_platform {
|
||||
let stdout = child.stdout.take().unwrap();
|
||||
let stderr = child.stderr.take().unwrap();
|
||||
Some((stdout, stderr))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
|
||||
let platform = result.target_platform;
|
||||
|
||||
let stdout = out.map(|(stdout, stderr)| RunningAppOutput {
|
||||
stdout: BufReader::new(stdout).lines(),
|
||||
stderr: BufReader::new(stderr).lines(),
|
||||
stdout_line: String::new(),
|
||||
stderr_line: String::new(),
|
||||
});
|
||||
|
||||
let app = RunningApp {
|
||||
result,
|
||||
output: stdout,
|
||||
};
|
||||
|
||||
self.running_apps.insert(platform, app);
|
||||
|
||||
// Finish the build progress for the platform that just finished building
|
||||
if let Some(build) = self.build_progress.current_builds.get_mut(&platform) {
|
||||
build.stage = Stage::Finished;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn render(
|
||||
&mut self,
|
||||
_opts: &ServeArgs,
|
||||
_opts: &Serve,
|
||||
_config: &DioxusCrate,
|
||||
_build_engine: &Builder,
|
||||
_server: &DevServer,
|
||||
_server: &Server,
|
||||
_watcher: &Watcher,
|
||||
) {
|
||||
// just drain the build logs
|
||||
|
@ -521,7 +649,7 @@ impl Output {
|
|||
&self.build_progress,
|
||||
self.more_modal_open,
|
||||
self.show_filter_menu,
|
||||
&self.dx_version,
|
||||
&self._dx_version,
|
||||
);
|
||||
|
||||
if self.more_modal_open {
|
||||
|
@ -536,45 +664,68 @@ impl Output {
|
|||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// Our console has "special" messages that get better rendering.
|
||||
///
|
||||
/// We want to display them differently since they have their own state and are rendered differently.
|
||||
enum ConsoleMessage {
|
||||
Log(TraceMsg),
|
||||
OnngoingBuild { stage: Stage, progress: f64 },
|
||||
BuildReady,
|
||||
async fn handle_events(&mut self, event: Event) -> io::Result<bool> {
|
||||
let mut events = vec![event];
|
||||
|
||||
// Collect all the events within the next 10ms in one stream
|
||||
let collect_events = async {
|
||||
loop {
|
||||
let Some(Ok(next)) = self.events.as_mut().unwrap().next().await else {
|
||||
break;
|
||||
};
|
||||
events.push(next);
|
||||
}
|
||||
};
|
||||
tokio::select! {
|
||||
_ = collect_events => {},
|
||||
_ = tokio::time::sleep(Duration::from_millis(10)) => {}
|
||||
}
|
||||
|
||||
// Debounce events within the same frame
|
||||
let mut handled = HashSet::new();
|
||||
for event in events {
|
||||
if !handled.contains(&event) {
|
||||
if self.handle_input(event.clone())? {
|
||||
// Restart the running app.
|
||||
return Ok(true);
|
||||
}
|
||||
handled.insert(event);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, PartialEq)]
|
||||
pub(crate) struct ActiveBuild {
|
||||
pub struct ActiveBuild {
|
||||
stage: Stage,
|
||||
progress: f64,
|
||||
failed: Option<String>,
|
||||
}
|
||||
|
||||
impl ActiveBuild {
|
||||
// fn update(&mut self, update: BuildUpdateProgress) {
|
||||
// match update.update {
|
||||
// UpdateStage::Start => {
|
||||
// // If we are already past the stage, don't roll back, but allow a fresh build to update.
|
||||
// if self.stage > update.stage && self.stage < Stage::Finished {
|
||||
// return;
|
||||
// }
|
||||
// self.stage = update.stage;
|
||||
// self.progress = 0.0;
|
||||
// self.failed = None;
|
||||
// }
|
||||
// UpdateStage::SetProgress(progress) => {
|
||||
// self.progress = progress;
|
||||
// }
|
||||
// UpdateStage::Failed(failed) => {
|
||||
// self.stage = Stage::Finished;
|
||||
// self.failed = Some(failed.clone());
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
fn update(&mut self, update: UpdateBuildProgress) {
|
||||
match update.update {
|
||||
UpdateStage::Start => {
|
||||
// If we are already past the stage, don't roll back, but allow a fresh build to update.
|
||||
if self.stage > update.stage && self.stage < Stage::Finished {
|
||||
return;
|
||||
}
|
||||
self.stage = update.stage;
|
||||
self.progress = 0.0;
|
||||
self.failed = None;
|
||||
}
|
||||
UpdateStage::SetProgress(progress) => {
|
||||
self.progress = progress;
|
||||
}
|
||||
UpdateStage::Failed(failed) => {
|
||||
self.stage = Stage::Finished;
|
||||
self.failed = Some(failed.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn make_spans(&self, area: Rect) -> Vec<Span> {
|
||||
let mut spans = Vec::new();
|
||||
|
@ -658,64 +809,14 @@ async fn rustc_version() -> String {
|
|||
.unwrap_or_else(|| "<unknown>".to_string())
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||
pub(crate) enum LogSource {
|
||||
Internal,
|
||||
Target(Platform),
|
||||
pub struct RunningApp {
|
||||
result: BuildResult,
|
||||
output: Option<RunningAppOutput>,
|
||||
}
|
||||
|
||||
impl Display for LogSource {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
LogSource::Internal => write!(f, "CLI"),
|
||||
LogSource::Target(platform) => write!(f, "{platform}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Platform> for LogSource {
|
||||
fn from(platform: Platform) -> Self {
|
||||
LogSource::Target(platform)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub(crate) struct BuildProgress {
|
||||
internal_logs: Vec<BuildMessage>,
|
||||
current_builds: HashMap<Platform, ActiveBuild>,
|
||||
}
|
||||
|
||||
// impl BuildProgress {
|
||||
// pub(crate) fn progress(&self) -> f64 {
|
||||
// self.build_logs
|
||||
// .values()
|
||||
// .min_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal))
|
||||
// .map(|build| match build.stage {
|
||||
// Stage::Initializing => 0.0,
|
||||
// Stage::InstallingWasmTooling => 0.0,
|
||||
// Stage::Compiling => build.progress,
|
||||
// Stage::OptimizingWasm | Stage::OptimizingAssets | Stage::Finished => 1.0,
|
||||
// })
|
||||
// .unwrap_or_default()
|
||||
// }
|
||||
// }
|
||||
|
||||
// #[derive(Default)]
|
||||
// pub struct BuildProgress {
|
||||
// current_builds: HashMap<Platform, ActiveBuild>,
|
||||
// }
|
||||
|
||||
impl BuildProgress {
|
||||
pub fn progress(&self) -> f64 {
|
||||
self.current_builds
|
||||
.values()
|
||||
.min_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal))
|
||||
.map(|build| match build.stage {
|
||||
Stage::Initializing => 0.0,
|
||||
Stage::InstallingWasmTooling => 0.0,
|
||||
Stage::Compiling => build.progress,
|
||||
Stage::OptimizingWasm | Stage::OptimizingAssets | Stage::Finished => 1.0,
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
struct RunningAppOutput {
|
||||
stdout: Lines<BufReader<ChildStdout>>,
|
||||
stderr: Lines<BufReader<ChildStderr>>,
|
||||
stdout_line: String,
|
||||
stderr_line: String,
|
||||
}
|
||||
|
|
|
@ -1,9 +1,8 @@
|
|||
use super::{BuildProgress, ConsoleMessage, TraceMsg, TraceSrc};
|
||||
use crate::Platform;
|
||||
use super::{BuildProgress, TraceMsg, TraceSrc};
|
||||
use ansi_to_tui::IntoText as _;
|
||||
use dioxus_cli_config::Platform;
|
||||
use ratatui::{
|
||||
layout::{Alignment, Constraint, Direction, Layout, Rect},
|
||||
prelude::Buffer,
|
||||
style::{Color, Style, Stylize},
|
||||
text::{Line, Span, Text},
|
||||
widgets::{Block, Borders, Clear, List, ListState, Paragraph, Widget, Wrap},
|
||||
|
@ -118,7 +117,7 @@ impl TuiLayout {
|
|||
&self,
|
||||
frame: &mut Frame,
|
||||
scroll_position: u16,
|
||||
messages: &[ConsoleMessage],
|
||||
messages: &[TraceMsg],
|
||||
enabled_filters: &[String],
|
||||
) -> u16 {
|
||||
const LEVEL_MAX: usize = "BUILD: ".len();
|
||||
|
@ -129,82 +128,75 @@ impl TuiLayout {
|
|||
for msg in messages.iter() {
|
||||
let mut sub_line_padding = 0;
|
||||
|
||||
match msg {
|
||||
ConsoleMessage::BuildReady => {}
|
||||
ConsoleMessage::OnngoingBuild { stage, progress } => {}
|
||||
let text = msg.content.trim_end().into_text().unwrap_or_default();
|
||||
|
||||
ConsoleMessage::Log(msg) => {
|
||||
let text = msg.content.trim_end().into_text().unwrap_or_default();
|
||||
for (idx, line) in text.lines.into_iter().enumerate() {
|
||||
// Don't add any formatting for cargo messages.
|
||||
let out_line = if msg.source != TraceSrc::Cargo {
|
||||
if idx == 0 {
|
||||
match msg.source {
|
||||
TraceSrc::Dev => {
|
||||
let mut spans = vec![Span::from(" DEV: ").light_magenta()];
|
||||
|
||||
for (idx, line) in text.lines.into_iter().enumerate() {
|
||||
// Don't add any formatting for cargo messages.
|
||||
let out_line = if msg.source != TraceSrc::Cargo {
|
||||
if idx == 0 {
|
||||
match msg.source {
|
||||
TraceSrc::Dev => {
|
||||
let mut spans = vec![Span::from(" DEV: ").light_magenta()];
|
||||
|
||||
for span in line.spans {
|
||||
spans.push(span);
|
||||
}
|
||||
spans
|
||||
}
|
||||
TraceSrc::Build => {
|
||||
let mut spans = vec![Span::from("BUILD: ").light_blue()];
|
||||
|
||||
for span in line.spans {
|
||||
spans.push(span);
|
||||
}
|
||||
spans
|
||||
}
|
||||
_ => {
|
||||
// Build level tag: `INFO:``
|
||||
// We don't subtract 1 here for `:` because we still want at least 1 padding.
|
||||
let padding = build_msg_padding(
|
||||
LEVEL_MAX - msg.level.to_string().len() - 2,
|
||||
);
|
||||
let level = format!("{padding}{}: ", msg.level);
|
||||
sub_line_padding += level.len();
|
||||
|
||||
let level_span = Span::from(level);
|
||||
let level_span = match msg.level {
|
||||
Level::TRACE => level_span.black(),
|
||||
Level::DEBUG => level_span.light_magenta(),
|
||||
Level::INFO => level_span.light_green(),
|
||||
Level::WARN => level_span.light_yellow(),
|
||||
Level::ERROR => level_span.light_red(),
|
||||
};
|
||||
|
||||
let mut out_line = vec![level_span];
|
||||
for span in line.spans {
|
||||
out_line.push(span);
|
||||
}
|
||||
|
||||
out_line
|
||||
}
|
||||
for span in line.spans {
|
||||
spans.push(span);
|
||||
}
|
||||
} else {
|
||||
// Not the first line. Append the padding and merge into list.
|
||||
let padding = build_msg_padding(sub_line_padding);
|
||||
spans
|
||||
}
|
||||
TraceSrc::Build => {
|
||||
let mut spans = vec![Span::from("BUILD: ").light_blue()];
|
||||
|
||||
let mut out_line = vec![Span::from(padding)];
|
||||
for span in line.spans {
|
||||
spans.push(span);
|
||||
}
|
||||
spans
|
||||
}
|
||||
_ => {
|
||||
// Build level tag: `INFO:``
|
||||
// We don't subtract 1 here for `:` because we still want at least 1 padding.
|
||||
let padding =
|
||||
build_msg_padding(LEVEL_MAX - msg.level.to_string().len() - 2);
|
||||
let level = format!("{padding}{}: ", msg.level);
|
||||
sub_line_padding += level.len();
|
||||
|
||||
let level_span = Span::from(level);
|
||||
let level_span = match msg.level {
|
||||
Level::TRACE => level_span.black(),
|
||||
Level::DEBUG => level_span.light_magenta(),
|
||||
Level::INFO => level_span.light_green(),
|
||||
Level::WARN => level_span.light_yellow(),
|
||||
Level::ERROR => level_span.light_red(),
|
||||
};
|
||||
|
||||
let mut out_line = vec![level_span];
|
||||
for span in line.spans {
|
||||
out_line.push(span);
|
||||
}
|
||||
|
||||
out_line
|
||||
}
|
||||
} else {
|
||||
line.spans
|
||||
};
|
||||
}
|
||||
} else {
|
||||
// Not the first line. Append the padding and merge into list.
|
||||
let padding = build_msg_padding(sub_line_padding);
|
||||
|
||||
out_text.push_line(Line::from(out_line));
|
||||
let mut out_line = vec![Span::from(padding)];
|
||||
for span in line.spans {
|
||||
out_line.push(span);
|
||||
}
|
||||
out_line
|
||||
}
|
||||
}
|
||||
} else {
|
||||
line.spans
|
||||
};
|
||||
|
||||
out_text.push_line(Line::from(out_line));
|
||||
}
|
||||
}
|
||||
|
||||
// Only show messages for filters that are enabled.
|
||||
let mut included_line_ids = Vec::new();
|
||||
|
||||
for filter in enabled_filters {
|
||||
let re = Regex::new(filter);
|
||||
for (index, line) in out_text.lines.iter().enumerate() {
|
||||
|
@ -244,9 +236,6 @@ impl TuiLayout {
|
|||
}
|
||||
}
|
||||
|
||||
// out_text.push_line(line);
|
||||
// let lines: Buffer = Paragraph::new(out_text).try_into().unwrap();
|
||||
|
||||
let (console_width, _console_height) = self.get_console_size();
|
||||
|
||||
let paragraph = Paragraph::new(out_text)
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use crate::config::WebProxyConfig;
|
||||
use crate::TraceSrc;
|
||||
use crate::{Error, Result};
|
||||
use dioxus_cli_config::WebProxyConfig;
|
||||
|
||||
use anyhow::{anyhow, Context};
|
||||
use axum::body::Body as MyBody;
|
||||
|
@ -56,7 +56,7 @@ impl ProxyClient {
|
|||
/// - the exact path of the proxy config's backend URL, e.g. /api
|
||||
/// - the exact path with a trailing slash, e.g. /api/
|
||||
/// - any subpath of the backend URL, e.g. /api/foo/bar
|
||||
pub(crate) fn add_proxy(mut router: Router, proxy: &WebProxyConfig) -> Result<Router> {
|
||||
pub fn add_proxy(mut router: Router, proxy: &WebProxyConfig) -> Result<Router> {
|
||||
let url: Uri = proxy.backend.parse()?;
|
||||
let path = url.path().to_string();
|
||||
let trimmed_path = path.trim_start_matches('/');
|
||||
|
|
|
@ -1,76 +0,0 @@
|
|||
use super::{handle::AppHandle, ServeUpdate};
|
||||
use crate::{builder::Platform, bundler::AppBundle, Result};
|
||||
use futures_util::{future::OptionFuture, stream::FuturesUnordered};
|
||||
use std::{collections::HashMap, net::SocketAddr};
|
||||
use tokio_stream::StreamExt;
|
||||
|
||||
pub(crate) struct AppRunner {
|
||||
/// Ongoing apps running in place
|
||||
///
|
||||
/// They might be actively being being, running, or have exited.
|
||||
///
|
||||
/// When a new full rebuild occurs, we will keep these requests here
|
||||
pub(crate) running: HashMap<Platform, AppHandle>,
|
||||
}
|
||||
|
||||
impl AppRunner {
|
||||
pub(crate) fn start() -> Self {
|
||||
Self {
|
||||
running: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn wait(&mut self) -> ServeUpdate {
|
||||
// If there are no running apps, we can just return pending to avoid deadlocking
|
||||
if self.running.is_empty() {
|
||||
return futures_util::future::pending().await;
|
||||
}
|
||||
|
||||
self.running
|
||||
.iter_mut()
|
||||
.map(|(platform, handle)| async {
|
||||
use ServeUpdate::*;
|
||||
let platform = *platform;
|
||||
tokio::select! {
|
||||
Some(Ok(Some(msg))) = OptionFuture::from(handle.stdout.as_mut().map(|f| f.next_line())) => {
|
||||
StdoutReceived { platform, msg }
|
||||
},
|
||||
Some(Ok(Some(msg))) = OptionFuture::from(handle.stderr.as_mut().map(|f| f.next_line())) => {
|
||||
StderrReceived { platform, msg }
|
||||
},
|
||||
Some(status) = OptionFuture::from(handle.child.as_mut().map(|f| f.wait())) => {
|
||||
tracing::info!("Child process exited with status: {status:?}");
|
||||
match status {
|
||||
Ok(status) => ProcessExited { status, platform },
|
||||
Err(_err) => todo!("handle error in process joining?"),
|
||||
}
|
||||
}
|
||||
else => futures_util::future::pending().await
|
||||
}
|
||||
})
|
||||
.collect::<FuturesUnordered<_>>()
|
||||
.next()
|
||||
.await
|
||||
.expect("Stream to pending if not empty")
|
||||
}
|
||||
|
||||
/// Finally "bundle" this app and return a handle to it
|
||||
pub(crate) async fn open(
|
||||
&mut self,
|
||||
app: AppBundle,
|
||||
devserver_ip: SocketAddr,
|
||||
fullstack_address: Option<SocketAddr>,
|
||||
) -> Result<&AppHandle> {
|
||||
let platform = app.build.build.platform();
|
||||
self.kill(platform).await;
|
||||
|
||||
let handle = AppHandle::start(app, devserver_ip, fullstack_address).await?;
|
||||
self.running.insert(platform, handle);
|
||||
|
||||
Ok(self.running.get(&platform).unwrap())
|
||||
}
|
||||
|
||||
pub(crate) async fn kill(&mut self, platform: Platform) {
|
||||
self.running.remove(&platform);
|
||||
}
|
||||
}
|
|
@ -1,42 +1,41 @@
|
|||
use crate::dioxus_crate::DioxusCrate;
|
||||
use crate::TraceSrc;
|
||||
use crate::{builder::Platform, serve::ServeArgs};
|
||||
use crate::{config::WebHttpsConfig, serve::update::ServeUpdate};
|
||||
use crate::serve::{next_or_pending, Serve};
|
||||
use crate::{dioxus_crate::DioxusCrate, TraceSrc};
|
||||
use crate::{Error, Result};
|
||||
use axum::extract::{Request, State};
|
||||
use axum::middleware::{self, Next};
|
||||
use axum::{
|
||||
body::Body,
|
||||
extract::{
|
||||
ws::{Message, WebSocket},
|
||||
WebSocketUpgrade,
|
||||
},
|
||||
extract::{Request, State},
|
||||
http::{
|
||||
header::{HeaderName, HeaderValue, CACHE_CONTROL, EXPIRES, PRAGMA},
|
||||
Method, Response, StatusCode,
|
||||
},
|
||||
middleware::{self, Next},
|
||||
response::IntoResponse,
|
||||
routing::{get, get_service},
|
||||
Extension, Router,
|
||||
};
|
||||
use axum_server::tls_rustls::RustlsConfig;
|
||||
use dioxus_devtools_types::{DevserverMsg, HotReloadMsg};
|
||||
use dioxus_cli_config::{Platform, WebHttpsConfig};
|
||||
use dioxus_hot_reload::{DevserverMsg, HotReloadMsg};
|
||||
use futures_channel::mpsc::{UnboundedReceiver, UnboundedSender};
|
||||
use futures_util::{
|
||||
future,
|
||||
stream::{self, FuturesUnordered},
|
||||
StreamExt,
|
||||
};
|
||||
use hyper::{header::ACCEPT, HeaderMap};
|
||||
use futures_util::stream;
|
||||
use futures_util::{stream::FuturesUnordered, StreamExt};
|
||||
use hyper::header::ACCEPT;
|
||||
use hyper::HeaderMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::net::TcpListener;
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use std::sync::RwLock;
|
||||
use std::{
|
||||
convert::Infallible,
|
||||
fs, io,
|
||||
net::{IpAddr, SocketAddr, TcpListener},
|
||||
sync::RwLock,
|
||||
net::{IpAddr, SocketAddr},
|
||||
process::Command,
|
||||
};
|
||||
use std::{path::Path, sync::Arc};
|
||||
use tokio::process::Command;
|
||||
use tokio::task::JoinHandle;
|
||||
use tower::ServiceBuilder;
|
||||
use tower_http::{
|
||||
|
@ -45,25 +44,62 @@ use tower_http::{
|
|||
ServiceBuilderExt,
|
||||
};
|
||||
|
||||
pub(crate) struct DevServer {
|
||||
pub(crate) _args: ServeArgs,
|
||||
pub(crate) hot_reload_sockets: Vec<WebSocket>,
|
||||
pub(crate) build_status_sockets: Vec<WebSocket>,
|
||||
pub(crate) ip: SocketAddr,
|
||||
pub(crate) new_hot_reload_sockets: UnboundedReceiver<WebSocket>,
|
||||
pub(crate) new_build_status_sockets: UnboundedReceiver<WebSocket>,
|
||||
_server_task: JoinHandle<Result<()>>,
|
||||
pub enum ServerUpdate {
|
||||
NewConnection,
|
||||
Message(Message),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(tag = "type", content = "data")]
|
||||
enum Status {
|
||||
ClientInit {
|
||||
application_name: String,
|
||||
platform: String,
|
||||
},
|
||||
Building {
|
||||
progress: f64,
|
||||
build_message: String,
|
||||
},
|
||||
BuildError {
|
||||
error: String,
|
||||
},
|
||||
Ready,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct SharedStatus(Arc<RwLock<Status>>);
|
||||
|
||||
impl SharedStatus {
|
||||
fn new(status: Status) -> Self {
|
||||
Self(Arc::new(RwLock::new(status)))
|
||||
}
|
||||
|
||||
fn set(&self, status: Status) {
|
||||
*self.0.write().unwrap() = status;
|
||||
}
|
||||
|
||||
fn get(&self) -> Status {
|
||||
self.0.read().unwrap().clone()
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct Server {
|
||||
pub hot_reload_sockets: Vec<WebSocket>,
|
||||
pub build_status_sockets: Vec<WebSocket>,
|
||||
pub ip: SocketAddr,
|
||||
pub new_hot_reload_sockets: UnboundedReceiver<WebSocket>,
|
||||
pub new_build_status_sockets: UnboundedReceiver<WebSocket>,
|
||||
_server_task: JoinHandle<Result<()>>,
|
||||
/// We proxy (not hot reloading) fullstack requests to this port
|
||||
pub(crate) fullstack_port: Option<u16>,
|
||||
pub fullstack_port: Option<u16>,
|
||||
|
||||
build_status: SharedStatus,
|
||||
application_name: String,
|
||||
platform: String,
|
||||
}
|
||||
|
||||
impl DevServer {
|
||||
pub(crate) fn start(args: &ServeArgs, cfg: &DioxusCrate) -> Self {
|
||||
impl Server {
|
||||
pub fn start(serve: &Serve, cfg: &DioxusCrate) -> Self {
|
||||
let (hot_reload_sockets_tx, hot_reload_sockets_rx) = futures_channel::mpsc::unbounded();
|
||||
let (build_status_sockets_tx, build_status_sockets_rx) = futures_channel::mpsc::unbounded();
|
||||
|
||||
|
@ -72,24 +108,27 @@ impl DevServer {
|
|||
build_message: "Starting the build...".to_string(),
|
||||
});
|
||||
|
||||
let addr = args.address.address();
|
||||
let start_browser = args.open.unwrap_or_default();
|
||||
let addr = serve.server_arguments.address.address();
|
||||
let start_browser = serve.server_arguments.open.unwrap_or_default();
|
||||
|
||||
// If we're serving a fullstack app, we need to find a port to proxy to
|
||||
let proxied_port = if args.should_proxy_build() {
|
||||
let fullstack_port = if matches!(
|
||||
serve.build_arguments.platform(),
|
||||
Platform::Liveview | Platform::Fullstack | Platform::StaticGeneration
|
||||
) {
|
||||
get_available_port(addr.ip())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let proxied_address = proxied_port.map(|port| SocketAddr::new(addr.ip(), port));
|
||||
let fullstack_address = fullstack_port.map(|port| SocketAddr::new(addr.ip(), port));
|
||||
|
||||
let router = Self::setup_router(
|
||||
args,
|
||||
let router = setup_router(
|
||||
serve,
|
||||
cfg,
|
||||
hot_reload_sockets_tx,
|
||||
build_status_sockets_tx,
|
||||
proxied_address,
|
||||
fullstack_address,
|
||||
build_status.clone(),
|
||||
)
|
||||
.unwrap();
|
||||
|
@ -97,13 +136,7 @@ impl DevServer {
|
|||
// Actually just start the server, cloning in a few bits of config
|
||||
let web_config = cfg.dioxus_config.web.https.clone();
|
||||
let base_path = cfg.dioxus_config.web.app.base_path.clone();
|
||||
let platform = args.platform();
|
||||
|
||||
let listener = std::net::TcpListener::bind(addr).expect("Failed to bind port");
|
||||
_ = listener.set_nonblocking(true);
|
||||
|
||||
let addr = listener.local_addr().unwrap();
|
||||
|
||||
let platform = serve.platform();
|
||||
let _server_task = tokio::spawn(async move {
|
||||
let web_config = web_config.clone();
|
||||
// HTTPS
|
||||
|
@ -118,13 +151,13 @@ impl DevServer {
|
|||
|
||||
// Start the server with or without rustls
|
||||
if let Some(rustls) = rustls {
|
||||
axum_server::from_tcp_rustls(listener, rustls)
|
||||
axum_server::bind_rustls(addr, rustls)
|
||||
.serve(router.into_make_service())
|
||||
.await?
|
||||
} else {
|
||||
// Create a TCP listener bound to the address
|
||||
axum::serve(
|
||||
tokio::net::TcpListener::from_std(listener).unwrap(),
|
||||
tokio::net::TcpListener::bind(&addr).await?,
|
||||
router.into_make_service(),
|
||||
)
|
||||
.await?
|
||||
|
@ -134,18 +167,17 @@ impl DevServer {
|
|||
});
|
||||
|
||||
Self {
|
||||
_args: args.clone(),
|
||||
hot_reload_sockets: Default::default(),
|
||||
build_status_sockets: Default::default(),
|
||||
new_hot_reload_sockets: hot_reload_sockets_rx,
|
||||
new_build_status_sockets: build_status_sockets_rx,
|
||||
_server_task,
|
||||
ip: addr,
|
||||
fullstack_port: proxied_port,
|
||||
fullstack_port,
|
||||
|
||||
build_status,
|
||||
application_name: cfg.dioxus_config.application.name.clone(),
|
||||
platform: args.build_arguments.platform().to_string(),
|
||||
platform: serve.build_arguments.platform().to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -166,7 +198,7 @@ impl DevServer {
|
|||
}
|
||||
|
||||
/// Sends a start build message to all clients.
|
||||
pub(crate) async fn start_build(&mut self) {
|
||||
pub async fn start_build(&mut self) {
|
||||
self.build_status.set(Status::Building {
|
||||
progress: 0.0,
|
||||
build_message: "Starting the build...".to_string(),
|
||||
|
@ -175,7 +207,7 @@ impl DevServer {
|
|||
}
|
||||
|
||||
/// Sends an updated build status to all clients.
|
||||
pub(crate) async fn update_build_status(&mut self, progress: f64, build_message: String) {
|
||||
pub async fn update_build_status(&mut self, progress: f64, build_message: String) {
|
||||
if !matches!(self.build_status.get(), Status::Building { .. }) {
|
||||
return;
|
||||
}
|
||||
|
@ -187,11 +219,7 @@ impl DevServer {
|
|||
}
|
||||
|
||||
/// Sends hot reloadable changes to all clients.
|
||||
pub(crate) async fn send_hotreload(&mut self, reload: HotReloadMsg) {
|
||||
if !reload.assets.is_empty() {
|
||||
tracing::debug!("Hot reloading assets {:?}", reload.assets);
|
||||
}
|
||||
|
||||
pub async fn send_hotreload(&mut self, reload: HotReloadMsg) {
|
||||
let msg = DevserverMsg::HotReload(reload);
|
||||
let msg = serde_json::to_string(&msg).unwrap();
|
||||
|
||||
|
@ -208,7 +236,7 @@ impl DevServer {
|
|||
}
|
||||
|
||||
/// Wait for new clients to be connected and then save them
|
||||
pub(crate) async fn wait(&mut self) -> ServeUpdate {
|
||||
pub async fn wait(&mut self) -> Option<ServerUpdate> {
|
||||
let mut new_hot_reload_socket = self.new_hot_reload_sockets.next();
|
||||
let mut new_build_status_socket = self.new_build_status_sockets.next();
|
||||
let mut new_message = self
|
||||
|
@ -217,13 +245,14 @@ impl DevServer {
|
|||
.enumerate()
|
||||
.map(|(idx, socket)| async move { (idx, socket.next().await) })
|
||||
.collect::<FuturesUnordered<_>>();
|
||||
let next_new_message = next_or_pending(new_message.next());
|
||||
|
||||
tokio::select! {
|
||||
new_hot_reload_socket = &mut new_hot_reload_socket => {
|
||||
if let Some(new_socket) = new_hot_reload_socket {
|
||||
drop(new_message);
|
||||
self.hot_reload_sockets.push(new_socket);
|
||||
return ServeUpdate::NewConnection;
|
||||
return Some(ServerUpdate::NewConnection);
|
||||
} else {
|
||||
panic!("Could not receive a socket - the devtools could not boot - the port is likely already in use");
|
||||
}
|
||||
|
@ -238,14 +267,14 @@ impl DevServer {
|
|||
_ = send_build_status_to(&self.build_status, &mut new_socket).await;
|
||||
self.build_status_sockets.push(new_socket);
|
||||
}
|
||||
return future::pending::<ServeUpdate>().await;
|
||||
return None;
|
||||
} else {
|
||||
panic!("Could not receive a socket - the devtools could not boot - the port is likely already in use");
|
||||
}
|
||||
}
|
||||
Some((idx, message)) = new_message.next() => {
|
||||
(idx, message) = next_new_message => {
|
||||
match message {
|
||||
Some(Ok(message)) => return ServeUpdate::WsMessage(message),
|
||||
Some(Ok(message)) => return Some(ServerUpdate::Message(message)),
|
||||
_ => {
|
||||
drop(new_message);
|
||||
_ = self.hot_reload_sockets.remove(idx);
|
||||
|
@ -254,11 +283,11 @@ impl DevServer {
|
|||
}
|
||||
}
|
||||
|
||||
future::pending().await
|
||||
None
|
||||
}
|
||||
|
||||
/// Converts a `cargo` error to HTML and sends it to clients.
|
||||
pub(crate) async fn send_build_error(&mut self, error: Error) {
|
||||
pub async fn send_build_error(&mut self, error: Error) {
|
||||
let error = error.to_string();
|
||||
self.build_status.set(Status::BuildError {
|
||||
error: ansi_to_html::convert(&error).unwrap_or(error),
|
||||
|
@ -267,19 +296,19 @@ impl DevServer {
|
|||
}
|
||||
|
||||
/// Tells all clients that a full rebuild has started.
|
||||
pub(crate) async fn send_reload_start(&mut self) {
|
||||
pub async fn send_reload_start(&mut self) {
|
||||
self.send_devserver_message(DevserverMsg::FullReloadStart)
|
||||
.await;
|
||||
}
|
||||
|
||||
/// Tells all clients that a full rebuild has failed.
|
||||
pub(crate) async fn send_reload_failed(&mut self) {
|
||||
pub async fn send_reload_failed(&mut self) {
|
||||
self.send_devserver_message(DevserverMsg::FullReloadFailed)
|
||||
.await;
|
||||
}
|
||||
|
||||
/// Tells all clients to reload if possible for new changes.
|
||||
pub(crate) async fn send_reload_command(&mut self) {
|
||||
pub async fn send_reload_command(&mut self) {
|
||||
self.build_status.set(Status::Ready);
|
||||
self.send_build_status().await;
|
||||
self.send_devserver_message(DevserverMsg::FullReloadCommand)
|
||||
|
@ -287,7 +316,7 @@ impl DevServer {
|
|||
}
|
||||
|
||||
/// Send a shutdown message to all connected clients.
|
||||
pub(crate) async fn send_shutdown(&mut self) {
|
||||
pub async fn send_shutdown(&mut self) {
|
||||
self.send_devserver_message(DevserverMsg::Shutdown).await;
|
||||
}
|
||||
|
||||
|
@ -300,7 +329,7 @@ impl DevServer {
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn shutdown(&mut self) {
|
||||
pub async fn shutdown(&mut self) {
|
||||
self.send_shutdown().await;
|
||||
for socket in self.hot_reload_sockets.drain(..) {
|
||||
_ = socket.close().await;
|
||||
|
@ -308,36 +337,57 @@ impl DevServer {
|
|||
}
|
||||
|
||||
/// Get the address the fullstack server should run on if we're serving a fullstack app
|
||||
pub(crate) fn fullstack_address(&self) -> Option<SocketAddr> {
|
||||
pub fn fullstack_address(&self) -> Option<SocketAddr> {
|
||||
self.fullstack_port
|
||||
.map(|port| SocketAddr::new(self.ip.ip(), port))
|
||||
}
|
||||
}
|
||||
|
||||
/// Sets up and returns a router
|
||||
///
|
||||
/// Steps include:
|
||||
/// - Setting up cors
|
||||
/// - Setting up the proxy to the endpoint specified in the config
|
||||
/// - Setting up the file serve service
|
||||
/// - Setting up the websocket endpoint for devtools
|
||||
fn setup_router(
|
||||
args: &ServeArgs,
|
||||
krate: &DioxusCrate,
|
||||
hot_reload_sockets: UnboundedSender<WebSocket>,
|
||||
build_status_sockets: UnboundedSender<WebSocket>,
|
||||
fullstack_address: Option<SocketAddr>,
|
||||
build_status: SharedStatus,
|
||||
) -> Result<Router> {
|
||||
let mut router = Router::new();
|
||||
/// Sets up and returns a router
|
||||
///
|
||||
/// Steps include:
|
||||
/// - Setting up cors
|
||||
/// - Setting up the proxy to the endpoint specified in the config
|
||||
/// - Setting up the file serve service
|
||||
/// - Setting up the websocket endpoint for devtools
|
||||
fn setup_router(
|
||||
serve: &Serve,
|
||||
config: &DioxusCrate,
|
||||
hot_reload_sockets: UnboundedSender<WebSocket>,
|
||||
build_status_sockets: UnboundedSender<WebSocket>,
|
||||
fullstack_address: Option<SocketAddr>,
|
||||
build_status: SharedStatus,
|
||||
) -> Result<Router> {
|
||||
let mut router = Router::new();
|
||||
let platform = serve.build_arguments.platform();
|
||||
|
||||
// Setup proxy for the endpoint specified in the config
|
||||
for proxy_config in krate.dioxus_config.web.proxy.iter() {
|
||||
router = super::proxy::add_proxy(router, proxy_config)?;
|
||||
// Setup proxy for the endpoint specified in the config
|
||||
for proxy_config in config.dioxus_config.web.proxy.iter() {
|
||||
router = super::proxy::add_proxy(router, proxy_config)?;
|
||||
}
|
||||
|
||||
// server the dir if it's web, otherwise let the fullstack server itself handle it
|
||||
match platform {
|
||||
Platform::Web => {
|
||||
// Route file service to output the .wasm and assets if this is a web build
|
||||
let base_path = format!(
|
||||
"/{}",
|
||||
config
|
||||
.dioxus_config
|
||||
.web
|
||||
.app
|
||||
.base_path
|
||||
.as_deref()
|
||||
.unwrap_or_default()
|
||||
.trim_matches('/')
|
||||
);
|
||||
|
||||
router = router.nest_service(&base_path, build_serve_dir(serve, config));
|
||||
}
|
||||
|
||||
if args.should_proxy_build() {
|
||||
// For fullstack, liveview, and server, forward all requests to the inner server
|
||||
Platform::Liveview | Platform::Fullstack | Platform::StaticGeneration => {
|
||||
// For fullstack and static generation, forward all requests to the server
|
||||
let address = fullstack_address.unwrap();
|
||||
|
||||
router = router.nest_service("/",super::proxy::proxy_to(
|
||||
format!("http://{address}").parse().unwrap(),
|
||||
true,
|
||||
|
@ -351,69 +401,54 @@ impl DevServer {
|
|||
.unwrap()
|
||||
},
|
||||
));
|
||||
} else {
|
||||
// Otherwise, just serve the dir ourselves
|
||||
// Route file service to output the .wasm and assets if this is a web build
|
||||
let base_path = format!(
|
||||
"/{}",
|
||||
krate
|
||||
.dioxus_config
|
||||
.web
|
||||
.app
|
||||
.base_path
|
||||
.as_deref()
|
||||
.unwrap_or_default()
|
||||
.trim_matches('/')
|
||||
);
|
||||
|
||||
router = router.nest_service(&base_path, build_serve_dir(args, krate));
|
||||
}
|
||||
|
||||
// Setup middleware to intercept html requests if the build status is "Building"
|
||||
router = router.layer(middleware::from_fn_with_state(
|
||||
build_status,
|
||||
build_status_middleware,
|
||||
));
|
||||
|
||||
// Setup websocket endpoint - and pass in the extension layer immediately after
|
||||
router = router.nest(
|
||||
"/_dioxus",
|
||||
Router::new()
|
||||
.route(
|
||||
"/",
|
||||
get(
|
||||
|ws: WebSocketUpgrade, ext: Extension<UnboundedSender<WebSocket>>| async move {
|
||||
ws.on_upgrade(move |socket| async move { _ = ext.0.unbounded_send(socket) })
|
||||
},
|
||||
),
|
||||
)
|
||||
.layer(Extension(hot_reload_sockets))
|
||||
.route(
|
||||
"/build_status",
|
||||
get(
|
||||
|ws: WebSocketUpgrade, ext: Extension<UnboundedSender<WebSocket>>| async move {
|
||||
ws.on_upgrade(move |socket| async move { _ = ext.0.unbounded_send(socket) })
|
||||
},
|
||||
),
|
||||
)
|
||||
.layer(Extension(build_status_sockets)),
|
||||
);
|
||||
|
||||
// Setup cors
|
||||
router = router.layer(
|
||||
CorsLayer::new()
|
||||
// allow `GET` and `POST` when accessing the resource
|
||||
.allow_methods([Method::GET, Method::POST])
|
||||
// allow requests from any origin
|
||||
.allow_origin(Any)
|
||||
.allow_headers(Any),
|
||||
);
|
||||
|
||||
Ok(router)
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// Setup middleware to intercept html requests if the build status is "Building"
|
||||
router = router.layer(middleware::from_fn_with_state(
|
||||
build_status,
|
||||
build_status_middleware,
|
||||
));
|
||||
|
||||
// Setup websocket endpoint - and pass in the extension layer immediately after
|
||||
router = router.nest(
|
||||
"/_dioxus",
|
||||
Router::new()
|
||||
.route(
|
||||
"/",
|
||||
get(
|
||||
|ws: WebSocketUpgrade, ext: Extension<UnboundedSender<WebSocket>>| async move {
|
||||
ws.on_upgrade(move |socket| async move { _ = ext.0.unbounded_send(socket) })
|
||||
},
|
||||
),
|
||||
)
|
||||
.layer(Extension(hot_reload_sockets))
|
||||
.route(
|
||||
"/build_status",
|
||||
get(
|
||||
|ws: WebSocketUpgrade, ext: Extension<UnboundedSender<WebSocket>>| async move {
|
||||
ws.on_upgrade(move |socket| async move { _ = ext.0.unbounded_send(socket) })
|
||||
},
|
||||
),
|
||||
)
|
||||
.layer(Extension(build_status_sockets)),
|
||||
);
|
||||
|
||||
// Setup cors
|
||||
router = router.layer(
|
||||
CorsLayer::new()
|
||||
// allow `GET` and `POST` when accessing the resource
|
||||
.allow_methods([Method::GET, Method::POST])
|
||||
// allow requests from any origin
|
||||
.allow_origin(Any)
|
||||
.allow_headers(Any),
|
||||
);
|
||||
|
||||
Ok(router)
|
||||
}
|
||||
|
||||
fn build_serve_dir(args: &ServeArgs, cfg: &DioxusCrate) -> axum::routing::MethodRouter {
|
||||
fn build_serve_dir(serve: &Serve, cfg: &DioxusCrate) -> axum::routing::MethodRouter {
|
||||
static CORS_UNSAFE: (HeaderValue, HeaderValue) = (
|
||||
HeaderValue::from_static("unsafe-none"),
|
||||
HeaderValue::from_static("unsafe-none"),
|
||||
|
@ -424,12 +459,12 @@ fn build_serve_dir(args: &ServeArgs, cfg: &DioxusCrate) -> axum::routing::Method
|
|||
HeaderValue::from_static("same-origin"),
|
||||
);
|
||||
|
||||
let (coep, coop) = match args.cross_origin_policy {
|
||||
let (coep, coop) = match serve.server_arguments.cross_origin_policy {
|
||||
true => CORS_REQUIRE.clone(),
|
||||
false => CORS_UNSAFE.clone(),
|
||||
};
|
||||
|
||||
let out_dir = cfg.workdir(Platform::Web);
|
||||
let out_dir = cfg.out_dir();
|
||||
let index_on_404 = cfg.dioxus_config.web.watcher.index_on_404;
|
||||
|
||||
get_service(
|
||||
|
@ -476,20 +511,20 @@ fn no_cache(
|
|||
response
|
||||
}
|
||||
|
||||
pub(crate) fn insert_no_cache_headers(headers: &mut HeaderMap) {
|
||||
pub fn insert_no_cache_headers(headers: &mut HeaderMap) {
|
||||
headers.insert(CACHE_CONTROL, HeaderValue::from_static("no-cache"));
|
||||
headers.insert(PRAGMA, HeaderValue::from_static("no-cache"));
|
||||
headers.insert(EXPIRES, HeaderValue::from_static("0"));
|
||||
}
|
||||
|
||||
/// Returns an enum of rustls config
|
||||
pub(crate) async fn get_rustls(web_config: &WebHttpsConfig) -> Result<Option<RustlsConfig>> {
|
||||
pub async fn get_rustls(web_config: &WebHttpsConfig) -> Result<Option<RustlsConfig>> {
|
||||
if web_config.enabled != Some(true) {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let (cert_path, key_path) = match web_config.mkcert {
|
||||
Some(true) => get_rustls_with_mkcert(web_config).await?,
|
||||
Some(true) => get_rustls_with_mkcert(web_config)?,
|
||||
_ => get_rustls_without_mkcert(web_config)?,
|
||||
};
|
||||
|
||||
|
@ -498,9 +533,7 @@ pub(crate) async fn get_rustls(web_config: &WebHttpsConfig) -> Result<Option<Rus
|
|||
))
|
||||
}
|
||||
|
||||
pub(crate) async fn get_rustls_with_mkcert(
|
||||
web_config: &WebHttpsConfig,
|
||||
) -> Result<(String, String)> {
|
||||
pub fn get_rustls_with_mkcert(web_config: &WebHttpsConfig) -> Result<(String, String)> {
|
||||
const DEFAULT_KEY_PATH: &str = "ssl/key.pem";
|
||||
const DEFAULT_CERT_PATH: &str = "ssl/cert.pem";
|
||||
|
||||
|
@ -546,14 +579,14 @@ pub(crate) async fn get_rustls_with_mkcert(
|
|||
return Err("failed to generate mkcert certificates".into());
|
||||
}
|
||||
Ok(mut cmd) => {
|
||||
cmd.wait().await?;
|
||||
cmd.wait()?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok((cert_path, key_path))
|
||||
}
|
||||
|
||||
pub(crate) fn get_rustls_without_mkcert(web_config: &WebHttpsConfig) -> Result<(String, String)> {
|
||||
pub fn get_rustls_without_mkcert(web_config: &WebHttpsConfig) -> Result<(String, String)> {
|
||||
// get paths to cert & key
|
||||
if let (Some(key), Some(cert)) = (web_config.key_path.clone(), web_config.cert_path.clone()) {
|
||||
Ok((cert, key))
|
||||
|
@ -617,37 +650,3 @@ async fn send_build_status_to(
|
|||
let msg = serde_json::to_string(&build_status.get()).unwrap();
|
||||
socket.send(Message::Text(msg)).await
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(tag = "type", content = "data")]
|
||||
enum Status {
|
||||
ClientInit {
|
||||
application_name: String,
|
||||
platform: String,
|
||||
},
|
||||
Building {
|
||||
progress: f64,
|
||||
build_message: String,
|
||||
},
|
||||
BuildError {
|
||||
error: String,
|
||||
},
|
||||
Ready,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct SharedStatus(Arc<RwLock<Status>>);
|
||||
|
||||
impl SharedStatus {
|
||||
fn new(status: Status) -> Self {
|
||||
Self(Arc::new(RwLock::new(status)))
|
||||
}
|
||||
|
||||
fn set(&self, status: Status) {
|
||||
*self.0.write().unwrap() = status;
|
||||
}
|
||||
|
||||
fn get(&self) -> Status {
|
||||
self.0.read().unwrap().clone()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,104 +0,0 @@
|
|||
use futures_channel::mpsc::{unbounded, UnboundedReceiver, UnboundedSender};
|
||||
use futures_util::StreamExt;
|
||||
use once_cell::sync::OnceCell;
|
||||
use std::{
|
||||
env, io,
|
||||
sync::{
|
||||
atomic::{AtomicBool, Ordering},
|
||||
Mutex,
|
||||
},
|
||||
};
|
||||
use tracing_subscriber::{prelude::*, EnvFilter};
|
||||
const LOG_ENV: &str = "DIOXUS_LOG";
|
||||
|
||||
use super::ServeUpdate;
|
||||
|
||||
static TUI_ENABLED: AtomicBool = AtomicBool::new(false);
|
||||
static TUI_TX: OnceCell<UnboundedSender<String>> = OnceCell::new();
|
||||
|
||||
pub(crate) struct TraceController {
|
||||
pub(crate) tui_rx: UnboundedReceiver<String>,
|
||||
}
|
||||
|
||||
impl TraceController {
|
||||
pub(crate) fn initialize() {
|
||||
// Start a tracing instance just for serving.
|
||||
// This ensures that any tracing we do while serving doesn't break the TUI itself, and instead is
|
||||
// redirected to the serve process.
|
||||
// If {LOG_ENV} is set, default to env, otherwise filter to cli
|
||||
// and manganis warnings and errors from other crates
|
||||
let mut filter = EnvFilter::new("error,dx=info,devdx=info,dioxus-cli=info");
|
||||
|
||||
if env::var(LOG_ENV).is_ok() {
|
||||
filter = EnvFilter::from_env(LOG_ENV);
|
||||
}
|
||||
|
||||
let sub = tracing_subscriber::registry().with(
|
||||
tracing_subscriber::fmt::layer()
|
||||
.with_writer(Mutex::new(Writer {
|
||||
stdout: io::stdout(),
|
||||
}))
|
||||
.with_filter(filter),
|
||||
);
|
||||
|
||||
#[cfg(feature = "tokio-console")]
|
||||
let sub = sub.with(console_subscriber::spawn());
|
||||
|
||||
sub.init();
|
||||
}
|
||||
|
||||
pub(crate) fn start() -> Self {
|
||||
// Create writer controller and custom writer.
|
||||
let (tui_tx, tui_rx) = unbounded();
|
||||
TUI_TX.set(tui_tx.clone()).unwrap();
|
||||
TUI_ENABLED.store(true, Ordering::SeqCst);
|
||||
|
||||
Self { tui_rx }
|
||||
}
|
||||
|
||||
/// Wait for the internal logger to send a message
|
||||
pub(crate) async fn wait(&mut self) -> ServeUpdate {
|
||||
ServeUpdate::TracingLog {
|
||||
log: self.tui_rx.next().await.expect("tracer should never die"),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn shutdown(&self) {
|
||||
TUI_ENABLED.store(false, Ordering::SeqCst);
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents the CLI's custom tracing writer for conditionally writing logs between outputs.
|
||||
struct Writer {
|
||||
stdout: io::Stdout,
|
||||
}
|
||||
|
||||
// Implement a conditional writer so that logs are routed to the appropriate place.
|
||||
impl io::Write for Writer {
|
||||
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
|
||||
if TUI_ENABLED.load(Ordering::SeqCst) {
|
||||
let len = buf.len();
|
||||
|
||||
let as_string = String::from_utf8(buf.to_vec())
|
||||
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
|
||||
|
||||
TUI_TX
|
||||
.get()
|
||||
.unwrap()
|
||||
.unbounded_send(as_string)
|
||||
.map_err(|e| io::Error::new(io::ErrorKind::BrokenPipe, e))?;
|
||||
|
||||
Ok(len)
|
||||
} else {
|
||||
self.stdout.write(buf)
|
||||
}
|
||||
}
|
||||
|
||||
fn flush(&mut self) -> io::Result<()> {
|
||||
if !TUI_ENABLED.load(Ordering::SeqCst) {
|
||||
self.stdout.flush()
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,49 +0,0 @@
|
|||
use crate::builder::{BuildUpdate, Platform};
|
||||
use axum::extract::ws::Message as WsMessage;
|
||||
use std::{path::PathBuf, process::ExitStatus};
|
||||
|
||||
/// One fat enum to rule them all....
|
||||
///
|
||||
/// Thanks to libraries like winit for the inspiration
|
||||
pub(crate) enum ServeUpdate {
|
||||
NewConnection,
|
||||
WsMessage(WsMessage),
|
||||
|
||||
/// A build update from the build engine
|
||||
BuildUpdate(BuildUpdate),
|
||||
|
||||
/// A running process has received a stdout.
|
||||
/// May or may not be a complete line - do not treat it as a line. It will include a line if it is a complete line.
|
||||
///
|
||||
/// We will poll lines and any content in a 50ms interval
|
||||
StdoutReceived {
|
||||
platform: Platform,
|
||||
msg: String,
|
||||
},
|
||||
|
||||
/// A running process has received a stderr.
|
||||
/// May or may not be a complete line - do not treat it as a line. It will include a line if it is a complete line.
|
||||
///
|
||||
/// We will poll lines and any content in a 50ms interval
|
||||
StderrReceived {
|
||||
platform: Platform,
|
||||
msg: String,
|
||||
},
|
||||
|
||||
ProcessExited {
|
||||
platform: Platform,
|
||||
status: ExitStatus,
|
||||
},
|
||||
|
||||
FilesChanged {
|
||||
files: Vec<PathBuf>,
|
||||
},
|
||||
|
||||
TuiInput {
|
||||
event: crossterm::event::Event,
|
||||
},
|
||||
|
||||
TracingLog {
|
||||
log: String,
|
||||
},
|
||||
}
|
|
@ -1,8 +1,11 @@
|
|||
use super::{detect::is_wsl, AppRunner};
|
||||
use super::{hot_reloading_file_map::HotreloadError, update::ServeUpdate};
|
||||
use crate::{cli::serve::ServeArgs, dioxus_crate::DioxusCrate};
|
||||
use crate::{serve::hot_reloading_file_map::FileMap, TraceSrc};
|
||||
use dioxus_devtools_types::HotReloadMsg;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::{fs, path::PathBuf, time::Duration};
|
||||
|
||||
use super::hot_reloading_file_map::HotreloadError;
|
||||
use crate::serve::hot_reloading_file_map::FileMap;
|
||||
use crate::TraceSrc;
|
||||
use crate::{cli::serve::Serve, dioxus_crate::DioxusCrate};
|
||||
use dioxus_hot_reload::HotReloadMsg;
|
||||
use dioxus_html::HtmlCtx;
|
||||
use futures_channel::mpsc::{UnboundedReceiver, UnboundedSender};
|
||||
use futures_util::StreamExt;
|
||||
|
@ -11,44 +14,41 @@ use notify::{
|
|||
event::{MetadataKind, ModifyKind},
|
||||
Config, EventKind,
|
||||
};
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::{path::PathBuf, time::Duration};
|
||||
|
||||
/// This struct stores the file watcher and the filemap for the project.
|
||||
///
|
||||
/// This is where we do workspace discovery and recursively listen for changes in Rust files and asset
|
||||
/// directories.
|
||||
pub(crate) struct Watcher {
|
||||
pub struct Watcher {
|
||||
_tx: UnboundedSender<notify::Event>,
|
||||
rx: UnboundedReceiver<notify::Event>,
|
||||
krate: DioxusCrate,
|
||||
_last_update_time: i64,
|
||||
_watcher: Box<dyn notify::Watcher>,
|
||||
queued_events: Vec<notify::Event>,
|
||||
file_map: FileMap,
|
||||
ignore: Gitignore,
|
||||
applied_hot_reload_message: Option<HotReloadMsg>,
|
||||
_tx: UnboundedSender<notify::Event>,
|
||||
_last_update_time: i64,
|
||||
_watcher: Box<dyn notify::Watcher>,
|
||||
}
|
||||
|
||||
impl Watcher {
|
||||
pub(crate) fn start(serve: &ServeArgs, krate: &DioxusCrate) -> Self {
|
||||
pub fn start(serve: &Serve, config: &DioxusCrate) -> Self {
|
||||
let (tx, rx) = futures_channel::mpsc::unbounded();
|
||||
|
||||
// Extend the watch path to include:
|
||||
// - the assets directory - this is so we can hotreload CSS and other assets by default
|
||||
// - the Cargo.toml file - this is so we can hotreload the project if the user changes dependencies
|
||||
// - the Dioxus.toml file - this is so we can hotreload the project if the user changes the Dioxus config
|
||||
let mut allow_watch_path = krate.dioxus_config.web.watcher.watch_path.clone();
|
||||
allow_watch_path.push(krate.dioxus_config.application.asset_dir.clone());
|
||||
let mut allow_watch_path = config.dioxus_config.web.watcher.watch_path.clone();
|
||||
allow_watch_path.push(config.dioxus_config.application.asset_dir.clone());
|
||||
allow_watch_path.push("Cargo.toml".to_string().into());
|
||||
allow_watch_path.push("Dioxus.toml".to_string().into());
|
||||
allow_watch_path.push("assets".to_string().into());
|
||||
allow_watch_path.dedup();
|
||||
|
||||
let crate_dir = krate.crate_dir();
|
||||
let crate_dir = config.crate_dir();
|
||||
let mut builder = ignore::gitignore::GitignoreBuilder::new(&crate_dir);
|
||||
builder.add(crate_dir.join(".gitignore"));
|
||||
|
||||
let out_dir = krate.out_dir();
|
||||
let out_dir = config.out_dir();
|
||||
let out_dir_str = out_dir.display().to_string();
|
||||
|
||||
let excluded_paths = vec![
|
||||
|
@ -86,8 +86,9 @@ impl Watcher {
|
|||
// Create the file watcher.
|
||||
let mut watcher: Box<dyn notify::Watcher> = match is_wsl {
|
||||
true => {
|
||||
let poll_interval =
|
||||
Duration::from_secs(serve.wsl_file_poll_interval.unwrap_or(2) as u64);
|
||||
let poll_interval = Duration::from_secs(
|
||||
serve.server_arguments.wsl_file_poll_interval.unwrap_or(2) as u64,
|
||||
);
|
||||
|
||||
Box::new(
|
||||
notify::PollWatcher::new(
|
||||
|
@ -105,7 +106,7 @@ impl Watcher {
|
|||
// Watch the specified paths
|
||||
// todo: make sure we don't double-watch paths if they're nested
|
||||
for sub_path in allow_watch_path {
|
||||
let path = &krate.crate_dir().join(sub_path);
|
||||
let path = &config.crate_dir().join(sub_path);
|
||||
|
||||
// If the path is ignored, don't watch it
|
||||
if ignore.matched(path, path.is_dir()).is_ignore() {
|
||||
|
@ -121,38 +122,48 @@ impl Watcher {
|
|||
|
||||
// Probe the entire project looking for our rsx calls
|
||||
// Whenever we get an update from the file watcher, we'll try to hotreload against this file map
|
||||
let file_map = FileMap::create_with_filter::<HtmlCtx>(krate.crate_dir(), |path| {
|
||||
let file_map = FileMap::create_with_filter::<HtmlCtx>(config.crate_dir(), |path| {
|
||||
ignore.matched(path, path.is_dir()).is_ignore()
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
Self {
|
||||
_tx: tx,
|
||||
krate: krate.clone(),
|
||||
rx,
|
||||
_watcher: watcher,
|
||||
file_map,
|
||||
ignore,
|
||||
queued_events: Vec::new(),
|
||||
_last_update_time: chrono::Local::now().timestamp(),
|
||||
applied_hot_reload_message: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Wait for changed files to be detected
|
||||
pub(crate) async fn wait(&mut self) -> ServeUpdate {
|
||||
// Wait for the next file to change
|
||||
let mut changes: Vec<_> = self.rx.next().await.into_iter().collect();
|
||||
|
||||
// Dequeue in bulk if we can, we might've received a lot of events in one go
|
||||
while let Some(event) = self.rx.try_next().ok().flatten() {
|
||||
changes.push(event);
|
||||
/// A cancel safe handle to the file watcher
|
||||
///
|
||||
/// todo: this should be simpler logic?
|
||||
pub async fn wait(&mut self) {
|
||||
// Pull off any queued events in succession
|
||||
while let Ok(Some(event)) = self.rx.try_next() {
|
||||
self.queued_events.push(event);
|
||||
}
|
||||
|
||||
// Filter the changes
|
||||
if !self.queued_events.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
// If there are no queued events, wait for the next event
|
||||
if let Some(event) = self.rx.next().await {
|
||||
self.queued_events.push(event);
|
||||
}
|
||||
}
|
||||
|
||||
/// Deques changed files from the event queue, doing the proper intelligent filtering
|
||||
pub fn dequeue_changed_files(&mut self, config: &DioxusCrate) -> Vec<PathBuf> {
|
||||
let mut all_mods: Vec<PathBuf> = vec![];
|
||||
|
||||
// Decompose the events into a list of all the files that have changed
|
||||
for event in changes.drain(..) {
|
||||
for event in self.queued_events.drain(..) {
|
||||
// We only care about certain events.
|
||||
if !is_allowed_notify_event(&event) {
|
||||
continue;
|
||||
|
@ -163,8 +174,18 @@ impl Watcher {
|
|||
}
|
||||
}
|
||||
|
||||
// Collect the files that have changed
|
||||
let mut files = vec![];
|
||||
let mut modified_files = vec![];
|
||||
|
||||
// For the non-rust files, we want to check if it's an asset file
|
||||
// This would mean the asset lives somewhere under the /assets directory or is referenced by magnanis in the linker
|
||||
// todo: mg integration here
|
||||
let _asset_dir = config
|
||||
.dioxus_config
|
||||
.application
|
||||
.asset_dir
|
||||
.canonicalize()
|
||||
.ok();
|
||||
|
||||
for path in all_mods.iter() {
|
||||
if path.extension().is_none() {
|
||||
continue;
|
||||
|
@ -182,35 +203,34 @@ impl Watcher {
|
|||
|
||||
// If the extension is a backup file, or a hidden file, ignore it completely (no rebuilds)
|
||||
if is_backup_file(path.to_path_buf()) {
|
||||
tracing::trace!("Ignoring backup file: {:?}", path);
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the path is ignored, don't watch it
|
||||
if self.ignore.matched(path, path.is_dir()).is_ignore() {
|
||||
tracing::info!("Ignoring update to file: {:?}", path);
|
||||
continue;
|
||||
}
|
||||
|
||||
tracing::info!("Enqueuing hotreload update to file: {:?}", path);
|
||||
|
||||
files.push(path.clone());
|
||||
modified_files.push(path.clone());
|
||||
}
|
||||
|
||||
ServeUpdate::FilesChanged { files }
|
||||
modified_files
|
||||
}
|
||||
|
||||
pub(crate) fn attempt_hot_reload(
|
||||
pub fn attempt_hot_reload(
|
||||
&mut self,
|
||||
config: &DioxusCrate,
|
||||
modified_files: Vec<PathBuf>,
|
||||
runner: &AppRunner,
|
||||
) -> Option<HotReloadMsg> {
|
||||
// If we have any changes to the rust files, we need to update the file map
|
||||
let crate_dir = self.krate.crate_dir();
|
||||
let crate_dir = config.crate_dir();
|
||||
let mut templates = vec![];
|
||||
|
||||
// Prepare the hotreload message we need to send
|
||||
let mut edited_rust_files = Vec::new();
|
||||
let mut assets = Vec::new();
|
||||
let mut unknown_files = Vec::new();
|
||||
|
||||
for path in modified_files {
|
||||
// for various assets that might be linked in, we just try to hotreloading them forcefully
|
||||
|
@ -221,35 +241,22 @@ impl Watcher {
|
|||
|
||||
match ext {
|
||||
"rs" => edited_rust_files.push(path),
|
||||
|
||||
// Look through the runners to see if any of them have an asset that matches the path
|
||||
_ => {
|
||||
for runner in runner.running.values() {
|
||||
if let Some(bundled_name) = runner.hotreload_asset(&path) {
|
||||
assets.push(bundled_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ if path.starts_with("assets") => assets.push(path),
|
||||
_ => unknown_files.push(path),
|
||||
}
|
||||
}
|
||||
|
||||
assets.dedup();
|
||||
|
||||
// Process the rust files
|
||||
for rust_file in edited_rust_files {
|
||||
match self.file_map.update_rsx::<HtmlCtx>(&rust_file, &crate_dir) {
|
||||
Ok(hotreloaded_templates) => {
|
||||
templates.extend(hotreloaded_templates);
|
||||
}
|
||||
|
||||
// If the file is not reloadable, we need to rebuild
|
||||
Err(HotreloadError::Notreloadable) => return None,
|
||||
|
||||
// The rust file may have failed to parse, but that is most likely
|
||||
// because the user is in the middle of adding new code
|
||||
// We just ignore the error and let Rust analyzer warn about the problem
|
||||
Err(HotreloadError::Parse) => {}
|
||||
|
||||
// Otherwise just log the error
|
||||
Err(err) => {
|
||||
tracing::error!(dx_src = ?TraceSrc::Dev, "Error hotreloading file {rust_file:?}: {err}")
|
||||
|
@ -257,7 +264,11 @@ impl Watcher {
|
|||
}
|
||||
}
|
||||
|
||||
let msg = HotReloadMsg { templates, assets };
|
||||
let msg = HotReloadMsg {
|
||||
templates,
|
||||
assets,
|
||||
unknown_files,
|
||||
};
|
||||
|
||||
self.add_hot_reload_message(&msg);
|
||||
|
||||
|
@ -265,37 +276,50 @@ impl Watcher {
|
|||
}
|
||||
|
||||
/// Get any hot reload changes that have been applied since the last full rebuild
|
||||
pub(crate) fn applied_hot_reload_changes(&mut self) -> Option<HotReloadMsg> {
|
||||
pub fn applied_hot_reload_changes(&mut self) -> Option<HotReloadMsg> {
|
||||
self.applied_hot_reload_message.clone()
|
||||
}
|
||||
|
||||
/// Clear the hot reload changes. This should be called any time a new build is starting
|
||||
pub(crate) fn clear_hot_reload_changes(&mut self) {
|
||||
pub fn clear_hot_reload_changes(&mut self) {
|
||||
self.applied_hot_reload_message.take();
|
||||
}
|
||||
|
||||
/// Store the hot reload changes for any future clients that connect
|
||||
fn add_hot_reload_message(&mut self, msg: &HotReloadMsg) {
|
||||
let Some(applied) = &mut self.applied_hot_reload_message else {
|
||||
self.applied_hot_reload_message = Some(msg.clone());
|
||||
return;
|
||||
};
|
||||
|
||||
// Merge the assets, unknown files, and templates
|
||||
// We keep the newer change if there is both a old and new change
|
||||
let mut templates: HashMap<String, _> = std::mem::take(&mut applied.templates)
|
||||
.into_iter()
|
||||
.map(|template| (template.location.clone(), template))
|
||||
.collect();
|
||||
let mut assets: HashSet<PathBuf> =
|
||||
std::mem::take(&mut applied.assets).into_iter().collect();
|
||||
for template in &msg.templates {
|
||||
templates.insert(template.location.clone(), template.clone());
|
||||
match &mut self.applied_hot_reload_message {
|
||||
Some(applied) => {
|
||||
// Merge the assets, unknown files, and templates
|
||||
// We keep the newer change if there is both a old and new change
|
||||
let mut templates: HashMap<String, _> = std::mem::take(&mut applied.templates)
|
||||
.into_iter()
|
||||
.map(|template| (template.location.clone(), template))
|
||||
.collect();
|
||||
let mut assets: HashSet<PathBuf> =
|
||||
std::mem::take(&mut applied.assets).into_iter().collect();
|
||||
let mut unknown_files: HashSet<PathBuf> =
|
||||
std::mem::take(&mut applied.unknown_files)
|
||||
.into_iter()
|
||||
.collect();
|
||||
for template in &msg.templates {
|
||||
templates.insert(template.location.clone(), template.clone());
|
||||
}
|
||||
assets.extend(msg.assets.iter().cloned());
|
||||
unknown_files.extend(msg.unknown_files.iter().cloned());
|
||||
applied.templates = templates.into_values().collect();
|
||||
applied.assets = assets.into_iter().collect();
|
||||
applied.unknown_files = unknown_files.into_iter().collect();
|
||||
}
|
||||
None => {
|
||||
self.applied_hot_reload_message = Some(msg.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assets.extend(msg.assets.iter().cloned());
|
||||
applied.templates = templates.into_values().collect();
|
||||
applied.assets = assets.into_iter().collect();
|
||||
/// Ensure the changes we've received from the queue are actually legit changes to either assets or
|
||||
/// rust code. We don't care about changes otherwise, unless we get a signal elsewhere to do a full rebuild
|
||||
pub fn pending_changes(&mut self) -> bool {
|
||||
!self.queued_events.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -323,7 +347,7 @@ fn is_backup_file(path: PathBuf) -> bool {
|
|||
|
||||
/// Tests if the provided [`notify::Event`] is something we listen to so we can avoid unescessary hot reloads.
|
||||
fn is_allowed_notify_event(event: ¬ify::Event) -> bool {
|
||||
let allowed = match event.kind {
|
||||
match event.kind {
|
||||
EventKind::Modify(ModifyKind::Data(_)) => true,
|
||||
EventKind::Modify(ModifyKind::Name(_)) => true,
|
||||
EventKind::Create(_) => true,
|
||||
|
@ -334,11 +358,40 @@ fn is_allowed_notify_event(event: ¬ify::Event) -> bool {
|
|||
EventKind::Modify(ModifyKind::Any) => true,
|
||||
// Don't care about anything else.
|
||||
_ => false,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
tracing::info!("is_allowed_notify_event: {allowed:?} for {event:#?}");
|
||||
const WSL_1: &str = "/proc/sys/kernel/osrelease";
|
||||
const WSL_2: &str = "/proc/version";
|
||||
const WSL_KEYWORDS: [&str; 2] = ["microsoft", "wsl"];
|
||||
|
||||
allowed
|
||||
/// Detects if `dx` is being ran in a WSL environment.
|
||||
///
|
||||
/// We determine this based on whether the keyword `microsoft` or `wsl` is contained within the [`WSL_1`] or [`WSL_2`] files.
|
||||
/// This may fail in the future as it isn't guaranteed by Microsoft.
|
||||
/// See https://github.com/microsoft/WSL/issues/423#issuecomment-221627364
|
||||
fn is_wsl() -> bool {
|
||||
// Test 1st File
|
||||
if let Ok(content) = fs::read_to_string(WSL_1) {
|
||||
let lowercase = content.to_lowercase();
|
||||
for keyword in WSL_KEYWORDS {
|
||||
if lowercase.contains(keyword) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Test 2nd File
|
||||
if let Ok(content) = fs::read_to_string(WSL_2) {
|
||||
let lowercase = content.to_lowercase();
|
||||
for keyword in WSL_KEYWORDS {
|
||||
if lowercase.contains(keyword) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -18,26 +18,26 @@ const GLOBAL_SETTINGS_FILE_NAME: &str = "dioxus/settings.toml";
|
|||
///
|
||||
/// This allows users to control the cli settings with ease.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub(crate) struct CliSettings {
|
||||
pub struct CliSettings {
|
||||
/// Describes whether hot reload should always be on.
|
||||
pub(crate) always_hot_reload: Option<bool>,
|
||||
pub always_hot_reload: Option<bool>,
|
||||
/// Describes whether the CLI should always open the browser for Web targets.
|
||||
pub(crate) always_open_browser: Option<bool>,
|
||||
pub always_open_browser: Option<bool>,
|
||||
/// Describes whether desktop apps in development will be pinned always-on-top.
|
||||
pub(crate) always_on_top: Option<bool>,
|
||||
pub always_on_top: Option<bool>,
|
||||
/// Describes the interval in seconds that the CLI should poll for file changes on WSL.
|
||||
#[serde(default = "default_wsl_file_poll_interval")]
|
||||
pub(crate) wsl_file_poll_interval: Option<u16>,
|
||||
pub wsl_file_poll_interval: Option<u16>,
|
||||
}
|
||||
|
||||
impl CliSettings {
|
||||
/// Load the settings from the local, global, or default config in that order
|
||||
pub(crate) fn load() -> Self {
|
||||
pub fn load() -> Self {
|
||||
Self::from_global().unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Get the current settings structure from global.
|
||||
pub(crate) fn from_global() -> Option<Self> {
|
||||
pub fn from_global() -> Option<Self> {
|
||||
let Some(path) = dirs::data_local_dir() else {
|
||||
warn!("failed to get local data directory, some config keys may be missing");
|
||||
return None;
|
||||
|
@ -63,7 +63,7 @@ impl CliSettings {
|
|||
|
||||
/// Save the current structure to the global settings toml.
|
||||
/// This does not save to project-level settings.
|
||||
pub(crate) fn save(self) -> Result<Self, CrateConfigError> {
|
||||
pub fn save(self) -> Result<Self, CrateConfigError> {
|
||||
let path = Self::get_settings_path().ok_or_else(|| {
|
||||
error!(dx_src = ?TraceSrc::Dev, "failed to get settings path");
|
||||
CrateConfigError::Io(Error::new(
|
||||
|
@ -100,7 +100,7 @@ impl CliSettings {
|
|||
}
|
||||
|
||||
/// Get the path to the settings toml file.
|
||||
pub(crate) fn get_settings_path() -> Option<PathBuf> {
|
||||
pub fn get_settings_path() -> Option<PathBuf> {
|
||||
let Some(path) = dirs::data_local_dir() else {
|
||||
warn!("failed to get local data directory, some config keys may be missing");
|
||||
return None;
|
||||
|
@ -110,9 +110,7 @@ impl CliSettings {
|
|||
}
|
||||
|
||||
/// Modify the settings toml file
|
||||
pub(crate) fn modify_settings(
|
||||
with: impl FnOnce(&mut CliSettings),
|
||||
) -> Result<(), CrateConfigError> {
|
||||
pub fn modify_settings(with: impl FnOnce(&mut CliSettings)) -> Result<(), CrateConfigError> {
|
||||
let mut settings = Self::load();
|
||||
with(&mut settings);
|
||||
settings.save()?;
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
|
|
@ -1,5 +0,0 @@
|
|||
mod android;
|
||||
mod ios;
|
||||
mod mac;
|
||||
mod web;
|
||||
mod wsl;
|
|
@ -1,63 +0,0 @@
|
|||
use crate::{Error, Result};
|
||||
use tokio::process::Command;
|
||||
|
||||
pub struct ToolingProvider {}
|
||||
|
||||
impl ToolingProvider {
|
||||
/// Check if the wasm32-unknown-unknown target is installed and try to install it if not
|
||||
pub(crate) async fn install_web_build_tooling(&self) -> Result<()> {
|
||||
// If the user has rustup, we can check if the wasm32-unknown-unknown target is installed
|
||||
// Otherwise we can just assume it is installed - which is not great...
|
||||
// Eventually we can poke at the errors and let the user know they need to install the target
|
||||
if let Ok(wasm_check_command) = Command::new("rustup").args(["show"]).output().await {
|
||||
let wasm_check_output = String::from_utf8(wasm_check_command.stdout).unwrap();
|
||||
if !wasm_check_output.contains("wasm32-unknown-unknown") {
|
||||
// _ = self.progress.unbounded_send(BuildUpdateProgress {
|
||||
// stage: Stage::InstallingWasmTooling,
|
||||
// update: UpdateStage::Start,
|
||||
// platform: self.platform(),
|
||||
// });
|
||||
tracing::info!("wasm32-unknown-unknown target not detected, installing..");
|
||||
let _ = Command::new("rustup")
|
||||
.args(["target", "add", "wasm32-unknown-unknown"])
|
||||
.output()
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Attempt to automatically recover from a bindgen failure by updating the wasm-bindgen version
|
||||
pub(crate) async fn update_wasm_bindgen_version() -> Result<()> {
|
||||
let cli_bindgen_version = wasm_bindgen_shared::version();
|
||||
tracing::info!("Attempting to recover from bindgen failure by setting the wasm-bindgen version to {cli_bindgen_version}...");
|
||||
|
||||
let output = Command::new("cargo")
|
||||
.args([
|
||||
"update",
|
||||
"-p",
|
||||
"wasm-bindgen",
|
||||
"--precise",
|
||||
&cli_bindgen_version,
|
||||
])
|
||||
.output()
|
||||
.await;
|
||||
|
||||
let mut error_message = None;
|
||||
if let Ok(output) = output {
|
||||
if output.status.success() {
|
||||
tracing::info!("Successfully updated wasm-bindgen to {cli_bindgen_version}");
|
||||
return Ok(());
|
||||
} else {
|
||||
error_message = Some(output);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(output) = error_message {
|
||||
tracing::error!("Failed to update wasm-bindgen: {:#?}", output);
|
||||
}
|
||||
|
||||
Err(Error::BuildFailed(format!("WASM bindgen build failed!\nThis is probably due to the Bindgen version, dioxus-cli is using `{cli_bindgen_version}` which is not compatible with your crate.\nPlease reinstall the dioxus cli to fix this issue.\nYou can reinstall the dioxus cli by running `cargo install dioxus-cli --force` and then rebuild your project")))
|
||||
}
|
||||
}
|
|
@ -14,7 +14,7 @@
|
|||
//! 3. Build CLI layer for routing tracing logs to the TUI.
|
||||
//! 4. Build fmt layer for non-interactive logging with a custom writer that prevents output during interactive mode.
|
||||
|
||||
use crate::Platform as TargetPlatform;
|
||||
use crate::builder::TargetPlatform;
|
||||
use console::strip_ansi_codes;
|
||||
use futures_channel::mpsc::{unbounded, UnboundedReceiver, UnboundedSender};
|
||||
use std::fmt::Display;
|
||||
|
@ -404,8 +404,6 @@ impl Display for TraceSrc {
|
|||
TargetPlatform::Desktop => write!(f, "desktop"),
|
||||
TargetPlatform::Server => write!(f, "server"),
|
||||
TargetPlatform::Liveview => write!(f, "server"),
|
||||
TargetPlatform::Ios => write!(f, "ios"),
|
||||
TargetPlatform::Android => write!(f, "android"),
|
||||
},
|
||||
Self::Dev => write!(f, "dev"),
|
||||
Self::Build => write!(f, "build"),
|
||||
|
|
|
@ -1,8 +0,0 @@
|
|||
[package]
|
||||
name = "manganis-core"
|
||||
edition = "2021"
|
||||
version.workspace = true
|
||||
|
||||
[dependencies]
|
||||
serde_json = "1.0"
|
||||
serde = { workspace = true, features = ["derive"] }
|
|
@ -1,119 +0,0 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
hash::{Hash, Hasher},
|
||||
path::PathBuf,
|
||||
time::SystemTime,
|
||||
};
|
||||
|
||||
/// The location we'll write to the link section - needs to be serializable
|
||||
///
|
||||
/// This basically is 1:1 with `manganis/Asset` but with more metadata to be useful to the macro and cli
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
|
||||
pub struct ResourceAsset {
|
||||
/// The input path `/assets/blah.css`
|
||||
pub input: PathBuf,
|
||||
|
||||
/// The canonicalized asset
|
||||
///
|
||||
/// `Users/dioxus/dev/app/assets/blah.css`
|
||||
pub absolute: PathBuf,
|
||||
|
||||
/// The post-bundle name of the asset - do we include the `assets` name?
|
||||
///
|
||||
/// `blahcss123.css`
|
||||
pub bundled: String,
|
||||
}
|
||||
|
||||
/// The maximum length of a path segment
|
||||
const MAX_PATH_LENGTH: usize = 128;
|
||||
|
||||
/// The length of the hash in the output path
|
||||
const HASH_SIZE: usize = 16;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct AssetError {}
|
||||
|
||||
impl ResourceAsset {
|
||||
pub fn parse_any(raw: &str) -> Result<Self, AssetError> {
|
||||
// get the location where the asset is absolute, relative to
|
||||
//
|
||||
// IE
|
||||
// /users/dioxus/dev/app/
|
||||
// is the root of
|
||||
// /users/dioxus/dev/app/assets/blah.css
|
||||
let manifest_dir = std::env::var("CARGO_MANIFEST_DIR")
|
||||
.map(PathBuf::from)
|
||||
.unwrap();
|
||||
|
||||
// 1. the input file should be a pathbuf
|
||||
let input = PathBuf::from(raw);
|
||||
|
||||
// 2. absolute path to the asset
|
||||
let absolute = manifest_dir
|
||||
.join(raw.trim_start_matches('/'))
|
||||
.canonicalize()
|
||||
.unwrap();
|
||||
|
||||
// 3. the bundled path is the unique name of the asset
|
||||
let bundled = Self::make_unique_name(absolute.clone());
|
||||
|
||||
Ok(Self {
|
||||
input,
|
||||
absolute,
|
||||
bundled,
|
||||
})
|
||||
}
|
||||
|
||||
fn make_unique_name(file_path: PathBuf) -> String {
|
||||
// Create a hasher
|
||||
let mut hash = std::collections::hash_map::DefaultHasher::new();
|
||||
|
||||
// Open the file to get its options
|
||||
let file = std::fs::File::open(&file_path).unwrap();
|
||||
let metadata = file.metadata().unwrap();
|
||||
let modified = metadata
|
||||
.modified()
|
||||
.unwrap_or_else(|_| SystemTime::UNIX_EPOCH);
|
||||
|
||||
// Hash a bunch of metadata
|
||||
// name, options, modified time, and maybe the version of our crate
|
||||
// Hash the last time the file was updated and the file source. If either of these change, we need to regenerate the unique name
|
||||
modified.hash(&mut hash);
|
||||
file_path.hash(&mut hash);
|
||||
|
||||
let uuid = hash.finish();
|
||||
let extension = file_path
|
||||
.extension()
|
||||
.map(|f| f.to_string_lossy())
|
||||
.map(|e| format!(".{e}"))
|
||||
.unwrap_or_default();
|
||||
let file_name = Self::normalize_file_name(file_path);
|
||||
|
||||
let out = format!("{file_name}{uuid:x}{extension}");
|
||||
assert!(out.len() <= MAX_PATH_LENGTH);
|
||||
out
|
||||
}
|
||||
|
||||
fn normalize_file_name(location: PathBuf) -> String {
|
||||
let file_name = location.file_name().unwrap();
|
||||
let last_segment = file_name.to_string_lossy();
|
||||
let extension = location.extension();
|
||||
let mut file_name = Self::to_alphanumeric_string_lossy(&last_segment);
|
||||
|
||||
let extension_len = extension.map(|e| e.len() + 1).unwrap_or_default();
|
||||
let extension_and_hash_size = extension_len + HASH_SIZE;
|
||||
|
||||
// If the file name is too long, we need to truncate it
|
||||
if file_name.len() + extension_and_hash_size > MAX_PATH_LENGTH {
|
||||
file_name = file_name[..MAX_PATH_LENGTH - extension_and_hash_size].to_string();
|
||||
}
|
||||
|
||||
file_name
|
||||
}
|
||||
|
||||
fn to_alphanumeric_string_lossy(name: &str) -> String {
|
||||
name.chars()
|
||||
.filter(|c| c.is_alphanumeric())
|
||||
.collect::<String>()
|
||||
}
|
||||
}
|
|
@ -1,8 +0,0 @@
|
|||
mod asset;
|
||||
mod folder;
|
||||
mod linker;
|
||||
mod options;
|
||||
|
||||
pub use asset::*;
|
||||
pub use linker::*;
|
||||
pub use options::*;
|
|
@ -1,69 +0,0 @@
|
|||
/// Information about the manganis link section for a given platform
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct LinkSection {
|
||||
/// The link section we pass to the static
|
||||
pub link_section: &'static str,
|
||||
/// The name of the section we find in the binary
|
||||
pub name: &'static str,
|
||||
}
|
||||
|
||||
impl LinkSection {
|
||||
/// The list of link sections for all supported platforms
|
||||
pub const ALL: &'static [&'static LinkSection] =
|
||||
&[Self::WASM, Self::MACOS, Self::WINDOWS, Self::ILLUMOS];
|
||||
|
||||
/// Returns the link section used in linux, android, fuchsia, psp, freebsd, and wasm32
|
||||
pub const WASM: &'static LinkSection = &LinkSection {
|
||||
link_section: "manganis",
|
||||
name: "manganis",
|
||||
};
|
||||
|
||||
/// Returns the link section used in macOS, iOS, tvOS
|
||||
pub const MACOS: &'static LinkSection = &LinkSection {
|
||||
link_section: "__DATA,manganis,regular,no_dead_strip",
|
||||
name: "manganis",
|
||||
};
|
||||
|
||||
/// Returns the link section used in windows
|
||||
pub const WINDOWS: &'static LinkSection = &LinkSection {
|
||||
link_section: "mg",
|
||||
name: "mg",
|
||||
};
|
||||
|
||||
/// Returns the link section used in illumos
|
||||
pub const ILLUMOS: &'static LinkSection = &LinkSection {
|
||||
link_section: "set_manganis",
|
||||
name: "set_manganis",
|
||||
};
|
||||
|
||||
/// The link section used on the current platform
|
||||
pub const CURRENT: &'static LinkSection = {
|
||||
#[cfg(any(
|
||||
target_os = "none",
|
||||
target_os = "linux",
|
||||
target_os = "android",
|
||||
target_os = "fuchsia",
|
||||
target_os = "psp",
|
||||
target_os = "freebsd",
|
||||
target_arch = "wasm32"
|
||||
))]
|
||||
{
|
||||
Self::WASM
|
||||
}
|
||||
|
||||
#[cfg(any(target_os = "macos", target_os = "ios", target_os = "tvos"))]
|
||||
{
|
||||
Self::MACOS
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
Self::WINDOWS
|
||||
}
|
||||
|
||||
#[cfg(target_os = "illumos")]
|
||||
{
|
||||
Self::ILLUMOS
|
||||
}
|
||||
};
|
||||
}
|
|
@ -1,24 +0,0 @@
|
|||
[package]
|
||||
name = "manganis-macro"
|
||||
version.workspace = true
|
||||
edition = "2021"
|
||||
authors = ["Evan Almloff"]
|
||||
description = "Ergonomic, automatic, cross crate asset collection and optimization"
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/DioxusLabs/manganis/"
|
||||
homepage = "https://dioxuslabs.com"
|
||||
keywords = ["assets"]
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
proc-macro2 = { version = "1.0" }
|
||||
quote = "1.0"
|
||||
syn = { version = "2.0", features = ["full", "extra-traits"] }
|
||||
serde_json = "1.0"
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
manganis-core = { workspace = true }
|
||||
|
||||
[features]
|
||||
default = []
|
|
@ -1,3 +0,0 @@
|
|||
# Manganis Macro
|
||||
|
||||
This crate contains the macro used to interact with the Manganis asset system.
|
|
@ -1,139 +0,0 @@
|
|||
use core::panic;
|
||||
use manganis_core::ResourceAsset;
|
||||
use proc_macro::TokenStream;
|
||||
use proc_macro2::Ident;
|
||||
use proc_macro2::TokenStream as TokenStream2;
|
||||
use quote::{quote, quote_spanned, ToTokens};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{collections::HashMap, fs::File, path::Path, sync::atomic::AtomicBool};
|
||||
use std::{path::PathBuf, sync::atomic::Ordering};
|
||||
use syn::{
|
||||
parenthesized,
|
||||
parse::{Parse, ParseStream},
|
||||
parse_macro_input,
|
||||
punctuated::Punctuated,
|
||||
token::Token,
|
||||
Expr, ExprLit, Lit, LitStr, PatLit, Token,
|
||||
};
|
||||
|
||||
use crate::asset_options::MethodCallOption;
|
||||
|
||||
pub struct AssetParser {
|
||||
/// The source of the trailing builder pattern
|
||||
option_source: TokenStream2,
|
||||
|
||||
/// The asset itself
|
||||
asset: ResourceAsset,
|
||||
}
|
||||
|
||||
impl Parse for AssetParser {
|
||||
// we can take
|
||||
//
|
||||
// This gives you the Asset type - it's generic and basically unrefined
|
||||
// ```
|
||||
// asset!("myfile.png")
|
||||
// ```
|
||||
//
|
||||
// To narrow the type, use a method call to get the refined type
|
||||
// ```
|
||||
// asset!(
|
||||
// "myfile.png"
|
||||
// .image()
|
||||
// .format(ImageType::Jpg)
|
||||
// .size(512, 512)
|
||||
// )
|
||||
// ```
|
||||
//
|
||||
// But we need to decide the hint first before parsing the options
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
// Get the source of the macro, excluding the first token
|
||||
let option_source = {
|
||||
let fork = input.fork();
|
||||
fork.parse::<LitStr>()?;
|
||||
fork.parse::<TokenStream2>()?
|
||||
};
|
||||
|
||||
// And then parse the options
|
||||
let src = input.parse::<LitStr>()?;
|
||||
let src = src.value();
|
||||
let resource = ResourceAsset::parse_any(&src).unwrap();
|
||||
|
||||
fn parse_call(input: ParseStream) -> syn::Result<MethodCallOption> {
|
||||
let method = input.parse::<syn::Ident>()?;
|
||||
let content;
|
||||
parenthesized!(content in input);
|
||||
|
||||
let args = Punctuated::<Lit, Token![,]>::parse_separated_nonempty(&content)?;
|
||||
|
||||
Ok(MethodCallOption { method, args })
|
||||
}
|
||||
|
||||
let mut options = vec![];
|
||||
|
||||
while !input.is_empty() {
|
||||
let option = parse_call(input);
|
||||
if let Ok(option) = option {
|
||||
options.push(option);
|
||||
} else {
|
||||
// todo: make sure we toss a warning in the output
|
||||
let _remaining: TokenStream2 = input.parse()?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
option_source,
|
||||
asset: resource,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for AssetParser {
|
||||
// Need to generate:
|
||||
//
|
||||
// - 1. absolute file path on the user's system: `/users/dioxus/dev/project/assets/blah.css`
|
||||
// - 2. original input in case that's useful: `../blah.css`
|
||||
// - 3. path relative to the CARGO_MANIFEST_DIR - and then we'll add a `/`: `/assets/blah.css
|
||||
// - 4. file from which this macro was called: `/users/dioxus/dev/project/src/lib.rs`
|
||||
// - 5: The link section containing all this data
|
||||
// - 6: the input tokens such that the builder gets validated by the const code
|
||||
// - 7: the bundled name `/blahcss123.css`
|
||||
//
|
||||
// Not that we'll use everything, but at least we have this metadata for more post-processing.
|
||||
//
|
||||
// For now, `2` and `3` will be the same since we don't support relative paths... a bit of
|
||||
// a limitation from rust itself. We technically could support them but not without some hoops
|
||||
// to jump through
|
||||
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
|
||||
// 1. the link section itself
|
||||
let link_section = crate::generate_link_section(&self.asset);
|
||||
|
||||
// 2. original
|
||||
let input = self.asset.input.display().to_string();
|
||||
|
||||
// 3. resolved on the user's system
|
||||
let local = self.asset.absolute.display().to_string();
|
||||
|
||||
// 4. bundled
|
||||
let bundled = self.asset.bundled.to_string();
|
||||
|
||||
// 5. source tokens
|
||||
let option_source = &self.option_source;
|
||||
|
||||
tokens.extend(quote! {
|
||||
Asset::new(
|
||||
{
|
||||
#link_section
|
||||
manganis::Asset {
|
||||
// "/assets/blah.css"
|
||||
input: #input,
|
||||
|
||||
// "/users/dioxus/dev/app/assets/blah.css"
|
||||
local: #local,
|
||||
|
||||
bundled: #bundled,
|
||||
}
|
||||
}
|
||||
) #option_source
|
||||
})
|
||||
}
|
||||
}
|
|
@ -1,77 +0,0 @@
|
|||
use core::panic;
|
||||
use manganis_core::ResourceAsset;
|
||||
use proc_macro::TokenStream;
|
||||
use proc_macro2::Ident;
|
||||
use proc_macro2::TokenStream as TokenStream2;
|
||||
use quote::{quote, quote_spanned, ToTokens};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{collections::HashMap, fs::File, path::Path, sync::atomic::AtomicBool};
|
||||
use std::{path::PathBuf, sync::atomic::Ordering};
|
||||
use syn::{
|
||||
parenthesized,
|
||||
parse::{Parse, ParseStream},
|
||||
parse_macro_input,
|
||||
punctuated::Punctuated,
|
||||
token::Token,
|
||||
Expr, ExprLit, Lit, LitStr, PatLit, Token,
|
||||
};
|
||||
|
||||
pub struct MethodCalls {
|
||||
pub options: Vec<MethodCallOption>,
|
||||
}
|
||||
|
||||
/// A builder method in the form of `.method(arg1, arg2)`
|
||||
pub struct MethodCallOption {
|
||||
pub method: syn::Ident,
|
||||
pub args: Punctuated<syn::Lit, Token![,]>,
|
||||
}
|
||||
|
||||
impl MethodCalls {
|
||||
// fn new(args: Vec<MethodCallOption>) -> Option<FileOptions> {
|
||||
// let asset_type = args.first()?.method.to_string();
|
||||
|
||||
// let stack = args
|
||||
// .into_iter()
|
||||
// .skip(1)
|
||||
// .map(|x| (x.method.to_string(), x.args.into_iter().collect::<Vec<_>>()))
|
||||
// .collect::<HashMap<String, Vec<syn::Lit>>>();
|
||||
|
||||
// let opts = match asset_type.as_str() {
|
||||
// "image" => {
|
||||
// let mut opts = ImageOptions::new(manganis_common::ImageType::Avif, Some((32, 32)));
|
||||
// // opts.set_preload(preload);
|
||||
// // opts.set_url_encoded(url_encoded);
|
||||
// // opts.set_low_quality_preview(low_quality_preview);
|
||||
// FileOptions::Image(opts)
|
||||
// }
|
||||
|
||||
// "video" => FileOptions::Video(VideoOptions::new(todo!())),
|
||||
// "font" => FileOptions::Font(FontOptions::new(todo!())),
|
||||
// "css" => FileOptions::Css(CssOptions::new()),
|
||||
// "js" => FileOptions::Js(JsOptions::new(todo!())),
|
||||
// "json" => FileOptions::Json(JsonOptions::new()),
|
||||
// other => FileOptions::Other(UnknownFileOptions::new(todo!())),
|
||||
// };
|
||||
|
||||
// Some(opts)
|
||||
// None
|
||||
// }
|
||||
}
|
||||
|
||||
// let local = match asset.local.as_ref() {
|
||||
// Some(local) => {
|
||||
// let local = local.display().to_string();
|
||||
// quote! { #local }
|
||||
// }
|
||||
// None => {
|
||||
// todo!("relative paths are not supported yet")
|
||||
// // quote! {
|
||||
// // {
|
||||
// // // ensure it exists by throwing away the include_bytes
|
||||
// // static _BLAH: &[u8] = include_bytes!(#input);
|
||||
// // // But then pass along the path
|
||||
// // concat!(env!("CARGO_MANIFEST_DIR"), "/", file!(), "/<split>/", #input)
|
||||
// // }
|
||||
// // }
|
||||
// }
|
||||
// };
|
|
@ -1,173 +0,0 @@
|
|||
use manganis_common::{AssetType, CssOptions, ManganisSupportError, ResourceAsset};
|
||||
use quote::{quote, ToTokens};
|
||||
use syn::{bracketed, parenthesized, parse::Parse};
|
||||
|
||||
use crate::{generate_link_section, resource::ResourceAssetParser};
|
||||
|
||||
#[derive(Default)]
|
||||
struct FontFamilies {
|
||||
families: Vec<String>,
|
||||
}
|
||||
|
||||
impl Parse for FontFamilies {
|
||||
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
|
||||
let inside;
|
||||
bracketed!(inside in input);
|
||||
let array =
|
||||
syn::punctuated::Punctuated::<syn::LitStr, syn::Token![,]>::parse_separated_nonempty(
|
||||
&inside,
|
||||
)?;
|
||||
Ok(FontFamilies {
|
||||
families: array.into_iter().map(|f| f.value()).collect(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct FontWeights {
|
||||
weights: Vec<u32>,
|
||||
}
|
||||
|
||||
impl Parse for FontWeights {
|
||||
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
|
||||
let inside;
|
||||
bracketed!(inside in input);
|
||||
let array =
|
||||
syn::punctuated::Punctuated::<syn::LitInt, syn::Token![,]>::parse_separated_nonempty(
|
||||
&inside,
|
||||
)?;
|
||||
Ok(FontWeights {
|
||||
weights: array
|
||||
.into_iter()
|
||||
.map(|f| f.base10_parse().unwrap())
|
||||
.collect(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
struct ParseFontOptions {
|
||||
families: FontFamilies,
|
||||
weights: FontWeights,
|
||||
text: Option<String>,
|
||||
display: Option<String>,
|
||||
}
|
||||
|
||||
impl ParseFontOptions {
|
||||
fn url(&self) -> String {
|
||||
let mut segments = Vec::new();
|
||||
|
||||
let families: Vec<_> = self
|
||||
.families
|
||||
.families
|
||||
.iter()
|
||||
.map(|f| f.replace(' ', "+"))
|
||||
.collect();
|
||||
if !families.is_empty() {
|
||||
segments.push(format!("family={}", families.join("&")));
|
||||
}
|
||||
|
||||
let weights: Vec<_> = self.weights.weights.iter().map(|w| w.to_string()).collect();
|
||||
if !weights.is_empty() {
|
||||
segments.push(format!("weight={}", weights.join(",")));
|
||||
}
|
||||
|
||||
if let Some(text) = &self.text {
|
||||
segments.push(format!("text={}", text.replace(' ', "+")));
|
||||
}
|
||||
|
||||
if let Some(display) = &self.display {
|
||||
segments.push(format!("display={}", display.replace(' ', "+")));
|
||||
}
|
||||
|
||||
let query = if segments.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
format!("?{}", segments.join("&"))
|
||||
};
|
||||
|
||||
format!("https://fonts.googleapis.com/css2{}", query)
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for ParseFontOptions {
|
||||
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
|
||||
let mut families = None;
|
||||
let mut weights = None;
|
||||
let mut text = None;
|
||||
let mut display = None;
|
||||
loop {
|
||||
if input.is_empty() {
|
||||
break;
|
||||
}
|
||||
let _ = input.parse::<syn::Token![.]>()?;
|
||||
let ident = input.parse::<syn::Ident>()?;
|
||||
let inside;
|
||||
parenthesized!(inside in input);
|
||||
match ident.to_string().to_lowercase().as_str() {
|
||||
"families" => {
|
||||
families = Some(inside.parse::<FontFamilies>()?);
|
||||
}
|
||||
"weights" => {
|
||||
weights = Some(inside.parse::<FontWeights>()?);
|
||||
}
|
||||
"text" => {
|
||||
text = Some(inside.parse::<syn::LitStr>()?.value());
|
||||
}
|
||||
"display" => {
|
||||
display = Some(inside.parse::<syn::LitStr>()?.value());
|
||||
}
|
||||
_ => {
|
||||
return Err(syn::Error::new(
|
||||
proc_macro2::Span::call_site(),
|
||||
format!("Unknown font option: {ident}. Supported options are families, weights, text, display"),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(ParseFontOptions {
|
||||
families: families.unwrap_or_default(),
|
||||
weights: weights.unwrap_or_default(),
|
||||
text,
|
||||
display,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FontAssetParser {
|
||||
asset: ResourceAsset,
|
||||
}
|
||||
|
||||
impl Parse for FontAssetParser {
|
||||
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
|
||||
let inside;
|
||||
parenthesized!(inside in input);
|
||||
if !inside.is_empty() {
|
||||
return Err(syn::Error::new(
|
||||
proc_macro2::Span::call_site(),
|
||||
"Font assets do not support paths. Please use file() if you want to import a local font file",
|
||||
));
|
||||
}
|
||||
|
||||
let options = input.parse::<ParseFontOptions>()?;
|
||||
|
||||
let url = options.url();
|
||||
let asset: ResourceAsset = match ResourceAsset::parse_file(&url) {
|
||||
Ok(url) => url,
|
||||
Err(e) => {
|
||||
return Err(syn::Error::new(
|
||||
proc_macro2::Span::call_site(),
|
||||
format!("Failed to parse url: {url:?}\n{e}"),
|
||||
))
|
||||
}
|
||||
};
|
||||
|
||||
Ok(FontAssetParser { asset })
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for FontAssetParser {
|
||||
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
|
||||
ResourceAssetParser::to_ref_tokens(&self.asset, tokens)
|
||||
}
|
||||
}
|
|
@ -1,305 +0,0 @@
|
|||
use manganis_common::ManganisSupportError;
|
||||
use manganis_common::{AssetType, FileOptions, ImageOptions, ResourceAsset};
|
||||
use quote::{quote, ToTokens};
|
||||
use syn::{parenthesized, parse::Parse, Token};
|
||||
|
||||
use crate::generate_link_section;
|
||||
|
||||
struct ParseImageOptions {
|
||||
options: Vec<ParseImageOption>,
|
||||
}
|
||||
|
||||
impl ParseImageOptions {
|
||||
fn apply_to_options(self, file: &mut ResourceAsset, low_quality_preview: &mut bool) {
|
||||
for option in self.options {
|
||||
option.apply_to_options(file, low_quality_preview);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for ParseImageOptions {
|
||||
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
|
||||
let mut options = Vec::new();
|
||||
while !input.is_empty() {
|
||||
options.push(input.parse::<ParseImageOption>()?);
|
||||
}
|
||||
Ok(ParseImageOptions { options })
|
||||
}
|
||||
}
|
||||
|
||||
enum ParseImageOption {
|
||||
Format(manganis_common::ImageType),
|
||||
Size((u32, u32)),
|
||||
Preload(bool),
|
||||
UrlEncoded(bool),
|
||||
Lqip(bool),
|
||||
}
|
||||
|
||||
impl ParseImageOption {
|
||||
fn apply_to_options(self, file: &mut ResourceAsset, low_quality_preview: &mut bool) {
|
||||
match self {
|
||||
ParseImageOption::Format(_)
|
||||
| ParseImageOption::Size(_)
|
||||
| ParseImageOption::Preload(_) => file.with_options_mut(|options| {
|
||||
if let FileOptions::Image(options) = options {
|
||||
match self {
|
||||
ParseImageOption::Format(format) => {
|
||||
options.set_ty(format);
|
||||
}
|
||||
ParseImageOption::Size(size) => {
|
||||
options.set_size(Some(size));
|
||||
}
|
||||
ParseImageOption::Preload(preload) => {
|
||||
options.set_preload(preload);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}),
|
||||
ParseImageOption::UrlEncoded(url_encoded) => {
|
||||
file.set_url_encoded(url_encoded);
|
||||
}
|
||||
ParseImageOption::Lqip(lqip) => {
|
||||
*low_quality_preview = lqip;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for ParseImageOption {
|
||||
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
|
||||
let _ = input.parse::<syn::Token![.]>()?;
|
||||
let ident = input.parse::<syn::Ident>()?;
|
||||
let content;
|
||||
parenthesized!(content in input);
|
||||
match ident.to_string().as_str() {
|
||||
"format" => {
|
||||
let format = content.parse::<ImageType>()?;
|
||||
Ok(ParseImageOption::Format(format.into()))
|
||||
}
|
||||
"size" => {
|
||||
let size = content.parse::<ImageSize>()?;
|
||||
Ok(ParseImageOption::Size((size.width, size.height)))
|
||||
}
|
||||
"preload" => {
|
||||
crate::verify_preload_valid(&ident)?;
|
||||
Ok(ParseImageOption::Preload(true))
|
||||
}
|
||||
"url_encoded" => {
|
||||
Ok(ParseImageOption::UrlEncoded(true))
|
||||
}
|
||||
"low_quality_preview" => {
|
||||
Ok(ParseImageOption::Lqip(true))
|
||||
}
|
||||
_ => Err(syn::Error::new(
|
||||
proc_macro2::Span::call_site(),
|
||||
format!(
|
||||
"Unknown image option: {}. Supported options are format, size, preload, url_encoded, low_quality_preview",
|
||||
ident
|
||||
),
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct ImageSize {
|
||||
width: u32,
|
||||
height: u32,
|
||||
}
|
||||
|
||||
impl Parse for ImageSize {
|
||||
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
|
||||
let width = input.parse::<syn::LitInt>()?;
|
||||
let _ = input.parse::<syn::Token![,]>()?;
|
||||
let height = input.parse::<syn::LitInt>()?;
|
||||
Ok(ImageSize {
|
||||
width: width.base10_parse()?,
|
||||
height: height.base10_parse()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ImageType> for manganis_common::ImageType {
|
||||
fn from(val: ImageType) -> Self {
|
||||
val.0
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for ImageType {
|
||||
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
|
||||
let _ = input.parse::<syn::Ident>()?;
|
||||
let _ = input.parse::<Token![::]>()?;
|
||||
let ident = input.parse::<syn::Ident>()?;
|
||||
ident
|
||||
.to_string()
|
||||
.to_lowercase()
|
||||
.as_str()
|
||||
.parse::<manganis_common::ImageType>()
|
||||
.map_err(|_| {
|
||||
syn::Error::new(
|
||||
proc_macro2::Span::call_site(),
|
||||
format!(
|
||||
"Unknown image type: {}. Supported types are png, jpeg, webp, avif",
|
||||
ident
|
||||
),
|
||||
)
|
||||
})
|
||||
.map(Self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
struct ImageType(manganis_common::ImageType);
|
||||
|
||||
impl Default for ImageType {
|
||||
fn default() -> Self {
|
||||
Self(manganis_common::ImageType::Avif)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ImageAssetParser {
|
||||
asset: ResourceAsset,
|
||||
low_quality_preview: Option<String>,
|
||||
}
|
||||
|
||||
impl Parse for ImageAssetParser {
|
||||
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
|
||||
let inside;
|
||||
parenthesized!(inside in input);
|
||||
let path = inside.parse::<syn::LitStr>()?;
|
||||
|
||||
let parsed_options = {
|
||||
if input.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(input.parse::<ParseImageOptions>()?)
|
||||
}
|
||||
};
|
||||
|
||||
let path_as_str = path.value();
|
||||
let mut asset: ResourceAsset = match ResourceAsset::parse_file(&path_as_str) {
|
||||
Ok(path) => path.with_options(manganis_common::FileOptions::Image(ImageOptions::new(
|
||||
manganis_common::ImageType::Avif,
|
||||
None,
|
||||
))),
|
||||
Err(e) => {
|
||||
return Err(syn::Error::new(
|
||||
proc_macro2::Span::call_site(),
|
||||
format!("{e}"),
|
||||
))
|
||||
}
|
||||
};
|
||||
|
||||
let mut low_quality_preview = false;
|
||||
if let Some(parsed_options) = parsed_options {
|
||||
parsed_options.apply_to_options(&mut asset, &mut low_quality_preview);
|
||||
}
|
||||
|
||||
// let asset = manganis_common::AssetType::Resource(asset.clone());
|
||||
|
||||
// let file_name = if asset.url_encoded() {
|
||||
// #[cfg(not(feature = "url-encoding"))]
|
||||
// return Err(syn::Error::new(
|
||||
// proc_macro2::Span::call_site(),
|
||||
// "URL encoding is not enabled. Enable the url-encoding feature to use this feature",
|
||||
// ));
|
||||
// #[cfg(feature = "url-encoding")]
|
||||
// Ok(crate::url_encoded_asset(&asset).map_err(|e| {
|
||||
// syn::Error::new(
|
||||
// proc_macro2::Span::call_site(),
|
||||
// format!("Failed to encode file: {}", e),
|
||||
// )
|
||||
// })?)
|
||||
// } else {
|
||||
// asset.served_location()
|
||||
// };
|
||||
|
||||
// let low_quality_preview = if low_quality_preview {
|
||||
// #[cfg(not(feature = "url-encoding"))]
|
||||
// return Err(syn::Error::new(
|
||||
// proc_macro2::Span::call_site(),
|
||||
// "Low quality previews require URL encoding. Enable the url-encoding feature to use this feature",
|
||||
// ));
|
||||
|
||||
// #[cfg(feature = "url-encoding")]
|
||||
// {
|
||||
// let current_image_size = match asset.options() {
|
||||
// manganis_common::FileOptions::Image(options) => options.size(),
|
||||
// _ => None,
|
||||
// };
|
||||
// let low_quality_preview_size = current_image_size
|
||||
// .map(|(width, height)| {
|
||||
// let width = width / 10;
|
||||
// let height = height / 10;
|
||||
// (width, height)
|
||||
// })
|
||||
// .unwrap_or((32, 32));
|
||||
// let lqip = ResourceAsset::new(asset).with_options(
|
||||
// manganis_common::FileOptions::Image(ImageOptions::new(
|
||||
// manganis_common::ImageType::Avif,
|
||||
// Some(low_quality_preview_size),
|
||||
// )),
|
||||
// );
|
||||
|
||||
// Some(crate::url_encoded_asset(&lqip).map_err(|e| {
|
||||
// syn::Error::new(
|
||||
// proc_macro2::Span::call_site(),
|
||||
// format!("Failed to encode file: {}", e),
|
||||
// )
|
||||
// })?)
|
||||
// }
|
||||
// } else {
|
||||
// None
|
||||
// };
|
||||
|
||||
let low_quality_preview = None;
|
||||
|
||||
Ok(ImageAssetParser {
|
||||
low_quality_preview,
|
||||
asset,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ImageAssetParser {
|
||||
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
|
||||
let link_section = generate_link_section(&self.asset);
|
||||
let input = self.asset.input.to_string();
|
||||
|
||||
let bundled = self.asset.bundled.to_string();
|
||||
|
||||
let low_quality_preview = match &self.low_quality_preview {
|
||||
Some(lqip) => quote! { Some(#lqip) },
|
||||
None => quote! { None },
|
||||
};
|
||||
|
||||
// If the asset is relative, we use concat!(env!("CARGO_MANIFEST_DIR"), "/", asset.input.path())
|
||||
let local = match self.asset.local.as_ref() {
|
||||
Some(local) => {
|
||||
let local = local.to_string();
|
||||
quote! { #local }
|
||||
}
|
||||
None => {
|
||||
quote! {
|
||||
{
|
||||
static _: &[_] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/", #input.path()));
|
||||
concat!(env!("CARGO_MANIFEST_DIR"), "/", #input.path())
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
tokens.extend(quote! {
|
||||
{
|
||||
#link_section
|
||||
manganis::ImageAsset::new(
|
||||
manganis::Asset {
|
||||
input: #input,
|
||||
local: #local,
|
||||
bundled: #bundled,
|
||||
}
|
||||
).with_preview(#low_quality_preview)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
|
@ -1,104 +0,0 @@
|
|||
#![doc = include_str!("../README.md")]
|
||||
#![deny(missing_docs)]
|
||||
|
||||
use proc_macro::TokenStream;
|
||||
use proc_macro2::Ident;
|
||||
use proc_macro2::TokenStream as TokenStream2;
|
||||
use quote::{quote, quote_spanned, ToTokens};
|
||||
use serde::Serialize;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use syn::{parse::Parse, parse_macro_input, LitStr};
|
||||
|
||||
pub(crate) mod asset;
|
||||
pub(crate) mod asset_options;
|
||||
pub(crate) mod linker;
|
||||
|
||||
use linker::generate_link_section;
|
||||
|
||||
/// The mg macro collects assets that will be included in the final binary
|
||||
///
|
||||
/// # Files
|
||||
///
|
||||
/// The file builder collects an arbitrary file. Relative paths are resolved relative to the package root
|
||||
/// ```rust
|
||||
/// const _: &str = manganis::asset!("src/asset.txt");
|
||||
/// ```
|
||||
/// Or you can use URLs to read the asset at build time from a remote location
|
||||
/// ```rust
|
||||
/// const _: &str = manganis::asset!("https://rustacean.net/assets/rustacean-flat-happy.png");
|
||||
/// ```
|
||||
///
|
||||
/// # Images
|
||||
///
|
||||
/// You can collect images which will be automatically optimized with the image builder:
|
||||
/// ```rust
|
||||
/// const _: manganis::ImageAsset = manganis::asset!(image("rustacean-flat-gesture.png"));
|
||||
/// ```
|
||||
/// Resize the image at compile time to make the assets file size smaller:
|
||||
/// ```rust
|
||||
/// const _: manganis::ImageAsset = manganis::asset!(image("rustacean-flat-gesture.png").size(52, 52));
|
||||
/// ```
|
||||
/// Or convert the image at compile time to a web friendly format:
|
||||
/// ```rust
|
||||
/// const _: manganis::ImageAsset = manganis::asset!(image("rustacean-flat-gesture.png").format(ImageFormat::Avif).size(52, 52));
|
||||
/// ```
|
||||
/// You can mark images as preloaded to make them load faster in your app
|
||||
/// ```rust
|
||||
/// const _: manganis::ImageAsset = manganis::asset!(image("rustacean-flat-gesture.png").preload());
|
||||
/// ```
|
||||
///
|
||||
/// # Fonts
|
||||
///
|
||||
/// You can use the font builder to collect fonts that will be included in the final binary from google fonts
|
||||
/// ```rust
|
||||
/// const _: &str = manganis::asset!(font().families(["Roboto"]));
|
||||
/// ```
|
||||
/// You can specify weights for the fonts
|
||||
/// ```rust
|
||||
/// const _: &str = manganis::asset!(font().families(["Roboto"]).weights([200]));
|
||||
/// ```
|
||||
/// Or set the text to only include the characters you need
|
||||
/// ```rust
|
||||
/// const _: &str = manganis::asset!(font().families(["Roboto"]).weights([200]).text("Hello, world!"));
|
||||
/// ```
|
||||
#[proc_macro]
|
||||
pub fn asset(input: TokenStream) -> TokenStream {
|
||||
let asset = parse_macro_input!(input as asset::AssetParser);
|
||||
|
||||
quote! { #asset }.into_token_stream().into()
|
||||
}
|
||||
|
||||
/// // You can also collect arbitrary key-value pairs. The meaning of these pairs is determined by the CLI that processes your assets
|
||||
/// ```rust
|
||||
/// const _: () = manganis::meta!("opt-level": "3");
|
||||
/// ```
|
||||
#[proc_macro]
|
||||
pub fn meta(input: TokenStream) -> TokenStream {
|
||||
struct MetadataValue {
|
||||
key: String,
|
||||
value: String,
|
||||
}
|
||||
|
||||
impl Parse for MetadataValue {
|
||||
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
|
||||
let key = input.parse::<Ident>()?.to_string();
|
||||
input.parse::<syn::Token![:]>()?;
|
||||
let value = input.parse::<LitStr>()?.value();
|
||||
Ok(Self { key, value })
|
||||
}
|
||||
}
|
||||
|
||||
todo!()
|
||||
|
||||
// let md = parse_macro_input!(input as MetadataValue);
|
||||
// let asset = MetadataAsset::new(md.key.as_str(), md.value.as_str());
|
||||
// let link_section = generate_link_section(&asset);
|
||||
|
||||
// quote! {
|
||||
// {
|
||||
// #link_section
|
||||
// }
|
||||
// }
|
||||
// .into_token_stream()
|
||||
// .into()
|
||||
}
|
|
@ -1,100 +0,0 @@
|
|||
use proc_macro::TokenStream;
|
||||
use proc_macro2::Ident;
|
||||
use proc_macro2::TokenStream as TokenStream2;
|
||||
use quote::{quote, quote_spanned, ToTokens};
|
||||
use serde::Serialize;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use syn::{parse::Parse, parse_macro_input, LitStr};
|
||||
|
||||
/// this new approach will store the assets descriptions *inside the executable*.
|
||||
/// The trick is to use the `link_section` attribute.
|
||||
/// We force rust to store a json representation of the asset description
|
||||
/// inside a particular region of the binary, with the label "manganis".
|
||||
/// After linking, the "manganis" sections of the different executables will be merged.
|
||||
pub fn generate_link_section(asset: &impl Serialize) -> TokenStream2 {
|
||||
let position = proc_macro2::Span::call_site();
|
||||
|
||||
let asset_description = serde_json::to_string(asset).unwrap();
|
||||
|
||||
let len = asset_description.as_bytes().len();
|
||||
|
||||
let asset_bytes = syn::LitByteStr::new(asset_description.as_bytes(), position);
|
||||
|
||||
let section_name = syn::LitStr::new(LinkSection::CURRENT.link_section, position);
|
||||
|
||||
quote! {
|
||||
#[link_section = #section_name]
|
||||
#[used]
|
||||
static ASSET: [u8; #len] = * #asset_bytes;
|
||||
}
|
||||
}
|
||||
|
||||
/// Information about the manganis link section for a given platform
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
struct LinkSection {
|
||||
/// The link section we pass to the static
|
||||
pub link_section: &'static str,
|
||||
/// The name of the section we find in the binary
|
||||
pub name: &'static str,
|
||||
}
|
||||
|
||||
impl LinkSection {
|
||||
/// The list of link sections for all supported platforms
|
||||
pub const ALL: &'static [&'static LinkSection] =
|
||||
&[Self::WASM, Self::MACOS, Self::WINDOWS, Self::ILLUMOS];
|
||||
|
||||
/// Returns the link section used in linux, android, fuchsia, psp, freebsd, and wasm32
|
||||
pub const WASM: &'static LinkSection = &LinkSection {
|
||||
link_section: "manganis",
|
||||
name: "manganis",
|
||||
};
|
||||
|
||||
/// Returns the link section used in macOS, iOS, tvOS
|
||||
pub const MACOS: &'static LinkSection = &LinkSection {
|
||||
link_section: "__DATA,manganis,regular,no_dead_strip",
|
||||
name: "manganis",
|
||||
};
|
||||
|
||||
/// Returns the link section used in windows
|
||||
pub const WINDOWS: &'static LinkSection = &LinkSection {
|
||||
link_section: "mg",
|
||||
name: "mg",
|
||||
};
|
||||
|
||||
/// Returns the link section used in illumos
|
||||
pub const ILLUMOS: &'static LinkSection = &LinkSection {
|
||||
link_section: "set_manganis",
|
||||
name: "set_manganis",
|
||||
};
|
||||
|
||||
/// The link section used on the current platform
|
||||
pub const CURRENT: &'static LinkSection = {
|
||||
#[cfg(any(
|
||||
target_os = "none",
|
||||
target_os = "linux",
|
||||
target_os = "android",
|
||||
target_os = "fuchsia",
|
||||
target_os = "psp",
|
||||
target_os = "freebsd",
|
||||
target_arch = "wasm32"
|
||||
))]
|
||||
{
|
||||
Self::WASM
|
||||
}
|
||||
|
||||
#[cfg(any(target_os = "macos", target_os = "ios", target_os = "tvos"))]
|
||||
{
|
||||
Self::MACOS
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
Self::WINDOWS
|
||||
}
|
||||
|
||||
#[cfg(target_os = "illumos")]
|
||||
{
|
||||
Self::ILLUMOS
|
||||
}
|
||||
};
|
||||
}
|
|
@ -1,41 +0,0 @@
|
|||
[package]
|
||||
# Manganese is a rusting catalyst. Manganis makes it faster to collect rust assets (and has almost no google search results)
|
||||
name = "manganis"
|
||||
version.workspace = true
|
||||
authors = ["Evan Almloff"]
|
||||
edition = "2021"
|
||||
description = "Ergonomic, automatic, cross crate asset collection and optimization"
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/DioxusLabs/manganis/"
|
||||
homepage = "https://dioxuslabs.com"
|
||||
keywords = ["assets"]
|
||||
|
||||
[lib]
|
||||
|
||||
[dependencies]
|
||||
manganis-macro = { workspace = true, optional = true }
|
||||
dioxus-core-types = { workspace = true }
|
||||
|
||||
# dunce = "1.0.2"
|
||||
# [target.'cfg(any(target_os = "macos", target_os = "ios"))'.dependencies]
|
||||
# core-foundation = "0.10.0"
|
||||
# [target.'cfg(target_os = "macos")'.dependencies]
|
||||
# core-foundation = "0.9.3"
|
||||
# [target.'cfg(target_os ="macos")'.dependencies]
|
||||
# infer = { workspace = true }
|
||||
# dirs = "5.0.1"
|
||||
# infer = { workspace = true }
|
||||
# manganis-common = { workspace = true }
|
||||
|
||||
once_cell = "1.19.0"
|
||||
dunce = "1.0.2"
|
||||
serde = { version = "1.0.183", features = ["derive"] }
|
||||
anyhow = "1"
|
||||
base64 = { workspace = true }
|
||||
|
||||
|
||||
[features]
|
||||
default = ["macro"]
|
||||
html = []
|
||||
# url-encoding = ["manganis-macro/url-encoding"]
|
||||
macro = ["dep:manganis-macro"]
|
|
@ -1,132 +0,0 @@
|
|||
/// This is basically a compile-time version of ResourceAsset
|
||||
/// A struct that contains the relative and absolute paths of an asset
|
||||
#[derive(Debug, PartialEq, PartialOrd, Clone, Hash)]
|
||||
pub struct Asset {
|
||||
/// The input URI given to the macro
|
||||
pub input: &'static str,
|
||||
|
||||
/// The sourcefile of the asset
|
||||
pub source_file: &'static str,
|
||||
|
||||
///
|
||||
pub local: &'static str,
|
||||
|
||||
///
|
||||
pub bundled: &'static str,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Asset {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.resolve().fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Asset> for String {
|
||||
fn from(asset: Asset) -> Self {
|
||||
asset.resolve()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Asset> for Option<String> {
|
||||
fn from(asset: Asset) -> Self {
|
||||
Some(asset.resolve())
|
||||
}
|
||||
}
|
||||
|
||||
impl Asset {
|
||||
/// Resolve the asset against the bundle
|
||||
pub fn resolve(&self) -> String {
|
||||
// A fallback for non-bundled apps with no support for manganis
|
||||
//
|
||||
// Necessary to get `cargo run` to work when folks use `cargo run --example demo` on the main
|
||||
// dioxus repo.
|
||||
//
|
||||
// We could also say, suggest that they install `dioxus-cli` and use that instead.
|
||||
if local_fallback() {
|
||||
return self.bundled.to_string();
|
||||
}
|
||||
|
||||
// the rest of the platforms are bundled, so we need to resolve the asset against the bundle
|
||||
|
||||
// for web, we just do the basepath thing
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
{
|
||||
return format!("/{}", self.bundled);
|
||||
}
|
||||
|
||||
// On mac do a bundle lookup
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
let bundle = core_foundation::bundle::CFBundle::main_bundle();
|
||||
let bundle_path = bundle.path().unwrap();
|
||||
let resources_path = bundle.resources_path().unwrap();
|
||||
let absolute_resources_root = bundle_path.join(resources_path);
|
||||
return dunce::canonicalize(absolute_resources_root)
|
||||
.ok()
|
||||
.unwrap()
|
||||
.display()
|
||||
.to_string();
|
||||
}
|
||||
|
||||
// // on ios do a bundle lookup
|
||||
// #[cfg(target_os = "ios")]
|
||||
// {
|
||||
// let bundle = core_foundation::bundle::CFBundle::main_bundle();
|
||||
// let bundle_path = bundle.path().unwrap();
|
||||
// let resources_path = bundle.resources_path().unwrap();
|
||||
// let absolute_resources_root = bundle_path.join(resources_path);
|
||||
// return dunce::canonicalize(absolute_resources_root)
|
||||
// .ok()
|
||||
// .unwrap()
|
||||
// .display()
|
||||
// .to_string();
|
||||
// }
|
||||
|
||||
// on android do a bundle lookup
|
||||
|
||||
// on windows,
|
||||
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn name(&self) -> String {
|
||||
if BUNDLED {
|
||||
self.input.to_string()
|
||||
} else {
|
||||
self.local.to_string()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static BUNDLED: bool = false;
|
||||
// static BUNDLED: bool = option_env!("MG_BUNDLED").is_some();
|
||||
|
||||
/// Returns whether the app should use the local fallback or not
|
||||
///
|
||||
/// A `cargo run` will not be bundled but the asset will be resolved against the filesystem through
|
||||
/// dependencies.
|
||||
pub fn local_fallback() -> bool {
|
||||
// If we're bundled, manganis is active
|
||||
if BUNDLED {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Otherwise, check if the MG_RUNTIME env var is set
|
||||
// this prevents us from thrashing the cache when running `cargo run`
|
||||
static USE_FALLBACK: once_cell::sync::OnceCell<bool> = once_cell::sync::OnceCell::new();
|
||||
*USE_FALLBACK.get_or_init(|| {
|
||||
// If the env var is present, we use the bundled path
|
||||
if std::env::var("MG_RUNTIME").is_ok() {
|
||||
return false;
|
||||
}
|
||||
|
||||
// on wasm, there's no env vars... but the app is not bundled
|
||||
// for now we just assume you're using manganis in a wasm app
|
||||
if cfg!(target_arch = "wasm32") {
|
||||
return false;
|
||||
}
|
||||
|
||||
// No env var, not wasm, not bundled, so we're not using manganis
|
||||
true
|
||||
})
|
||||
}
|
|
@ -1,111 +0,0 @@
|
|||
use dioxus_core_types::DioxusFormattable;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Asset
|
||||
#[derive(Debug, PartialEq, PartialOrd, Clone, Copy, Hash)]
|
||||
pub struct Asset {
|
||||
/// The input URI given to the macro
|
||||
pub input: &'static str,
|
||||
|
||||
/// The absolute path to the asset on the filesystem
|
||||
pub local: &'static str,
|
||||
|
||||
/// The asset location after its been bundled
|
||||
///
|
||||
/// `blah123.css``
|
||||
pub bundled: &'static str,
|
||||
}
|
||||
|
||||
impl Asset {
|
||||
/// Create a new asset
|
||||
pub const fn new(self) -> Self {
|
||||
self
|
||||
}
|
||||
|
||||
/// Get the path to the asset
|
||||
pub fn path(&self) -> PathBuf {
|
||||
PathBuf::from(self.input.to_string())
|
||||
}
|
||||
|
||||
/// Get the path to the asset
|
||||
pub fn relative_path(&self) -> PathBuf {
|
||||
PathBuf::from(self.input.trim_start_matches("/").to_string())
|
||||
}
|
||||
|
||||
/// Return a canonicalized path to the asset
|
||||
pub fn resolve(&self) -> PathBuf {
|
||||
// if we're running with cargo in the loop, we can use the absolute path.
|
||||
// this is non-bundled situations
|
||||
if let Ok(_manifest_dir) = std::env::var("CARGO_MANIFEST_DIR") {
|
||||
return PathBuf::from(self.local);
|
||||
}
|
||||
|
||||
// todo: actually properly resolve this
|
||||
base_path()
|
||||
.unwrap_or_else(|| std::env::current_dir().unwrap_or("/assets/".into()))
|
||||
.join(PathBuf::from(self.bundled.trim_start_matches('/')))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Asset> for String {
|
||||
fn from(value: Asset) -> Self {
|
||||
value.to_string()
|
||||
}
|
||||
}
|
||||
impl From<Asset> for Option<String> {
|
||||
fn from(value: Asset) -> Self {
|
||||
Some(value.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Asset {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.resolve().display())
|
||||
}
|
||||
}
|
||||
|
||||
impl DioxusFormattable for Asset {
|
||||
fn format(&self) -> std::borrow::Cow<'static, str> {
|
||||
std::borrow::Cow::Owned(self.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unreachable_code)]
|
||||
fn base_path() -> Option<PathBuf> {
|
||||
// Use the prescence of the bundle to determine if we're in dev mode
|
||||
// todo: for other platforms, we should check their bundles too. This currently only works for macOS and iOS
|
||||
#[cfg(any(target_os = "macos", target_os = "ios"))]
|
||||
{
|
||||
// usually the bundle is
|
||||
// .app
|
||||
// Contents
|
||||
// Resources
|
||||
// some_asset
|
||||
// macOS
|
||||
// somebinary
|
||||
//
|
||||
// but not always!
|
||||
//
|
||||
// we fallback to using the .app's directory itself if it doesn't exist - which is inline
|
||||
// with how tauri-bundle works
|
||||
//
|
||||
// we would normally just want to use core-foundation, but it's much faster for compile times
|
||||
// to not pull in CF in a build/proc-macro, so it's a teeny bit hand-rolled
|
||||
let cur_exe = std::env::current_exe().ok()?;
|
||||
let mut resources_dir = cur_exe.parent()?.parent()?.join("Resources");
|
||||
if !resources_dir.exists() {
|
||||
resources_dir = cur_exe.parent()?.to_path_buf();
|
||||
}
|
||||
|
||||
// Note that this will return `target/debug` if you're in debug mode - not reliable check if we're in dev mode
|
||||
return dunce::canonicalize(resources_dir).ok();
|
||||
}
|
||||
|
||||
// web-wasm
|
||||
#[cfg(target_os = "wasm32-unknown-unknown")]
|
||||
{
|
||||
return = Some(PathBuf::from("/"))
|
||||
}
|
||||
|
||||
None
|
||||
}
|
|
@ -1,47 +0,0 @@
|
|||
use std::{
|
||||
fmt::Display,
|
||||
hash::{DefaultHasher, Hash, Hasher},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use base64::Engine;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{config, FileOptions};
|
||||
|
||||
// mod file;
|
||||
// mod folder;
|
||||
mod error;
|
||||
mod file;
|
||||
mod meta;
|
||||
mod resource;
|
||||
mod tailwind;
|
||||
|
||||
// pub use folder::*;
|
||||
pub use error::*;
|
||||
pub use file::*;
|
||||
pub use meta::*;
|
||||
pub use resource::*;
|
||||
pub use tailwind::*;
|
||||
|
||||
/// The maximum length of a path segment
|
||||
const MAX_PATH_LENGTH: usize = 128;
|
||||
|
||||
/// The length of the hash in the output path
|
||||
const HASH_SIZE: usize = 16;
|
||||
|
||||
/// The type of asset
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
|
||||
pub enum AssetType {
|
||||
/// A resource asset in the form of a URI
|
||||
///
|
||||
/// Typically a file, but could be a folder or a remote URL
|
||||
Resource(ResourceAsset),
|
||||
|
||||
/// A tailwind class asset
|
||||
Tailwind(TailwindAsset),
|
||||
|
||||
/// A metadata asset
|
||||
Metadata(MetadataAsset),
|
||||
}
|
|
@ -1,62 +0,0 @@
|
|||
use std::{
|
||||
fmt::Display,
|
||||
hash::{DefaultHasher, Hash, Hasher},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use base64::Engine;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{config, FileOptions};
|
||||
|
||||
/// Error while checking an asset exists
|
||||
#[derive(Debug)]
|
||||
pub enum AssetError {
|
||||
/// The relative path does not exist
|
||||
NotFoundRelative(PathBuf, String),
|
||||
/// The path exist but is not a file
|
||||
NotFile(PathBuf),
|
||||
/// The path exist but is not a folder
|
||||
NotFolder(PathBuf),
|
||||
/// Unknown IO error
|
||||
IO(PathBuf, std::io::Error),
|
||||
}
|
||||
|
||||
impl Display for AssetError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
AssetError::NotFoundRelative(manifest_dir, path) =>
|
||||
write!(f,"cannot find file `{}` in `{}`, please make sure it exists.\nAny relative paths are resolved relative to the manifest directory.",
|
||||
path,
|
||||
manifest_dir.display()
|
||||
),
|
||||
AssetError::NotFile(absolute_path) =>
|
||||
write!(f, "`{}` is not a file, please choose a valid asset.\nAny relative paths are resolved relative to the manifest directory.", absolute_path.display()),
|
||||
AssetError::NotFolder(absolute_path) =>
|
||||
write!(f, "`{}` is not a folder, please choose a valid asset.\nAny relative paths are resolved relative to the manifest directory.", absolute_path.display()),
|
||||
AssetError::IO(absolute_path, err) =>
|
||||
write!(f, "unknown error when accessing `{}`: \n{}", absolute_path.display(), err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An error that can occur while collecting assets without CLI support
|
||||
#[derive(Debug)]
|
||||
pub enum ManganisSupportError {
|
||||
/// An error that can occur while collecting assets from other packages without CLI support
|
||||
ExternalPackageCollection,
|
||||
/// Manganis failed to find the current package's manifest
|
||||
FailedToFindCargoManifest,
|
||||
}
|
||||
|
||||
impl Display for ManganisSupportError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::ExternalPackageCollection => write!(f, "Attempted to collect assets from other packages without a CLI that supports Manganis. Please recompile with a CLI that supports Manganis like the `dioxus-cli`."),
|
||||
Self::FailedToFindCargoManifest => write!(f, "Manganis failed to find the current package's manifest. Please recompile with a CLI that supports Manganis like the `dioxus-cli`."),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for ManganisSupportError {}
|
|
@ -1,169 +0,0 @@
|
|||
use std::{
|
||||
fmt::Display,
|
||||
hash::{DefaultHasher, Hash, Hasher},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use base64::Engine;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{config, FileOptions, ResourceAsset as AssetSource};
|
||||
|
||||
/// A file asset
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
|
||||
pub struct FileAsset {
|
||||
location: AssetSource,
|
||||
options: FileOptions,
|
||||
url_encoded: bool,
|
||||
}
|
||||
|
||||
/// A folder asset
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
|
||||
pub struct FolderAsset {
|
||||
location: AssetSource,
|
||||
}
|
||||
|
||||
impl FolderAsset {
|
||||
///
|
||||
pub fn path(&self) -> &Path {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Deref for FolderAsset {
|
||||
type Target = AssetSource;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.location
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Deref for FileAsset {
|
||||
type Target = AssetSource;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.location
|
||||
}
|
||||
}
|
||||
|
||||
impl FileAsset {
|
||||
/// Creates a new file asset
|
||||
pub fn new(source: AssetSource) -> Self {
|
||||
todo!()
|
||||
// if let Some(path) = source.as_path() {
|
||||
// assert!(!path.is_dir());
|
||||
// }
|
||||
|
||||
// let options = FileOptions::default_for_extension(source.extension().as_deref());
|
||||
|
||||
// let mut myself = Self {
|
||||
// location: AssetSource {
|
||||
// unique_name: Default::default(),
|
||||
// source,
|
||||
// },
|
||||
// options,
|
||||
// url_encoded: false,
|
||||
// };
|
||||
|
||||
// myself.regenerate_unique_name();
|
||||
|
||||
// myself
|
||||
}
|
||||
|
||||
/// Set the file options
|
||||
pub fn with_options(self, options: FileOptions) -> Self {
|
||||
let mut myself = Self {
|
||||
location: self.location,
|
||||
options,
|
||||
url_encoded: false,
|
||||
};
|
||||
|
||||
myself.regenerate_unique_name();
|
||||
|
||||
myself
|
||||
}
|
||||
|
||||
/// Set whether the file asset should be url encoded
|
||||
pub fn set_url_encoded(&mut self, url_encoded: bool) {
|
||||
self.url_encoded = url_encoded;
|
||||
}
|
||||
|
||||
/// Returns whether the file asset should be url encoded
|
||||
pub fn url_encoded(&self) -> bool {
|
||||
self.url_encoded
|
||||
}
|
||||
|
||||
// /// Returns the location where the file asset will be served from or None if the asset cannot be served
|
||||
// pub fn served_location(&self) -> Result<String, ManganisSupportError> {
|
||||
// if self.url_encoded {
|
||||
// let data = self.location.source.read_to_bytes().unwrap();
|
||||
// let data = base64::engine::general_purpose::STANDARD_NO_PAD.encode(data);
|
||||
// let mime = self.location.source.mime_type().unwrap();
|
||||
// Ok(format!("data:{mime};base64,{data}"))
|
||||
// } else {
|
||||
// resolve_asset_location(&self.location)
|
||||
// }
|
||||
// }
|
||||
|
||||
/// Returns the location of the file asset
|
||||
pub fn location(&self) -> &AssetSource {
|
||||
&self.location
|
||||
}
|
||||
|
||||
/// Returns the options for the file asset
|
||||
pub fn options(&self) -> &FileOptions {
|
||||
&self.options
|
||||
}
|
||||
|
||||
///
|
||||
pub fn path(&self) -> &Path {
|
||||
todo!()
|
||||
}
|
||||
|
||||
/// Returns the options for the file asset mutably
|
||||
pub fn with_options_mut(&mut self, f: impl FnOnce(&mut FileOptions)) {
|
||||
f(&mut self.options);
|
||||
self.regenerate_unique_name();
|
||||
}
|
||||
|
||||
/// Hash the file asset source and options
|
||||
fn hash(&self) -> u64 {
|
||||
todo!()
|
||||
// let mut hash = std::collections::hash_map::DefaultHasher::new();
|
||||
// hash_file(&self.location.source, &mut hash);
|
||||
// self.options.hash(&mut hash);
|
||||
// hash_version(&mut hash);
|
||||
// hash.finish()
|
||||
}
|
||||
|
||||
/// Regenerates the unique name of the file asset
|
||||
fn regenerate_unique_name(&mut self) {
|
||||
todo!()
|
||||
// // Generate an unique name for the file based on the options, source, and the current version of manganis
|
||||
// let uuid = self.hash();
|
||||
// let extension = self.options.extension();
|
||||
// let file_name = normalized_file_name(&self.location.source, extension);
|
||||
// let extension = extension.map(|e| format!(".{e}")).unwrap_or_default();
|
||||
// self.location.unique_name = format!("{file_name}{uuid:x}{extension}");
|
||||
// assert!(self.location.unique_name.len() <= MAX_PATH_LENGTH);
|
||||
}
|
||||
}
|
||||
|
||||
// impl Display for FileAsset {
|
||||
// fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
// let url_encoded = if self.url_encoded {
|
||||
// " [url encoded]"
|
||||
// } else {
|
||||
// ""
|
||||
// };
|
||||
|
||||
// write!(
|
||||
// f,
|
||||
// "{} [{}]{}",
|
||||
// self.location.source(),
|
||||
// self.options,
|
||||
// url_encoded
|
||||
// )
|
||||
// }
|
||||
// }
|
|
@ -1,114 +0,0 @@
|
|||
use std::{
|
||||
fmt::Display,
|
||||
hash::{DefaultHasher, Hash, Hasher},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use base64::Engine;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use url::Url;
|
||||
|
||||
use crate::{config, AssetSource, FileOptions};
|
||||
|
||||
/// A folder asset
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Clone)]
|
||||
pub struct FolderAsset {
|
||||
location: AssetSource,
|
||||
}
|
||||
|
||||
impl Display for FolderAsset {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}/**", self.location.source(),)
|
||||
}
|
||||
}
|
||||
|
||||
impl FolderAsset {
|
||||
/// Creates a new folder asset
|
||||
pub fn new(source: AssetSource) -> Self {
|
||||
let AssetSource::Local(source) = source else {
|
||||
panic!("Folder asset must be a local path");
|
||||
};
|
||||
assert!(source.canonicalized.is_dir());
|
||||
|
||||
let mut myself = Self {
|
||||
location: AssetSource {
|
||||
unique_name: Default::default(),
|
||||
source: AssetSource::Local(source),
|
||||
},
|
||||
};
|
||||
|
||||
myself.regenerate_unique_name();
|
||||
|
||||
myself
|
||||
}
|
||||
|
||||
/// Returns the location where the folder asset will be served from or None if the asset cannot be served
|
||||
pub fn served_location(&self) -> Result<String, ManganisSupportError> {
|
||||
resolve_asset_location(&self.location)
|
||||
}
|
||||
|
||||
/// Returns the unique name of the folder asset
|
||||
pub fn unique_name(&self) -> &str {
|
||||
&self.location.unique_name
|
||||
}
|
||||
|
||||
/// Returns the location of the folder asset
|
||||
pub fn location(&self) -> &AssetSource {
|
||||
&self.location
|
||||
}
|
||||
|
||||
/// Create a unique hash for the source folder by recursively hashing the files
|
||||
fn hash(&self) -> u64 {
|
||||
let mut hash = std::collections::hash_map::DefaultHasher::new();
|
||||
let folder = self
|
||||
.location
|
||||
.source
|
||||
.as_path()
|
||||
.expect("Folder asset must be a local path");
|
||||
|
||||
let mut folders_queued = vec![folder.clone()];
|
||||
|
||||
while let Some(folder) = folders_queued.pop() {
|
||||
// Add the folder to the hash
|
||||
for segment in folder.iter() {
|
||||
segment.hash(&mut hash);
|
||||
}
|
||||
|
||||
let files = std::fs::read_dir(folder).into_iter().flatten().flatten();
|
||||
for file in files {
|
||||
let path = file.path();
|
||||
let metadata = path.metadata().unwrap();
|
||||
|
||||
// If the file is a folder, add it to the queue otherwise add it to the hash
|
||||
if metadata.is_dir() {
|
||||
folders_queued.push(path);
|
||||
} else {
|
||||
// todo: these relative/original paths are not correct
|
||||
let local = self.location.source().local().unwrap();
|
||||
hash_file(
|
||||
&AssetSource::Local(LocalAssetSource {
|
||||
original: local.original.clone(),
|
||||
relative: local.relative.clone(),
|
||||
canonicalized: path,
|
||||
}),
|
||||
&mut hash,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add the manganis version to the hash
|
||||
hash_version(&mut hash);
|
||||
|
||||
hash.finish()
|
||||
}
|
||||
|
||||
/// Regenerate the unique name of the folder asset
|
||||
fn regenerate_unique_name(&mut self) {
|
||||
let uuid = self.hash();
|
||||
let file_name = normalized_file_name(&self.location.source, None);
|
||||
self.location.unique_name = format!("{file_name}{uuid:x}");
|
||||
assert!(self.location.unique_name.len() <= MAX_PATH_LENGTH);
|
||||
}
|
||||
}
|
|
@ -1,38 +0,0 @@
|
|||
use std::{
|
||||
fmt::Display,
|
||||
hash::{DefaultHasher, Hash, Hasher},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use base64::Engine;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{config, FileOptions};
|
||||
|
||||
/// A metadata asset
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Clone)]
|
||||
pub struct MetadataAsset {
|
||||
key: String,
|
||||
value: String,
|
||||
}
|
||||
|
||||
impl MetadataAsset {
|
||||
/// Creates a new metadata asset
|
||||
pub fn new(key: &str, value: &str) -> Self {
|
||||
Self {
|
||||
key: key.to_string(),
|
||||
value: value.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the key of the metadata asset
|
||||
pub fn key(&self) -> &str {
|
||||
&self.key
|
||||
}
|
||||
|
||||
/// Returns the value of the metadata asset
|
||||
pub fn value(&self) -> &str {
|
||||
&self.value
|
||||
}
|
||||
}
|
|
@ -1,535 +0,0 @@
|
|||
use std::{
|
||||
fmt::Display,
|
||||
hash::{DefaultHasher, Hash, Hasher},
|
||||
path::{Path, PathBuf},
|
||||
str::FromStr,
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use base64::Engine;
|
||||
// use fluent_uri::{
|
||||
// component::{Authority, Scheme},
|
||||
// encoding::EStr,
|
||||
// UriRef,
|
||||
// };
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{config, AssetError, FileOptions};
|
||||
|
||||
/// An asset identified by a URI
|
||||
///
|
||||
/// This could be a file, a folder, a remote URL, a data-encoded string, etc.
|
||||
///
|
||||
/// We don't want to download or copy the resource itself, just the metadata about it such that
|
||||
/// we can resolve it later.
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, Hash, Eq)]
|
||||
pub struct ResourceAsset {
|
||||
/// The input URI
|
||||
///
|
||||
/// This is basically whatever the user passed in to the macro
|
||||
pub input: PathBuf,
|
||||
|
||||
/// The local URI for fallbacks
|
||||
///
|
||||
/// This generally retains the original URI that was used to resolve the asset, but for files,
|
||||
/// it's resolved to an absolute path since we transform all schema-less URIs to file:// URIs.
|
||||
///
|
||||
/// If the aset is relative, this will be None since we can't figure it out at compile time.
|
||||
pub local: Option<PathBuf>,
|
||||
|
||||
/// The output URI that makes it into the final bundle.
|
||||
/// This explicitly has the `bundle://` scheme to make it clear that it is a bundle URI.
|
||||
///
|
||||
/// The bundler will generate a unique name for the asset and use that as the path to generate a
|
||||
/// final "flat" architecture.
|
||||
///
|
||||
/// bundle://asset/path/to/file.txt
|
||||
pub bundled: PathBuf,
|
||||
|
||||
/// The options for the resource
|
||||
pub options: Option<FileOptions>,
|
||||
}
|
||||
|
||||
impl ResourceAsset {
|
||||
///
|
||||
pub fn new(raw: &str) -> Self {
|
||||
todo!()
|
||||
}
|
||||
|
||||
///
|
||||
pub fn unique_name(&self) -> &str {
|
||||
todo!()
|
||||
}
|
||||
|
||||
/// Set the file options
|
||||
pub fn with_options(self, options: FileOptions) -> Self {
|
||||
todo!()
|
||||
// let mut myself = Self {
|
||||
// options,
|
||||
// url_encoded: false,
|
||||
// ..self
|
||||
// };
|
||||
|
||||
// myself.regenerate_unique_name();
|
||||
|
||||
// myself
|
||||
}
|
||||
|
||||
///
|
||||
pub fn set_options(&mut self, options: FileOptions) {
|
||||
self.options = Some(options);
|
||||
}
|
||||
|
||||
/// Set whether the file asset should be url encoded
|
||||
pub fn set_url_encoded(&mut self, url_encoded: bool) {
|
||||
todo!()
|
||||
// self.url_encoded = url_encoded;
|
||||
}
|
||||
|
||||
/// Returns whether the file asset should be url encoded
|
||||
pub fn url_encoded(&self) -> bool {
|
||||
todo!()
|
||||
// self.url_encoded
|
||||
}
|
||||
|
||||
/// Parse a string as a file source
|
||||
pub fn parse_file(path: &str) -> Result<Self, AssetError> {
|
||||
// let myself = Self::parse_any(path)?;
|
||||
// if let Self::Local(path) = &myself {
|
||||
// if !path.canonicalized.is_file() {
|
||||
// return Err(AssetError::NotFile(path.canonicalized.to_path_buf()));
|
||||
// }
|
||||
// }
|
||||
// Ok(myself)
|
||||
todo!()
|
||||
}
|
||||
|
||||
/// Parse a string as a folder source
|
||||
pub fn parse_folder(path: &str) -> Result<Self, AssetError> {
|
||||
// let myself = Self::parse_any(path)?;
|
||||
// if let Self::Local(path) = &myself {
|
||||
// if !path.canonicalized.is_dir() {
|
||||
// return Err(AssetError::NotFolder(path.canonicalized.to_path_buf()));
|
||||
// }
|
||||
// }
|
||||
// Ok(myself)
|
||||
todo!()
|
||||
}
|
||||
|
||||
///
|
||||
pub fn parse_url(url: &str) -> Result<Self, AssetError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
/// Parse a string as a file or folder source
|
||||
pub fn parse_any(src: &str) -> Result<Self, AssetError> {
|
||||
todo!()
|
||||
// // Process the input as a URI
|
||||
// let input: UriRef<String> = src.parse().unwrap();
|
||||
|
||||
// let local = match input.scheme().map(|x| x.as_str()) {
|
||||
// // For http and https, we just use the input as is
|
||||
// // In fallback mode we end up just passing the URI through
|
||||
// Some("http") | Some("https") => Some(input.clone()),
|
||||
|
||||
// // For file, we use the local path
|
||||
// // This will be `file://` in dev
|
||||
// // In release this will be `bundle://`
|
||||
// // Join the URI against the filesystem
|
||||
// None if input.path().is_absolute() => {
|
||||
// tood!()
|
||||
// // let manifest_dir: PathBuf = std::env::var("CARGO_MANIFEST_DIR").unwrap().into();
|
||||
// // let manifest_dir = manifest_dir.canonicalize().unwrap();
|
||||
// // let _local = manifest_dir.join(input.path().as_str());
|
||||
// // // Some(UriRef::<String>::parse(format!("file://{}", _local.display())).unwrap())
|
||||
// }
|
||||
// None => None,
|
||||
|
||||
// Some(scheme) => {
|
||||
// panic!("Unsupported scheme: {}", scheme);
|
||||
// }
|
||||
// };
|
||||
|
||||
// // Generate the bundled URI
|
||||
// //
|
||||
// // We:
|
||||
// // - flatten the URI with a hash
|
||||
// // - change the scheme to `bundle`
|
||||
// // - add the authority of pkg-name.bundle
|
||||
// //
|
||||
// // This results in a bundle-per dependency
|
||||
// let pkg_name = std::env::var("CARGO_PKG_NAME").unwrap();
|
||||
// let bundled = UriRef::builder()
|
||||
// .scheme(Scheme::new_or_panic("bundle"))
|
||||
// .authority_with(|b| b.host(EStr::new_or_panic(&format!("{}.bundle", pkg_name))))
|
||||
// .path(local.as_ref().map(|x| x.path()).unwrap_or_default())
|
||||
// .build()
|
||||
// .unwrap();
|
||||
|
||||
// Ok(Self {
|
||||
// input,
|
||||
// local,
|
||||
// bundled,
|
||||
// options: None,
|
||||
// })
|
||||
}
|
||||
|
||||
// ///
|
||||
// pub fn make_unique_id(uri: &UriRef<String>) -> String {
|
||||
// todo!()
|
||||
// }
|
||||
|
||||
///
|
||||
pub fn is_dir(&self) -> bool {
|
||||
todo!()
|
||||
}
|
||||
|
||||
///
|
||||
pub fn resolve(&self) -> String {
|
||||
// fn resolve_asset_location(location: &AssetSource) -> Result<String, ManganisSupportError> {
|
||||
// if !config::is_bundled() {
|
||||
// return Ok(location.source().raw());
|
||||
// }
|
||||
|
||||
// let root = crate::config::base_path();
|
||||
// let path = root.join(location.unique_name());
|
||||
// Ok(path.display().to_string())
|
||||
// }
|
||||
|
||||
todo!()
|
||||
}
|
||||
|
||||
///
|
||||
pub fn normalized(&self, extension: Option<&str>) -> String {
|
||||
// /// Create a normalized file name from the source
|
||||
// fn normalized_file_name(location: &AssetSource, extension: Option<&str>) -> String {
|
||||
// let last_segment = location.last_segment();
|
||||
// let mut file_name = to_alphanumeric_string_lossy(last_segment);
|
||||
|
||||
// let extension_len = extension.map(|e| e.len() + 1).unwrap_or_default();
|
||||
// let extension_and_hash_size = extension_len + HASH_SIZE;
|
||||
// // If the file name is too long, we need to truncate it
|
||||
// if file_name.len() + extension_and_hash_size > MAX_PATH_LENGTH {
|
||||
// file_name = file_name[..MAX_PATH_LENGTH - extension_and_hash_size].to_string();
|
||||
// }
|
||||
// file_name
|
||||
// }
|
||||
|
||||
// /// Normalize a string to only contain alphanumeric characters
|
||||
// fn to_alphanumeric_string_lossy(name: &str) -> String {
|
||||
// name.chars()
|
||||
// .filter(|c| c.is_alphanumeric())
|
||||
// .collect::<String>()
|
||||
// }
|
||||
|
||||
// fn hash_file(location: &AssetSource, hash: &mut DefaultHasher) {
|
||||
// // Hash the last time the file was updated and the file source. If either of these change, we need to regenerate the unique name
|
||||
// let updated = location.last_updated();
|
||||
// updated.hash(hash);
|
||||
// location.hash(hash);
|
||||
// }
|
||||
|
||||
todo!()
|
||||
}
|
||||
|
||||
// /// Covnert the asset source to a string
|
||||
// pub fn raw(&self) -> String {
|
||||
// match self {
|
||||
// Self::Local(path) => path.relative.display().to_string(),
|
||||
// Self::Remote(url) => url.to_string(),
|
||||
// }
|
||||
// }
|
||||
|
||||
// /// Try to convert the asset source to a local asset source
|
||||
// pub fn local(&self) -> Option<&AssetSource> {
|
||||
// match self {
|
||||
// Self::Local(path) => Some(path),
|
||||
// Self::Remote(_) => None,
|
||||
// }
|
||||
// }
|
||||
|
||||
// /// Try to convert the asset source to a path
|
||||
// pub fn as_path(&self) -> Option<&PathBuf> {
|
||||
// match self {
|
||||
// Self::Local(path) => Some(&path.canonicalized),
|
||||
// Self::Remote(_) => None,
|
||||
// }
|
||||
// }
|
||||
|
||||
// /// Try to convert the asset source to a url
|
||||
// pub fn as_url(&self) -> Option<&Url> {
|
||||
// match self {
|
||||
// Self::Local(_) => None,
|
||||
// Self::Remote(url) => Some(url),
|
||||
// }
|
||||
// }
|
||||
|
||||
// /// Returns the last segment of the file source used to generate a unique name
|
||||
// pub fn last_segment(&self) -> &str {
|
||||
// match self {
|
||||
// Self::Local(path) => path.canonicalized.file_name().unwrap().to_str().unwrap(),
|
||||
// Self::Remote(url) => url.path_segments().unwrap().last().unwrap(),
|
||||
// }
|
||||
// }
|
||||
|
||||
/// Returns the extension of the file source
|
||||
pub fn extension(&self) -> Option<String> {
|
||||
// match self {
|
||||
// Self::Local(path) => path
|
||||
// .canonicalized
|
||||
// .extension()
|
||||
// .map(|e| e.to_str().unwrap().to_string()),
|
||||
// Self::Remote(url) => reqwest::blocking::get(url.as_str())
|
||||
// .ok()
|
||||
// .and_then(|request| {
|
||||
// request
|
||||
// .headers()
|
||||
// .get("content-type")
|
||||
// .and_then(|content_type| {
|
||||
// content_type
|
||||
// .to_str()
|
||||
// .ok()
|
||||
// .map(|ty| ext_of_mime(ty).to_string())
|
||||
// })
|
||||
// }),
|
||||
// }
|
||||
todo!()
|
||||
}
|
||||
|
||||
// /// Attempts to get the mime type of the file source
|
||||
// pub fn mime_type(&self) -> Option<String> {
|
||||
// match self {
|
||||
// Self::Local(path) => get_mime_from_path(&path.canonicalized)
|
||||
// .ok()
|
||||
// .map(|mime| mime.to_string()),
|
||||
// Self::Remote(url) => reqwest::blocking::get(url.as_str())
|
||||
// .ok()
|
||||
// .and_then(|request| {
|
||||
// request
|
||||
// .headers()
|
||||
// .get("content-type")
|
||||
// .and_then(|content_type| Some(content_type.to_str().ok()?.to_string()))
|
||||
// }),
|
||||
// }
|
||||
// }
|
||||
|
||||
// /// Find when the asset was last updated
|
||||
// pub fn last_updated(&self) -> Option<String> {
|
||||
// match self {
|
||||
// Self::Local(path) => path.canonicalized.metadata().ok().and_then(|metadata| {
|
||||
// metadata
|
||||
// .modified()
|
||||
// .ok()
|
||||
// .map(|modified| format!("{:?}", modified))
|
||||
// .or_else(|| {
|
||||
// metadata
|
||||
// .created()
|
||||
// .ok()
|
||||
// .map(|created| format!("{:?}", created))
|
||||
// })
|
||||
// }),
|
||||
// Self::Remote(url) => reqwest::blocking::get(url.as_str())
|
||||
// .ok()
|
||||
// .and_then(|request| {
|
||||
// request
|
||||
// .headers()
|
||||
// .get("last-modified")
|
||||
// .and_then(|last_modified| {
|
||||
// last_modified
|
||||
// .to_str()
|
||||
// .ok()
|
||||
// .map(|last_modified| last_modified.to_string())
|
||||
// })
|
||||
// }),
|
||||
// }
|
||||
// }
|
||||
|
||||
/// Reads the file to a string
|
||||
pub fn read_to_string(&self) -> anyhow::Result<String> {
|
||||
// match &self {
|
||||
// AssetSource::Local(path) => Ok(std::fs::read_to_string(&path.canonicalized)
|
||||
// .with_context(|| {
|
||||
// format!(
|
||||
// "Failed to read file from location: {}",
|
||||
// path.canonicalized.display()
|
||||
// )
|
||||
// })?),
|
||||
// AssetSource::Remote(url) => {
|
||||
// let response = reqwest::blocking::get(url.as_str())
|
||||
// .with_context(|| format!("Failed to asset from url: {}", url.as_str()))?;
|
||||
// Ok(response.text().with_context(|| {
|
||||
// format!("Failed to read text for asset from url: {}", url.as_str())
|
||||
// })?)
|
||||
// }
|
||||
// }
|
||||
todo!()
|
||||
}
|
||||
|
||||
/// Reads the file to bytes
|
||||
pub fn read_to_bytes(&self) -> anyhow::Result<Vec<u8>> {
|
||||
// match &self {
|
||||
// AssetSource::Local(path) => {
|
||||
// Ok(std::fs::read(&path.canonicalized).with_context(|| {
|
||||
// format!(
|
||||
// "Failed to read file from location: {}",
|
||||
// path.canonicalized.display()
|
||||
// )
|
||||
// })?)
|
||||
// }
|
||||
// AssetSource::Remote(url) => {
|
||||
// let response = reqwest::blocking::get(url.as_str())
|
||||
// .with_context(|| format!("Failed to asset from url: {}", url.as_str()))?;
|
||||
// Ok(response.bytes().map(|b| b.to_vec()).with_context(|| {
|
||||
// format!("Failed to read text for asset from url: {}", url.as_str())
|
||||
// })?)
|
||||
// }
|
||||
// }
|
||||
todo!()
|
||||
}
|
||||
|
||||
/// The location where the asset will be served from post-bundle
|
||||
/// This is not the "resolved" location at runtime
|
||||
pub fn served_location(&self) -> Result<String, ()> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
// /// Returns the unique name of the file that the asset will be served from
|
||||
// pub fn unique_name(&self) -> &str {
|
||||
// &self.unique_name
|
||||
// }
|
||||
|
||||
// /// Returns the source of the file that the asset will be collected from
|
||||
// pub fn source(&self) -> &AssetSource {
|
||||
// &self.source
|
||||
// }
|
||||
|
||||
/// Returns the location of the file asset
|
||||
pub fn location(&self) -> &ResourceAsset {
|
||||
todo!()
|
||||
// &self.location
|
||||
}
|
||||
|
||||
/// Returns the options for the file asset
|
||||
pub fn options(&self) -> &FileOptions {
|
||||
todo!()
|
||||
// &self.options
|
||||
}
|
||||
|
||||
/// Returns the options for the file asset mutably
|
||||
pub fn with_options_mut(&mut self, f: impl FnOnce(&mut FileOptions)) {
|
||||
todo!()
|
||||
// f(&mut self.options);
|
||||
// self.regenerate_unique_name();
|
||||
}
|
||||
|
||||
/// Regenerates the unique name of the file asset
|
||||
fn regenerate_unique_name(&mut self) {
|
||||
// // Generate an unique name for the file based on the options, source, and the current version of manganis
|
||||
// let uuid = self.hash();
|
||||
// let extension = self.options.extension();
|
||||
// let file_name = normalized_file_name(&self.location.source, extension);
|
||||
// let extension = extension.map(|e| format!(".{e}")).unwrap_or_default();
|
||||
// self.location.unique_name = format!("{file_name}{uuid:x}{extension}");
|
||||
// assert!(self.location.unique_name.len() <= MAX_PATH_LENGTH);
|
||||
}
|
||||
|
||||
// /// Hash the file asset source and options
|
||||
// fn hash(&self) -> u64 {
|
||||
// let mut hash = std::collections::hash_map::DefaultHasher::new();
|
||||
// hash_file(&self.location.source, &mut hash);
|
||||
// self.options.hash(&mut hash);
|
||||
// hash_version(&mut hash);
|
||||
// hash.finish()
|
||||
// }
|
||||
}
|
||||
|
||||
/// Get the mime type from a URI using its extension
|
||||
fn ext_of_mime(mime: &str) -> &str {
|
||||
let mime = mime.split(';').next().unwrap_or_default();
|
||||
match mime.trim() {
|
||||
"application/octet-stream" => "bin",
|
||||
"text/css" => "css",
|
||||
"text/csv" => "csv",
|
||||
"text/html" => "html",
|
||||
"image/vnd.microsoft.icon" => "ico",
|
||||
"text/javascript" => "js",
|
||||
"application/json" => "json",
|
||||
"application/ld+json" => "jsonld",
|
||||
"application/rtf" => "rtf",
|
||||
"image/svg+xml" => "svg",
|
||||
"video/mp4" => "mp4",
|
||||
"text/plain" => "txt",
|
||||
"application/xml" => "xml",
|
||||
"application/zip" => "zip",
|
||||
"image/png" => "png",
|
||||
"image/jpeg" => "jpg",
|
||||
"image/gif" => "gif",
|
||||
"image/webp" => "webp",
|
||||
"image/avif" => "avif",
|
||||
"font/ttf" => "ttf",
|
||||
"font/woff" => "woff",
|
||||
"font/woff2" => "woff2",
|
||||
other => other.split('/').last().unwrap_or_default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the mime type from a path-like string
|
||||
fn get_mime_from_path(trimmed: &Path) -> std::io::Result<&'static str> {
|
||||
todo!()
|
||||
// if trimmed.extension().is_some_and(|ext| ext == "svg") {
|
||||
// return Ok("image/svg+xml");
|
||||
// }
|
||||
|
||||
// let res = match infer::get_from_path(trimmed)?.map(|f| f.mime_type()) {
|
||||
// Some(f) => {
|
||||
// if f == "text/plain" {
|
||||
// get_mime_by_ext(trimmed)
|
||||
// } else {
|
||||
// f
|
||||
// }
|
||||
// }
|
||||
// None => get_mime_by_ext(trimmed),
|
||||
// };
|
||||
|
||||
// Ok(res)
|
||||
}
|
||||
|
||||
/// Get the mime type from a URI using its extension
|
||||
fn get_mime_by_ext(trimmed: &Path) -> &'static str {
|
||||
get_mime_from_ext(trimmed.extension().and_then(|e| e.to_str()))
|
||||
}
|
||||
|
||||
/// Get the mime type from a URI using its extension
|
||||
pub fn get_mime_from_ext(extension: Option<&str>) -> &'static str {
|
||||
match extension {
|
||||
Some("bin") => "application/octet-stream",
|
||||
Some("css") => "text/css",
|
||||
Some("csv") => "text/csv",
|
||||
Some("html") => "text/html",
|
||||
Some("ico") => "image/vnd.microsoft.icon",
|
||||
Some("js") => "text/javascript",
|
||||
Some("json") => "application/json",
|
||||
Some("jsonld") => "application/ld+json",
|
||||
Some("mjs") => "text/javascript",
|
||||
Some("rtf") => "application/rtf",
|
||||
Some("svg") => "image/svg+xml",
|
||||
Some("mp4") => "video/mp4",
|
||||
Some("png") => "image/png",
|
||||
Some("jpg") => "image/jpeg",
|
||||
Some("gif") => "image/gif",
|
||||
Some("webp") => "image/webp",
|
||||
Some("avif") => "image/avif",
|
||||
Some("txt") => "text/plain",
|
||||
// Assume HTML when a TLD is found for eg. `dioxus:://dioxuslabs.app` | `dioxus://hello.com`
|
||||
Some(_) => "text/html",
|
||||
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types
|
||||
// using octet stream according to this:
|
||||
None => "application/octet-stream",
|
||||
}
|
||||
}
|
||||
|
||||
fn hash_version(hash: &mut DefaultHasher) {
|
||||
// // Hash the current version of manganis. If this changes, we need to regenerate the unique name
|
||||
// crate::built::PKG_VERSION.hash(hash);
|
||||
// crate::built::GIT_COMMIT_HASH.hash(hash);
|
||||
}
|
|
@ -1,31 +0,0 @@
|
|||
use std::{
|
||||
fmt::Display,
|
||||
hash::{DefaultHasher, Hash, Hasher},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use base64::Engine;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{config, FileOptions};
|
||||
|
||||
/// A tailwind class asset
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Clone)]
|
||||
pub struct TailwindAsset {
|
||||
classes: String,
|
||||
}
|
||||
|
||||
impl TailwindAsset {
|
||||
/// Creates a new tailwind class asset
|
||||
pub fn new(classes: &str) -> Self {
|
||||
Self {
|
||||
classes: classes.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the classes of the tailwind class asset
|
||||
pub fn classes(&self) -> &str {
|
||||
&self.classes
|
||||
}
|
||||
}
|
|
@ -1,2 +0,0 @@
|
|||
// The file `built.rs` was placed there by cargo and `build.rs`
|
||||
include!(concat!(env!("OUT_DIR"), "/built.rs"));
|
|
@ -1,41 +0,0 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
/// Get the base path for assets defined by the MG_BASEPATH environment variable
|
||||
///
|
||||
/// The basepath should always start and end with a `/`
|
||||
///
|
||||
/// If no basepath is set, the default is `/` which is the root of the assets folder.
|
||||
pub fn base_path() -> PathBuf {
|
||||
"/".into()
|
||||
// match option_env!("MG_BASEPATH") {
|
||||
// Some(path) => {
|
||||
// let path = path.trim_end_matches('/').trim_start_matches('/');
|
||||
// PathBuf::from(format!("/{path}/"))
|
||||
// }
|
||||
// None => "/".into(),
|
||||
// }
|
||||
}
|
||||
|
||||
/// MG_BUNDLED is set to true when the application is bundled.
|
||||
///
|
||||
/// When running under a dev server, this is false to prevent thrashing of the cache since an ordinary
|
||||
/// `cargo check` will not pass MG_BUNDLED.
|
||||
pub const fn is_bundled() -> bool {
|
||||
false
|
||||
// option_env!("MG_BUNDLED").is_some()
|
||||
}
|
||||
|
||||
/// The location of the manifest directory used to build this crate
|
||||
pub fn manifest_dir() -> Option<PathBuf> {
|
||||
std::env::var("CARGO_MANIFEST_DIR").ok().map(PathBuf::from)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// While the MG_BASEPATH handling is disabled, the base path is pinned to
    /// the asset root.
    #[test]
    fn base_path_works() {
        let expected: PathBuf = "/".into();
        assert_eq!(base_path(), expected);
    }
}
|
|
@ -1,574 +0,0 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
use std::{fmt::Display, str::FromStr};
|
||||
|
||||
/// The options for a file asset
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Clone, Hash, Eq)]
|
||||
pub enum FileOptions {
|
||||
/// An image asset
|
||||
Image(ImageOptions),
|
||||
/// A video asset
|
||||
Video(VideoOptions),
|
||||
/// A font asset
|
||||
Font(FontOptions),
|
||||
/// A css asset
|
||||
Css(CssOptions),
|
||||
/// A JavaScript asset
|
||||
Js(JsOptions),
|
||||
/// A Json asset
|
||||
Json(JsonOptions),
|
||||
/// Any other asset
|
||||
Other(UnknownFileOptions),
|
||||
}
|
||||
|
||||
impl Display for FileOptions {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Image(options) => write!(f, "{}", options),
|
||||
Self::Video(options) => write!(f, "{}", options),
|
||||
Self::Font(options) => write!(f, "{}", options),
|
||||
Self::Css(options) => write!(f, "{}", options),
|
||||
Self::Js(options) => write!(f, "{}", options),
|
||||
Self::Json(options) => write!(f, "{}", options),
|
||||
Self::Other(options) => write!(f, "{}", options),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FileOptions {
|
||||
/// Returns the default options for a given extension
|
||||
pub fn default_for_extension(extension: Option<&str>) -> Self {
|
||||
if let Some(extension) = extension {
|
||||
if extension == CssOptions::EXTENSION {
|
||||
return Self::Css(CssOptions::default());
|
||||
} else if extension == JsonOptions::EXTENSION {
|
||||
return Self::Json(JsonOptions::default());
|
||||
} else if let Ok(ty) = extension.parse::<ImageType>() {
|
||||
return Self::Image(ImageOptions::new(ty, None));
|
||||
} else if let Ok(ty) = extension.parse::<VideoType>() {
|
||||
return Self::Video(VideoOptions::new(ty));
|
||||
} else if let Ok(ty) = extension.parse::<FontType>() {
|
||||
return Self::Font(FontOptions::new(ty));
|
||||
} else if let Ok(ty) = extension.parse::<JsType>() {
|
||||
return Self::Js(JsOptions::new(ty));
|
||||
}
|
||||
}
|
||||
Self::Other(UnknownFileOptions {
|
||||
extension: extension.map(String::from),
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the extension for this file
|
||||
pub fn extension(&self) -> Option<&str> {
|
||||
match self {
|
||||
Self::Image(options) => Some(options.ty.extension()),
|
||||
Self::Video(options) => Some(options.ty.extension()),
|
||||
Self::Font(options) => Some(options.ty.extension()),
|
||||
Self::Css(_) => Some(CssOptions::EXTENSION),
|
||||
Self::Js(js) => Some(js.ty.extension()),
|
||||
Self::Json(_) => Some(JsonOptions::EXTENSION),
|
||||
Self::Other(extension) => extension.extension.as_deref(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for FileOptions {
|
||||
fn default() -> Self {
|
||||
Self::Other(UnknownFileOptions { extension: None })
|
||||
}
|
||||
}
|
||||
|
||||
/// The options for an image asset
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Clone, Hash, Eq)]
|
||||
pub struct ImageOptions {
|
||||
compress: bool,
|
||||
size: Option<(u32, u32)>,
|
||||
preload: bool,
|
||||
ty: ImageType,
|
||||
}
|
||||
|
||||
impl Display for ImageOptions {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
if let Some((x, y)) = self.size {
|
||||
write!(f, "{} ({}x{})", self.ty, x, y)?;
|
||||
} else {
|
||||
write!(f, "{}", self.ty)?;
|
||||
}
|
||||
if self.compress {
|
||||
write!(f, " (compressed)")?;
|
||||
}
|
||||
if self.preload {
|
||||
write!(f, " (preload)")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl ImageOptions {
|
||||
/// Creates a new image options struct
|
||||
pub fn new(ty: ImageType, size: Option<(u32, u32)>) -> Self {
|
||||
Self {
|
||||
compress: true,
|
||||
size,
|
||||
ty,
|
||||
preload: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns whether the image should be preloaded
|
||||
pub fn preload(&self) -> bool {
|
||||
self.preload
|
||||
}
|
||||
|
||||
/// Sets whether the image should be preloaded
|
||||
pub fn set_preload(&mut self, preload: bool) {
|
||||
self.preload = preload;
|
||||
}
|
||||
|
||||
/// Returns the image type
|
||||
pub fn ty(&self) -> &ImageType {
|
||||
&self.ty
|
||||
}
|
||||
|
||||
/// Sets the image type
|
||||
pub fn set_ty(&mut self, ty: ImageType) {
|
||||
self.ty = ty;
|
||||
}
|
||||
|
||||
/// Returns the size of the image
|
||||
pub fn size(&self) -> Option<(u32, u32)> {
|
||||
self.size
|
||||
}
|
||||
|
||||
/// Sets the size of the image
|
||||
pub fn set_size(&mut self, size: Option<(u32, u32)>) {
|
||||
self.size = size;
|
||||
}
|
||||
|
||||
/// Returns whether the image should be compressed
|
||||
pub fn compress(&self) -> bool {
|
||||
self.compress
|
||||
}
|
||||
|
||||
/// Sets whether the image should be compressed
|
||||
pub fn set_compress(&mut self, compress: bool) {
|
||||
self.compress = compress;
|
||||
}
|
||||
}
|
||||
|
||||
/// The type of an image
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Clone, Copy, Hash, Eq)]
|
||||
pub enum ImageType {
|
||||
/// A png image
|
||||
Png,
|
||||
/// A jpg image
|
||||
Jpg,
|
||||
/// An avif image
|
||||
Avif,
|
||||
/// A webp image
|
||||
Webp,
|
||||
}
|
||||
|
||||
impl ImageType {
|
||||
/// Returns the extension for this image type
|
||||
pub fn extension(&self) -> &'static str {
|
||||
match self {
|
||||
Self::Png => "png",
|
||||
Self::Jpg => "jpg",
|
||||
Self::Avif => "avif",
|
||||
Self::Webp => "webp",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for ImageType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.extension())
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for ImageType {
|
||||
type Err = ();
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s {
|
||||
"png" => Ok(Self::Png),
|
||||
"jpg" | "jpeg" => Ok(Self::Jpg),
|
||||
"avif" => Ok(Self::Avif),
|
||||
"webp" => Ok(Self::Webp),
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The options for a video asset
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Clone, Hash, Eq)]
|
||||
pub struct VideoOptions {
|
||||
/// Whether the video should be compressed
|
||||
compress: bool,
|
||||
/// Whether the video should be preloaded
|
||||
preload: bool,
|
||||
/// The type of the video
|
||||
ty: VideoType,
|
||||
}
|
||||
|
||||
impl Display for VideoOptions {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.ty)?;
|
||||
if self.compress {
|
||||
write!(f, " (compressed)")?;
|
||||
}
|
||||
if self.preload {
|
||||
write!(f, " (preload)")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl VideoOptions {
|
||||
/// Creates a new video options struct
|
||||
pub fn new(ty: VideoType) -> Self {
|
||||
Self {
|
||||
compress: true,
|
||||
ty,
|
||||
preload: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the type of the video
|
||||
pub fn ty(&self) -> &VideoType {
|
||||
&self.ty
|
||||
}
|
||||
|
||||
/// Sets the type of the video
|
||||
pub fn set_ty(&mut self, ty: VideoType) {
|
||||
self.ty = ty;
|
||||
}
|
||||
|
||||
/// Returns whether the video should be compressed
|
||||
pub fn compress(&self) -> bool {
|
||||
self.compress
|
||||
}
|
||||
|
||||
/// Sets whether the video should be compressed
|
||||
pub fn set_compress(&mut self, compress: bool) {
|
||||
self.compress = compress;
|
||||
}
|
||||
|
||||
/// Returns whether the video should be preloaded
|
||||
pub fn preload(&self) -> bool {
|
||||
self.preload
|
||||
}
|
||||
|
||||
/// Sets whether the video should be preloaded
|
||||
pub fn set_preload(&mut self, preload: bool) {
|
||||
self.preload = preload;
|
||||
}
|
||||
}
|
||||
|
||||
/// The type of a video
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Clone, Hash, Eq)]
|
||||
pub enum VideoType {
|
||||
/// An mp4 video
|
||||
MP4,
|
||||
/// A webm video
|
||||
Webm,
|
||||
/// A gif video
|
||||
GIF,
|
||||
}
|
||||
|
||||
impl VideoType {
|
||||
/// Returns the extension for this video type
|
||||
pub fn extension(&self) -> &'static str {
|
||||
match self {
|
||||
Self::MP4 => "mp4",
|
||||
Self::Webm => "webm",
|
||||
Self::GIF => "gif",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for VideoType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.extension())
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for VideoType {
|
||||
type Err = ();
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s {
|
||||
"mp4" => Ok(Self::MP4),
|
||||
"webm" => Ok(Self::Webm),
|
||||
"gif" => Ok(Self::GIF),
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The options for a font asset
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Clone, Hash, Eq)]
|
||||
pub struct FontOptions {
|
||||
ty: FontType,
|
||||
}
|
||||
|
||||
impl Display for FontOptions {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.ty)
|
||||
}
|
||||
}
|
||||
|
||||
impl FontOptions {
|
||||
/// Creates a new font options struct
|
||||
pub fn new(ty: FontType) -> Self {
|
||||
Self { ty }
|
||||
}
|
||||
|
||||
/// Returns the type of the font
|
||||
pub fn ty(&self) -> &FontType {
|
||||
&self.ty
|
||||
}
|
||||
}
|
||||
|
||||
/// The type of a font
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Clone, Hash, Eq)]
|
||||
pub enum FontType {
|
||||
/// A ttf (TrueType) font
|
||||
TTF,
|
||||
/// A woff (Web Open Font Format) font
|
||||
WOFF,
|
||||
/// A woff2 (Web Open Font Format 2) font
|
||||
WOFF2,
|
||||
}
|
||||
|
||||
impl FontType {
|
||||
/// Returns the extension for this font type
|
||||
pub fn extension(&self) -> &'static str {
|
||||
match self {
|
||||
Self::TTF => "ttf",
|
||||
Self::WOFF => "woff",
|
||||
Self::WOFF2 => "woff2",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for FontType {
|
||||
type Err = ();
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s {
|
||||
"ttf" => Ok(Self::TTF),
|
||||
"woff" => Ok(Self::WOFF),
|
||||
"woff2" => Ok(Self::WOFF2),
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for FontType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::TTF => write!(f, "ttf"),
|
||||
Self::WOFF => write!(f, "woff"),
|
||||
Self::WOFF2 => write!(f, "woff2"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The options for a css asset
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Clone, Hash, Eq)]
|
||||
pub struct CssOptions {
|
||||
minify: bool,
|
||||
preload: bool,
|
||||
}
|
||||
|
||||
impl Default for CssOptions {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for CssOptions {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
if self.minify {
|
||||
write!(f, "minified")?;
|
||||
}
|
||||
if self.preload {
|
||||
write!(f, " (preload)")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl CssOptions {
|
||||
const EXTENSION: &'static str = "css";
|
||||
|
||||
/// Creates a new css options struct
|
||||
pub const fn new() -> Self {
|
||||
Self {
|
||||
minify: true,
|
||||
preload: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns whether the css should be minified
|
||||
pub fn minify(&self) -> bool {
|
||||
self.minify
|
||||
}
|
||||
|
||||
/// Sets whether the css should be minified
|
||||
pub fn set_minify(&mut self, minify: bool) {
|
||||
self.minify = minify;
|
||||
}
|
||||
|
||||
/// Returns whether the css should be preloaded
|
||||
pub fn preload(&self) -> bool {
|
||||
self.preload
|
||||
}
|
||||
|
||||
/// Sets whether the css should be preloaded
|
||||
pub fn set_preload(&mut self, preload: bool) {
|
||||
self.preload = preload;
|
||||
}
|
||||
}
|
||||
|
||||
/// The type of a Javascript asset
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Clone, Copy, Hash, Default, Eq)]
|
||||
pub enum JsType {
|
||||
/// A js asset
|
||||
#[default]
|
||||
Js,
|
||||
// TODO: support ts files
|
||||
}
|
||||
|
||||
impl JsType {
|
||||
/// Returns the extension for this js type
|
||||
pub fn extension(&self) -> &'static str {
|
||||
match self {
|
||||
Self::Js => "js",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for JsType {
|
||||
type Err = ();
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s {
|
||||
"js" => Ok(Self::Js),
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for JsType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.extension())
|
||||
}
|
||||
}
|
||||
|
||||
/// The options for a Javascript asset
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Clone, Hash, Default, Eq)]
|
||||
pub struct JsOptions {
|
||||
ty: JsType,
|
||||
minify: bool,
|
||||
preload: bool,
|
||||
}
|
||||
|
||||
impl Display for JsOptions {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "js")?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl JsOptions {
|
||||
/// Creates a new js options struct
|
||||
pub fn new(ty: JsType) -> Self {
|
||||
Self {
|
||||
ty,
|
||||
preload: false,
|
||||
minify: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns whether the js should be preloaded
|
||||
pub fn preload(&self) -> bool {
|
||||
self.preload
|
||||
}
|
||||
|
||||
/// Sets whether the js should be preloaded
|
||||
pub fn set_preload(&mut self, preload: bool) {
|
||||
self.preload = preload;
|
||||
}
|
||||
|
||||
/// Returns if the js should be minified
|
||||
pub fn minify(&self) -> bool {
|
||||
self.minify
|
||||
}
|
||||
|
||||
/// Sets if the js should be minified
|
||||
pub fn set_minify(&mut self, minify: bool) {
|
||||
self.minify = minify;
|
||||
}
|
||||
}
|
||||
|
||||
/// The options for a Json asset
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Clone, Hash, Default, Eq)]
|
||||
pub struct JsonOptions {
|
||||
preload: bool,
|
||||
}
|
||||
|
||||
impl Display for JsonOptions {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "json")?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl JsonOptions {
|
||||
/// The extension of the json asset
|
||||
pub const EXTENSION: &'static str = "json";
|
||||
|
||||
/// Creates a new json options struct
|
||||
pub fn new() -> Self {
|
||||
Self { preload: false }
|
||||
}
|
||||
|
||||
/// Returns whether the json should be preloaded
|
||||
pub fn preload(&self) -> bool {
|
||||
self.preload
|
||||
}
|
||||
|
||||
/// Sets whether the json should be preloaded
|
||||
pub fn set_preload(&mut self, preload: bool) {
|
||||
self.preload = preload;
|
||||
}
|
||||
}
|
||||
|
||||
/// The options for an unknown file asset
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Clone, Hash, Eq)]
|
||||
pub struct UnknownFileOptions {
|
||||
extension: Option<String>,
|
||||
}
|
||||
|
||||
impl Display for UnknownFileOptions {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
if let Some(extension) = &self.extension {
|
||||
write!(f, "{}", extension)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl UnknownFileOptions {
|
||||
/// Creates a new unknown file options struct
|
||||
pub fn new(extension: Option<String>) -> Self {
|
||||
Self { extension }
|
||||
}
|
||||
|
||||
/// Returns the extension of the file
|
||||
pub fn extension(&self) -> Option<&str> {
|
||||
self.extension.as_deref()
|
||||
}
|
||||
}
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue