Mirror of https://github.com/launchbadge/sqlx, synced 2024-11-10 06:24:16 +00:00
WIP feat: add support for sqlx.toml
This commit is contained in:
parent 6a4f61e3b3
commit 8db7d67b73

3 changed files with 170 additions and 88 deletions
sqlx-macros-core/src/query/config.rs (new file)

@@ -0,0 +1,54 @@
//! `sqlx.toml` config.

use std::collections::HashMap;
use std::path::Path;

#[derive(serde::Deserialize, Default)]
#[serde(rename_all = "kebab-case")]
#[serde(default)]
pub struct Config {
    /// Override the environment variable used to connect to the database (default `DATABASE_URL`).
    pub url_var: Option<String>,

    /// Configure SQL -> Rust type mappings.
    pub types: TypesConfig,

    // Possible future extensions:
    // * enable out-of-tree drivers
    // * inheritance
    // * create a shadowed set of macros with a different config:
    //   https://github.com/launchbadge/sqlx/issues/121#issuecomment-609092100
}

#[derive(serde::Deserialize, Default)]
#[serde(rename_all = "kebab-case")]
#[serde(default)]
pub struct TypesConfig {
    /// Choose the preferred crate for time-related types (`TIMESTAMP`, `DATETIME`, `TIME`, `DATE`).
    pub time_crate: Option<TimeCrate>,

    /// Choose the preferred crate for `NUMERIC`.
    pub numeric_crate: Option<NumericCrate>,

    /// Global type overrides (SQL type name -> fully qualified Rust path).
    pub r#override: HashMap<String, String>,
}

#[derive(serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum TimeCrate {
    Chrono,
    Time,
}

#[derive(serde::Deserialize)]
pub enum NumericCrate {
    #[serde(rename = "rust_decimal")]
    RustDecimal,
    #[serde(rename = "bigdecimal")]
    BigDecimal,
}

pub fn load(path: &Path) -> crate::Result<Config> {
    // NOTE: minimal sketch of the intended behavior; assumes the `toml` crate is
    // available as a dependency and that `crate::Error` converts from `String`.
    let contents = std::fs::read_to_string(path)
        .map_err(|e| format!("failed to read {}: {e}", path.display()))?;
    let config: Config = toml::from_str(&contents)
        .map_err(|e| format!("failed to parse {}: {e}", path.display()))?;
    Ok(config)
}
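To make the expected file shape concrete, a hedged round-trip sketch (not part of this commit; key names follow the serde renames above, the values are illustrative, and the `toml` crate is assumed to be available as a dev-dependency):

// Hypothetical round-trip test (illustrative only; assumes the `toml` crate).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parses_example_sqlx_toml() {
        // Example `sqlx.toml`: keys are kebab-case per the serde attributes above.
        let example = r#"
            url-var = "APP_DATABASE_URL"

            [types]
            time-crate = "chrono"
            numeric-crate = "bigdecimal"

            [types.override]
            citext = "String"
        "#;

        let config: Config = toml::from_str(example).expect("example config should parse");
        assert_eq!(config.url_var.as_deref(), Some("APP_DATABASE_URL"));
        assert!(matches!(config.types.time_crate, Some(TimeCrate::Chrono)));
        assert!(matches!(config.types.numeric_crate, Some(NumericCrate::BigDecimal)));
        assert_eq!(config.types.r#override.get("citext").map(String::as_str), Some("String"));
    }
}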
sqlx-macros-core/src/query/metadata.rs (new file)

@@ -0,0 +1,105 @@
use std::path::PathBuf;

use once_cell::sync::OnceCell;

use crate::query::config::Config;
use crate::query::{config, env};

pub struct Metadata {
    #[allow(unused)]
    pub manifest_dir: PathBuf,
    pub offline: bool,
    pub config: Config,
    pub database_url: Option<String>,
    workspace_root: OnceCell<PathBuf>,
}

impl Metadata {
    pub fn get() -> crate::Result<&'static Self> {
        static METADATA: OnceCell<Metadata> = OnceCell::new();
        METADATA.get_or_try_init(Self::init)
    }

    pub fn url_var(&self) -> &str {
        self.config.url_var.as_deref().unwrap_or("DATABASE_URL")
    }

    // If we are in a workspace, look up `workspace_root` since `CARGO_MANIFEST_DIR` won't
    // reflect the workspace dir: https://github.com/rust-lang/cargo/issues/3946
    pub fn workspace_root(&self) -> crate::Result<&PathBuf> {
        self.workspace_root.get_or_try_init(|| {
            use serde::Deserialize;
            use std::process::Command;

            let cargo = env("CARGO").map_err(|_| "`CARGO` must be set")?;

            let output = Command::new(&cargo)
                .args(&["metadata", "--format-version=1", "--no-deps"])
                .current_dir(&self.manifest_dir)
                .env_remove("__CARGO_FIX_PLZ")
                .output()
                .map_err(|e| format!("Could not fetch metadata: {e:?}"))?;

            #[derive(Deserialize)]
            struct CargoMetadata {
                workspace_root: PathBuf,
            }

            let metadata: CargoMetadata = serde_json::from_slice(&output.stdout)
                .map_err(|e| format!("Invalid `cargo metadata` output: {e:?}"))?;

            Ok(metadata.workspace_root)
        })
    }

    fn init() -> crate::Result<Self> {
        let manifest_dir: PathBuf = env("CARGO_MANIFEST_DIR")
            .map_err(|_| "`CARGO_MANIFEST_DIR` must be set")?
            .into();

        let config_path = manifest_dir.join("sqlx.toml");

        let config = if config_path.exists() {
            config::load(&config_path)
                .map_err(|e| format!("failed to load config at {}: {e:?}", config_path.display()))?
        } else {
            Config::default()
        };

        // If a .env file exists at CARGO_MANIFEST_DIR, load environment variables from it;
        // otherwise fall back to the default dotenv behaviour.
        let env_path = manifest_dir.join(".env");

        #[cfg_attr(not(procmacro2_semver_exempt), allow(unused_variables))]
        let env_path = if env_path.exists() {
            let res = dotenvy::from_path(&env_path);
            if let Err(e) = res {
                return Err(format!("failed to load environment from {env_path:?}, {e}").into());
            }

            Some(env_path)
        } else {
            dotenvy::dotenv().ok()
        };

        // Tell the compiler to watch the `.env` for changes, if applicable.
        #[cfg(procmacro2_semver_exempt)]
        if let Some(env_path) = env_path.as_ref().and_then(|path| path.to_str()) {
            proc_macro::tracked_path::path(env_path);
        }

        let offline = env("SQLX_OFFLINE")
            .map(|s| s.eq_ignore_ascii_case("true") || s == "1")
            .unwrap_or(false);

        let database_url = env(config.url_var.as_deref().unwrap_or("DATABASE_URL")).ok();

        Ok(Metadata {
            manifest_dir,
            offline,
            config,
            database_url,
            // Resolved lazily by `workspace_root()`.
            workspace_root: OnceCell::new(),
        })
    }
}
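A minimal sketch of the consumer side (hypothetical, not part of this commit), showing how a caller can report which environment variable the macros read; it mirrors the branch used in `expand_input` below:

// Hypothetical helper (illustrative only); mirrors the match in `expand_input` below.
fn describe_data_source() -> crate::Result<String> {
    let meta = Metadata::get()?;
    Ok(match meta {
        Metadata { offline: false, database_url: Some(url), .. } => {
            // `url` was read from the variable named by `url-var` in `sqlx.toml`
            // (falling back to `DATABASE_URL`).
            format!("live checks against `{url}` (from `{}`)", meta.url_var())
        }
        _ => "cached query data from a `.sqlx` directory".to_string(),
    })
}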
@@ -16,9 +16,13 @@ use crate::query::data::{hash_string, DynQueryData, QueryData};
 use crate::query::input::RecordType;
 use either::Either;
 use url::Url;
+use crate::query::metadata::Metadata;

 mod args;
+mod config;
 mod data;
+mod metadata;

 mod input;
 mod output;
@@ -68,107 +72,26 @@ impl<'a> QueryDataSource<'a> {
     }
 }

-struct Metadata {
-    #[allow(unused)]
-    manifest_dir: PathBuf,
-    offline: bool,
-    database_url: Option<String>,
-    workspace_root: Arc<Mutex<Option<PathBuf>>>,
-}
-
-impl Metadata {
-    pub fn workspace_root(&self) -> PathBuf {
-        let mut root = self.workspace_root.lock().unwrap();
-        if root.is_none() {
-            use serde::Deserialize;
-            use std::process::Command;
-
-            let cargo = env("CARGO").expect("`CARGO` must be set");
-
-            let output = Command::new(&cargo)
-                .args(&["metadata", "--format-version=1", "--no-deps"])
-                .current_dir(&self.manifest_dir)
-                .env_remove("__CARGO_FIX_PLZ")
-                .output()
-                .expect("Could not fetch metadata");
-
-            #[derive(Deserialize)]
-            struct CargoMetadata {
-                workspace_root: PathBuf,
-            }
-
-            let metadata: CargoMetadata =
-                serde_json::from_slice(&output.stdout).expect("Invalid `cargo metadata` output");
-
-            *root = Some(metadata.workspace_root);
-        }
-        root.clone().unwrap()
-    }
-}
-
-// If we are in a workspace, lookup `workspace_root` since `CARGO_MANIFEST_DIR` won't
-// reflect the workspace dir: https://github.com/rust-lang/cargo/issues/3946
-static METADATA: Lazy<Metadata> = Lazy::new(|| {
-    let manifest_dir: PathBuf = env("CARGO_MANIFEST_DIR")
-        .expect("`CARGO_MANIFEST_DIR` must be set")
-        .into();
-
-    // If a .env file exists at CARGO_MANIFEST_DIR, load environment variables from this,
-    // otherwise fallback to default dotenv behaviour.
-    let env_path = manifest_dir.join(".env");
-
-    #[cfg_attr(not(procmacro2_semver_exempt), allow(unused_variables))]
-    let env_path = if env_path.exists() {
-        let res = dotenvy::from_path(&env_path);
-        if let Err(e) = res {
-            panic!("failed to load environment from {env_path:?}, {e}");
-        }
-
-        Some(env_path)
-    } else {
-        dotenvy::dotenv().ok()
-    };
-
-    // tell the compiler to watch the `.env` for changes, if applicable
-    #[cfg(procmacro2_semver_exempt)]
-    if let Some(env_path) = env_path.as_ref().and_then(|path| path.to_str()) {
-        proc_macro::tracked_path::path(env_path);
-    }
-
-    let offline = env("SQLX_OFFLINE")
-        .map(|s| s.eq_ignore_ascii_case("true") || s == "1")
-        .unwrap_or(false);
-
-    let database_url = env("DATABASE_URL").ok();
-
-    Metadata {
-        manifest_dir,
-        offline,
-        database_url,
-        workspace_root: Arc::new(Mutex::new(None)),
-    }
-});
-
 pub fn expand_input<'a>(
     input: QueryMacroInput,
     drivers: impl IntoIterator<Item = &'a QueryDriver>,
 ) -> crate::Result<TokenStream> {
-    let data_source = match &*METADATA {
+    let data_source = match Metadata::get()? {
         Metadata {
             offline: false,
             database_url: Some(db_url),
             ..
         } => QueryDataSource::live(db_url)?,

-        Metadata { offline, .. } => {
+        meta @ Metadata { offline, manifest_dir, .. } => {
             // Try load the cached query metadata file.
             let filename = format!("query-{}.json", hash_string(&input.sql));

             // Check SQLX_OFFLINE_DIR, then local .sqlx, then workspace .sqlx.
             let dirs = [
                 || env("SQLX_OFFLINE_DIR").ok().map(PathBuf::from),
-                || Some(METADATA.manifest_dir.join(".sqlx")),
-                || Some(METADATA.workspace_root().join(".sqlx")),
+                || Some(manifest_dir.join(".sqlx")),
+                || Some(meta.workspace_root().expect("failed to find workspace root").join(".sqlx")),
             ];
             let Some(data_file_path) = dirs
                 .iter()
@@ -178,10 +101,10 @@ pub fn expand_input<'a>(
             else {
                 return Err(
                     if *offline {
-                        "`SQLX_OFFLINE=true` but there is no cached data for this query, run `cargo sqlx prepare` to update the query cache or unset `SQLX_OFFLINE`"
+                        "`SQLX_OFFLINE=true` but there is no cached data for this query, run `cargo sqlx prepare` (with `sqlx-cli` installed) to update the query cache or unset `SQLX_OFFLINE`".into()
                     } else {
-                        "set `DATABASE_URL` to use query macros online, or run `cargo sqlx prepare` to update the query cache"
-                    }.into()
+                        format!("set `{}` to use query macros online, or run `cargo sqlx prepare` (with `sqlx-cli` installed) to update the query cache", meta.url_var()).into()
+                    }
                 );
             };
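As a concrete illustration (assuming a hypothetical `url-var = "APP_DATABASE_URL"` in `sqlx.toml`), the online-mode error would now read: set `APP_DATABASE_URL` to use query macros online, or run `cargo sqlx prepare` (with `sqlx-cli` installed) to update the query cache.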