get offline macros working with todos example

Austin Bonander 2020-05-13 20:30:12 -07:00 committed by Ryan Leckey
parent 21041ff55e
commit 6b248e0d5f
15 changed files with 186 additions and 129 deletions

Cargo.lock generated

@@ -304,6 +304,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-trait",
+ "cargo_metadata",
"chrono",
"console",
"dialoguer",
@@ -318,6 +319,18 @@ dependencies = [
"url 2.1.1",
]
+ [[package]]
+ name = "cargo_metadata"
+ version = "0.10.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "b8de60b887edf6d74370fc8eb177040da4847d971d6234c7b13a6da324ef0caf"
+ dependencies = [
+  "semver",
+  "serde",
+  "serde_derive",
+  "serde_json",
+ ]
[[package]]
name = "cc"
version = "1.0.50"
@@ -1650,6 +1663,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
dependencies = [
"semver-parser",
+ "serde",
]
[[package]]
@@ -1880,6 +1894,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-std",
+ "dotenv",
"futures 0.3.4",
"paw",
"sqlx",


@@ -13,12 +13,12 @@ default-run = "sqlx"
[[bin]]
name = "sqlx"
- path = "src/main.rs"
+ path = "src/bin/sqlx.rs"
# enables invocation as `cargo sqlx`; required for `prepare` subcommand
[[bin]]
name = "cargo-sqlx"
- path = "src/main.rs"
+ path = "src/bin/cargo-sqlx.rs"
[dependencies]
dotenv = "0.15"
@@ -35,6 +35,7 @@ dialoguer = "0.5.0"
serde_json = { version = "1.0.53", features = ["preserve_order"] }
serde = "1.0.110"
glob = "0.3.0"
+ cargo_metadata = "0.10.0"
[features]
default = [ "postgres", "sqlite", "mysql" ]


@@ -0,0 +1,18 @@
use cargo_sqlx::Command;
use structopt::{clap, StructOpt};
use std::env;
#[tokio::main]
async fn main() -> anyhow::Result<()> {
// when invoked as `cargo sqlx [...]` the args we see are `[...]/cargo-sqlx sqlx prepare`
// so we want to notch out that superfluous "sqlx"
let args = env::args_os().skip(2);
let matches = Command::clap()
.bin_name("cargo sqlx")
.setting(clap::AppSettings::NoBinaryName)
.get_matches_from(args);
cargo_sqlx::run(Command::from_clap(&matches)).await
}


@@ -0,0 +1,8 @@
use cargo_sqlx::Command;
use structopt::StructOpt;
#[tokio::main]
async fn main() -> anyhow::Result<()> {
// no special handling here
cargo_sqlx::run(Command::from_args()).await
}
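Together these two new entry points let the same CLI run standalone and as a cargo subcommand: cargo resolves `cargo sqlx` to an executable named `cargo-sqlx` on the PATH and passes the subcommand name through as the argument after the binary path, which is why the first binary skips two arguments. A minimal, self-contained sketch of that notching (the argv shapes in the comments are illustrative):

use std::env;

fn main() {
    // when cargo dispatches to `cargo-sqlx`, argv looks like
    // ["/path/to/cargo-sqlx", "sqlx", "prepare", ...]; skipping two
    // elements leaves only the subcommand's own arguments
    let args: Vec<String> = env::args().skip(2).collect();
    println!("{:?}", args);
}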


@@ -1,16 +1,16 @@
use std::env;
use std::fs;
use std::fs::File;
use std::io::prelude::*;
use url::Url;
use dotenv::dotenv;
use structopt::StructOpt;
use anyhow::{anyhow, Context};
use console::style;
use dialoguer::Confirmation;
mod migrator;
@@ -18,12 +18,12 @@ mod db
mod migration;
mod prepare;
use migrator::DatabaseMigrator;
/// Sqlx commandline tool
#[derive(StructOpt, Debug)]
#[structopt(name = "Sqlx")]
- enum Opt {
+ pub enum Command {
#[structopt(alias = "mig")]
Migrate(MigrationCommand),
@@ -54,7 +54,7 @@ enum Opt {
/// Adds and runs migrations. Alias: mig
#[derive(StructOpt, Debug)]
#[structopt(name = "Sqlx migrator")]
- enum MigrationCommand {
+ pub enum MigrationCommand {
/// Add new migration with name <timestamp>_<migration_name>.sql
Add { name: String },
@@ -68,7 +68,7 @@ enum MigrationCommand {
/// Create or drops database depending on your connection string. Alias: db
#[derive(StructOpt, Debug)]
#[structopt(name = "Sqlx migrator")]
- enum DatabaseCommand {
+ pub enum DatabaseCommand {
/// Create database in url
Create,
@@ -76,26 +76,22 @@ enum DatabaseCommand {
Drop,
}
- #[tokio::main]
- async fn main() -> anyhow::Result<()> {
+ pub async fn run(cmd: Command) -> anyhow::Result<()> {
dotenv().ok();
- let opt = Opt::from_args();
- match opt {
- Opt::Migrate(command) => match command {
+ match cmd {
+ Command::Migrate(migrate) => match migrate {
MigrationCommand::Add { name } => migration::add_file(&name)?,
MigrationCommand::Run => migration::run().await?,
MigrationCommand::List => migration::list().await?,
},
- Opt::Database(command) => match command {
+ Command::Database(database) => match database {
DatabaseCommand::Create => db::run_create().await?,
DatabaseCommand::Drop => db::run_drop().await?,
},
- Opt::Prepare { check: false } => prepare::run()?,
- Opt::Prepare { check: true } => prepare::check()?,
+ Command::Prepare { check: false } => prepare::run()?,
+ Command::Prepare { check: true } => prepare::check()?,
};
println!("All done!");
Ok(())
}


@@ -2,9 +2,11 @@ use anyhow::{anyhow, bail, Context};
use std::process::Command;
use std::{env, fs};
+ use cargo_metadata::MetadataCommand;
use std::collections::BTreeMap;
use std::fs::File;
use std::path::Path;
+ use std::time::SystemTime;
use url::Url;
type QueryData = BTreeMap<String, serde_json::Value>;
@@ -22,10 +24,17 @@ pub fn run() -> anyhow::Result<()> {
let data = run_prepare_step()?;
serde_json::to_writer_pretty(
- File::create("sqlx-data.json")?,
+ File::create("sqlx-data.json").context("failed to create/open `sqlx-data.json`")?,
&DataFile { db: db_kind, data },
)
- .map_err(Into::into)
+ .context("failed to write to `sqlx-data.json`")?;
+ println!(
+ "query data written to `sqlx-data.json` in the current directory; \
+ please check this into version control"
+ );
+ Ok(())
}
pub fn check() -> anyhow::Result<()> {
@@ -66,12 +75,25 @@ fn run_prepare_step() -> anyhow::Result<QueryData> {
let cargo = env::var("CARGO")
.context("`prepare` subcommand may only be invoked as `cargo sqlx prepare``")?;
- if !Command::new(cargo).arg("check").status()?.success() {
- bail!("`cargo check` failed");
+ let check_status = Command::new(&cargo)
+ .arg("check")
+ // set an always-changing env var that the macros depend on via `env!()`
+ .env(
+ "__SQLX_RECOMPILE_TRIGGER",
+ SystemTime::UNIX_EPOCH.elapsed()?.as_millis().to_string(),
+ )
+ .status()?;
+ if !check_status.success() {
+ bail!("`cargo check` failed with status: {}", check_status);
}
- let save_dir = env::var("CARGO_TARGET_DIR").unwrap_or_else(|_| "target/sqlx".into());
- let pattern = Path::new(&save_dir).join("/query-*.json");
+ let metadata = MetadataCommand::new()
+ .cargo_path(cargo)
+ .exec()
+ .context("failed to execute `cargo metadata`")?;
+ let pattern = metadata.target_directory.join("sqlx/query-*.json");
let mut data = BTreeMap::new();
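A side note on the replaced glob: `Path::join` with an absolute path discards the base entirely, so the old `Path::new(&save_dir).join("/query-*.json")` always produced `/query-*.json` at the filesystem root, regardless of `save_dir`. Asking `cargo metadata` for the target directory and joining a relative component avoids both that footgun and the guess that the target directory is `target/`. A small self-contained demonstration of the footgun:

use std::path::{Path, PathBuf};

fn main() {
    // joining an absolute path replaces the base outright
    assert_eq!(
        Path::new("target/sqlx").join("/query-*.json"),
        PathBuf::from("/query-*.json")
    );
    // a relative component extends the base as intended
    assert_eq!(
        Path::new("target").join("sqlx/query-*.json"),
        PathBuf::from("target/sqlx/query-*.json")
    );
}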


@@ -9,5 +9,6 @@ anyhow = "1.0"
async-std = { version = "1.4.0", features = [ "attributes" ] }
futures = "0.3"
paw = "1.0"
- sqlx = { path = "../../../", features = ["postgres"] }
+ sqlx = { path = "../../../", features = ["postgres", "offline"] }
structopt = { version = "0.3", features = ["paw"] }
+ dotenv = "0.15.0"


@@ -1,5 +1,7 @@
+ use sqlx_core as sqlx;
impl_database_ext! {
- sqlx_core::mysql::MySql {
+ sqlx::mysql::MySql {
u8,
u16,
u32,
@@ -18,34 +20,34 @@ impl_database_ext! {
Vec<u8>,
#[cfg(all(feature = "chrono", not(feature = "time")))]
- sqlx_core::types::chrono::NaiveTime,
+ sqlx::types::chrono::NaiveTime,
#[cfg(all(feature = "chrono", not(feature = "time")))]
- sqlx_core::types::chrono::NaiveDate,
+ sqlx::types::chrono::NaiveDate,
#[cfg(all(feature = "chrono", not(feature = "time")))]
- sqlx_core::types::chrono::NaiveDateTime,
+ sqlx::types::chrono::NaiveDateTime,
#[cfg(all(feature = "chrono", not(feature = "time")))]
- sqlx_core::types::chrono::DateTime<sqlx_core::types::chrono::Utc>,
+ sqlx::types::chrono::DateTime<sqlx::types::chrono::Utc>,
#[cfg(feature = "time")]
- sqlx_core::types::time::Time,
+ sqlx::types::time::Time,
#[cfg(feature = "time")]
- sqlx_core::types::time::Date,
+ sqlx::types::time::Date,
#[cfg(feature = "time")]
- sqlx_core::types::time::PrimitiveDateTime,
+ sqlx::types::time::PrimitiveDateTime,
#[cfg(feature = "time")]
- sqlx_core::types::time::OffsetDateTime,
+ sqlx::types::time::OffsetDateTime,
#[cfg(feature = "bigdecimal")]
- sqlx_core::types::BigDecimal,
+ sqlx::types::BigDecimal,
},
ParamChecking::Weak,
feature-types: info => info.type_feature_gate(),
- row = sqlx_core::mysql::MySqlRow,
+ row = sqlx::mysql::MySqlRow,
name = "MySQL/MariaDB"
}
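The `use sqlx_core as sqlx;` alias added at the top of each of these modules appears to be doing real work: inside sqlx-macros the core crate is a direct dependency named `sqlx_core`, but the type paths in these invocations end up tokenized into generated code (and, with the offline feature, serialized into query data) that must compile in user crates, where the facade crate is named `sqlx`. Aliasing lets one spelling serve both. A self-contained sketch of the idiom, with `std::collections` standing in for the real crate and a hypothetical alias name:

// hypothetical alias purely for illustration
use std::collections as mycrate;

fn main() {
    // code written against the alias resolves against the real module;
    // in sqlx-macros the same trick lets paths be spelled `sqlx::...`
    // even though the dependency is locally named `sqlx_core`
    let map: mycrate::BTreeMap<String, i32> = mycrate::BTreeMap::new();
    println!("{}", map.len());
}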


@@ -1,5 +1,7 @@
+ use sqlx_core as sqlx;
impl_database_ext! {
- sqlx_core::postgres::Postgres {
+ sqlx::postgres::Postgres {
bool,
String | &str,
i8,
@@ -13,37 +15,37 @@ impl_database_ext! {
Vec<u8> | &[u8],
#[cfg(feature = "uuid")]
- sqlx_core::types::Uuid,
+ sqlx::types::Uuid,
#[cfg(feature = "chrono")]
- sqlx_core::types::chrono::NaiveTime,
+ sqlx::types::chrono::NaiveTime,
#[cfg(feature = "chrono")]
- sqlx_core::types::chrono::NaiveDate,
+ sqlx::types::chrono::NaiveDate,
#[cfg(feature = "chrono")]
- sqlx_core::types::chrono::NaiveDateTime,
+ sqlx::types::chrono::NaiveDateTime,
#[cfg(feature = "chrono")]
- sqlx_core::types::chrono::DateTime<sqlx_core::types::chrono::Utc> | sqlx_core::types::chrono::DateTime<_>,
+ sqlx::types::chrono::DateTime<sqlx::types::chrono::Utc> | sqlx::types::chrono::DateTime<_>,
#[cfg(feature = "time")]
- sqlx_core::types::time::Time,
+ sqlx::types::time::Time,
#[cfg(feature = "time")]
- sqlx_core::types::time::Date,
+ sqlx::types::time::Date,
#[cfg(feature = "time")]
- sqlx_core::types::time::PrimitiveDateTime,
+ sqlx::types::time::PrimitiveDateTime,
#[cfg(feature = "time")]
- sqlx_core::types::time::OffsetDateTime,
+ sqlx::types::time::OffsetDateTime,
#[cfg(feature = "bigdecimal")]
- sqlx_core::types::BigDecimal,
+ sqlx::types::BigDecimal,
#[cfg(feature = "ipnetwork")]
- sqlx_core::types::ipnetwork::IpNetwork,
+ sqlx::types::ipnetwork::IpNetwork,
#[cfg(feature = "json")]
serde_json::Value,
@@ -61,42 +63,42 @@ impl_database_ext! {
#[cfg(feature = "uuid")]
- Vec<sqlx_core::types::Uuid> | &[sqlx_core::types::Uuid],
+ Vec<sqlx::types::Uuid> | &[sqlx::types::Uuid],
#[cfg(feature = "chrono")]
- Vec<sqlx_core::types::chrono::NaiveTime> | &[sqlx_core::types::sqlx_core::types::chrono::NaiveTime],
+ Vec<sqlx::types::chrono::NaiveTime> | &[sqlx::types::sqlx::types::chrono::NaiveTime],
#[cfg(feature = "chrono")]
- Vec<sqlx_core::types::chrono::NaiveDate> | &[sqlx_core::types::chrono::NaiveDate],
+ Vec<sqlx::types::chrono::NaiveDate> | &[sqlx::types::chrono::NaiveDate],
#[cfg(feature = "chrono")]
- Vec<sqlx_core::types::chrono::NaiveDateTime> | &[sqlx_core::types::chrono::NaiveDateTime],
+ Vec<sqlx::types::chrono::NaiveDateTime> | &[sqlx::types::chrono::NaiveDateTime],
// TODO
// #[cfg(feature = "chrono")]
- // Vec<sqlx_core::types::chrono::DateTime<sqlx_core::types::chrono::Utc>> | &[sqlx_core::types::chrono::DateTime<_>],
+ // Vec<sqlx::types::chrono::DateTime<sqlx::types::chrono::Utc>> | &[sqlx::types::chrono::DateTime<_>],
#[cfg(feature = "time")]
- Vec<sqlx_core::types::time::Time> | &[sqlx_core::types::time::Time],
+ Vec<sqlx::types::time::Time> | &[sqlx::types::time::Time],
#[cfg(feature = "time")]
- Vec<sqlx_core::types::time::Date> | &[sqlx_core::types::time::Date],
+ Vec<sqlx::types::time::Date> | &[sqlx::types::time::Date],
#[cfg(feature = "time")]
- Vec<sqlx_core::types::time::PrimitiveDateTime> | &[sqlx_core::types::time::PrimitiveDateTime],
+ Vec<sqlx::types::time::PrimitiveDateTime> | &[sqlx::types::time::PrimitiveDateTime],
#[cfg(feature = "time")]
- Vec<sqlx_core::types::time::OffsetDateTime> | &[sqlx_core::types::time::OffsetDateTime],
+ Vec<sqlx::types::time::OffsetDateTime> | &[sqlx::types::time::OffsetDateTime],
#[cfg(feature = "bigdecimal")]
- Vec<sqlx_core::types::BigDecimal> | &[sqlx_core::types::BigDecimal],
+ Vec<sqlx::types::BigDecimal> | &[sqlx::types::BigDecimal],
#[cfg(feature = "ipnetwork")]
- Vec<sqlx_core::types::ipnetwork::IpNetwork> | &[sqlx_core::types::ipnetwork::IpNetwork],
+ Vec<sqlx::types::ipnetwork::IpNetwork> | &[sqlx::types::ipnetwork::IpNetwork],
},
ParamChecking::Strong,
feature-types: info => info.type_feature_gate(),
- row = sqlx_core::postgres::PgRow,
+ row = sqlx::postgres::PgRow,
name = "PostgreSQL"
}


@@ -1,5 +1,7 @@
+ use sqlx_core as sqlx;
impl_database_ext! {
- sqlx_core::sqlite::Sqlite {
+ sqlx::sqlite::Sqlite {
bool,
i32,
i64,
@@ -10,6 +12,6 @@ impl_database_ext! {
},
ParamChecking::Weak,
feature-types: _info => None,
- row = sqlx_core::sqlite::SqliteRow,
+ row = sqlx::sqlite::SqliteRow,
name = "SQLite"
}


@@ -1,5 +1,5 @@
#![cfg_attr(
- not(any(feature = "postgres", feature = "mysql")),
+ not(any(feature = "postgres", feature = "mysql", feature = "offline")),
allow(dead_code, unused_macros, unused_imports)
)]
extern crate proc_macro;
@@ -8,11 +8,6 @@ use proc_macro::TokenStream;
use quote::quote;
- #[cfg(feature = "runtime-async-std")]
- use async_std::task::block_on;
- use url::Url;
type Error = Box<dyn std::error::Error>;
type Result<T> = std::result::Result<T, Error>;


@@ -1,15 +1,6 @@
- use sqlx_core::connection::{Connect, Connection};
use sqlx_core::database::Database;
use sqlx_core::describe::Describe;
- use sqlx_core::executor::{Executor, RefExecutor};
- use url::Url;
- use std::fmt::{self, Display, Formatter};
- use crate::database::DatabaseExt;
- use proc_macro2::TokenStream;
- use std::fs::File;
- use syn::export::Span;
+ use sqlx_core::executor::Executor;
// TODO: enable serialization
#[cfg_attr(feature = "offline", derive(serde::Deserialize, serde::Serialize))]
@@ -21,6 +12,7 @@ use syn::export::Span;
))
)]
pub struct QueryData<DB: Database> {
+ #[allow(dead_code)]
pub(super) query: String,
pub(super) describe: Describe<DB>,
#[cfg(feature = "offline")]
@@ -49,10 +41,9 @@ pub mod offline {
use std::fmt::{self, Formatter};
use crate::database::DatabaseExt;
- use proc_macro2::{Span, TokenStream};
- use serde::de::{Deserializer, MapAccess, Visitor};
+ use proc_macro2::Span;
+ use serde::de::{Deserializer, IgnoredAny, MapAccess, Visitor};
use sqlx_core::describe::Describe;
- use sqlx_core::query::query;
use std::path::Path;
#[derive(serde::Deserialize)]
@@ -150,9 +141,16 @@
{
let mut db_name: Option<String> = None;
- // unfortunately we can't avoid this copy because deserializing from `io::Read`
- // doesn't support deserializing borrowed values
- while let Some(key) = map.next_key::<String>()? {
+ let query_data = loop {
+ // unfortunately we can't avoid this copy because deserializing from `io::Read`
+ // doesn't support deserializing borrowed values
+ let key = map.next_key::<String>()?.ok_or_else(|| {
+ serde::de::Error::custom(format_args!(
+ "failed to find data for query {}",
+ self.hash
+ ))
+ })?;
// lazily deserialize the query data only
if key == "db" {
db_name = Some(map.next_value::<String>()?);
@@ -163,23 +161,27 @@
let mut query_data: DynQueryData = map.next_value()?;
- return if query_data.query == self.query {
+ if query_data.query == self.query {
query_data.db_name = db_name;
- query_data.hash = self.hash;
- Ok(query_data)
+ query_data.hash = self.hash.clone();
+ break query_data;
} else {
- Err(serde::de::Error::custom(format_args!(
+ return Err(serde::de::Error::custom(format_args!(
"hash collision for stored queries:\n{:?}\n{:?}",
self.query, query_data.query
- )))
+ )));
};
+ } else {
+ // we don't care about entries that don't match our hash
+ let _ = map.next_value::<IgnoredAny>()?;
+ }
}
+ };
- Err(serde::de::Error::custom(format_args!(
- "failed to find data for query {}",
- self.hash
- )))
+ // Serde expects us to consume the whole map; fortunately they've got a convenient
+ // type to let us do just that
+ while let Some(_) = map.next_entry::<IgnoredAny, IgnoredAny>()? {}
+ Ok(query_data)
}
}
}
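The reworked visitor streams through `sqlx-data.json` and materializes only the entry whose key matches the query hash; `IgnoredAny` parses and discards every other value without building it, and the final drain loop satisfies serde's requirement that `visit_map` consume the whole map. A self-contained sketch of the same pattern (hypothetical key names; assumes `serde` and `serde_json` as dependencies):

use serde::de::{Deserializer, Error as _, IgnoredAny, MapAccess, Visitor};
use std::fmt;

/// Extracts the value for a single key, cheaply skipping all others.
struct PickKey<'a>(&'a str);

impl<'de, 'a> Visitor<'de> for PickKey<'a> {
    type Value = serde_json::Value;

    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "a map containing the key {:?}", self.0)
    }

    fn visit_map<A: MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
        let mut found = None;
        while let Some(key) = map.next_key::<String>()? {
            if key == self.0 && found.is_none() {
                found = Some(map.next_value()?);
            } else {
                // parsed for well-formedness but never materialized
                let _ = map.next_value::<IgnoredAny>()?;
            }
        }
        found.ok_or_else(|| A::Error::custom("key not found"))
    }
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let json = r#"{"a": 1, "b": {"nested": true}, "c": 3}"#;
    let mut de = serde_json::Deserializer::from_str(json);
    let value = de.deserialize_map(PickKey("b"))?;
    println!("{}", value); // {"nested":true}
    Ok(())
}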


@@ -2,24 +2,17 @@ use std::env;
use std::fs;
use proc_macro2::{Ident, Span};
- use quote::{format_ident, ToTokens};
- use syn::parse::{Parse, ParseBuffer, ParseStream};
- use syn::punctuated::Punctuated;
- use syn::spanned::Spanned;
- use syn::token::Group;
- use syn::{Error, Expr, ExprLit, ExprPath, Lit, LitBool, LitStr, Token};
- use syn::{ExprArray, ExprGroup, Type};
- use sqlx_core::connection::Connection;
- use sqlx_core::describe::Describe;
+ use quote::format_ident;
+ use syn::parse::{Parse, ParseStream};
+ use syn::{Expr, LitBool, LitStr};
+ use syn::{ExprArray, Type};
/// Macro input shared by `query!()` and `query_file!()`
pub struct QueryMacroInput {
pub(super) src: String,
+ #[cfg_attr(not(feature = "offline"), allow(dead_code))]
pub(super) src_span: Span,
- pub(super) data_src: DataSrc,
pub(super) record_type: RecordType,
// `arg0 .. argN` for N arguments
@@ -34,12 +27,6 @@ enum QuerySrc {
File(String),
}
- pub enum DataSrc {
- Env(String),
- DbUrl(String),
- File,
- }
pub enum RecordType {
Given(Type),
Generated,
@@ -48,7 +35,6 @@ pub enum RecordType {
impl Parse for QueryMacroInput {
fn parse(input: ParseStream) -> syn::Result<Self> {
let mut query_src: Option<(QuerySrc, Span)> = None;
- let mut data_src = DataSrc::Env("DATABASE_URL".into());
let mut args: Option<Vec<Expr>> = None;
let mut record_type = RecordType::Generated;
let mut checked = true;
@@ -97,7 +83,6 @@ impl Parse for QueryMacroInput {
Ok(QueryMacroInput {
src: src.resolve(src_span)?,
src_span,
- data_src,
record_type,
arg_names,
arg_exprs,


@@ -1,16 +1,13 @@
use std::borrow::Cow;
use std::env;
use std::fmt::Display;
- use std::path::PathBuf;
- use proc_macro2::{Ident, Span, TokenStream};
+ use proc_macro2::{Span, TokenStream};
use syn::Type;
use url::Url;
pub use input::QueryMacroInput;
use quote::{format_ident, quote};
use sqlx_core::connection::Connect;
use sqlx_core::connection::Connection;
use sqlx_core::database::Database;
use sqlx_core::describe::Describe;
@@ -102,7 +99,10 @@ fn expand_from_db(input: QueryMacroInput, db_url: &str) -> crate::Result<TokenStream> {
}
#[cfg(feature = "offline")]
- pub fn expand_from_file(input: QueryMacroInput, file: PathBuf) -> crate::Result<TokenStream> {
+ pub fn expand_from_file(
+ input: QueryMacroInput,
+ file: std::path::PathBuf,
+ ) -> crate::Result<TokenStream> {
use data::offline::DynQueryData;
let query_data = DynQueryData::from_data_file(file, &input.src)?;
@@ -222,6 +222,9 @@ where
(#($#arg_names:expr),*) => {{
use sqlx::arguments::Arguments as _;
+ // lets `cargo sqlx prepare` ensure that we can always trigger a recompile
+ const _: Option<&'static str> = option_env!("__SQLX_RECOMPILE_TRIGGER");
#args_tokens
#output
@@ -231,8 +234,13 @@ where
#[cfg(feature = "offline")]
{
- let save_dir = env::var("CARGO_TARGET_DIR").unwrap_or_else(|_| "target/sqlx".into());
- std::fs::create_dir_all(&save_dir);
+ let mut save_dir = std::path::PathBuf::from(
+ env::var("CARGO_TARGET_DIR").unwrap_or_else(|_| "target/".into()),
+ );
+ save_dir.push("sqlx");
+ std::fs::create_dir_all(&save_dir)?;
data.save_in(save_dir, input.src_span)?;
}
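The `__SQLX_RECOMPILE_TRIGGER` constant emitted here closes the loop with the prepare step shown earlier: per the comments in the diff, every expansion of `query!()` embeds a reference to that variable, and because `cargo sqlx prepare` runs `cargo check` with a fresh value each time, the previously compiled crate is not considered up to date, so every macro re-expands and re-saves its query data. `option_env!` rather than `env!` because ordinary builds, where the variable is unset, must still compile. A minimal sketch of the consumer side:

// what each macro expansion effectively emits (per the hunk above)
const _: Option<&'static str> = option_env!("__SQLX_RECOMPILE_TRIGGER");

fn main() {
    // ordinary builds see None; the prepare step sets a fresh value per run
    println!("{:?}", option_env!("__SQLX_RECOMPILE_TRIGGER"));
}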


@@ -1,6 +1,6 @@
use proc_macro2::{Ident, Span, TokenStream};
use quote::quote;
- use syn::{Path, Type};
+ use syn::Type;
use sqlx_core::describe::Describe;