Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Improve handling of feature flags #118

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 31 additions & 0 deletions cargo-pgx/src/cli.yml
Original file line number Diff line number Diff line change
Expand Up @@ -77,15 +77,31 @@ subcommands:
short: r
long: release
help: compile for release mode (default is debug)
- features:
long: features
help: additional cargo features to activate (default is '--no-default-features')
takes_value: true
multiple: true
- package:
about: create an installation package directory (in ./target/[debug|release]/extname-pgXX/) for the Postgres installation specified by whatever "pg_config" is currently on your $PATH
args:
- debug:
short: d
long: debug
help: compile for debug mode (default is release)
- features:
long: features
help: additional cargo features to activate (default is '--no-default-features')
takes_value: true
multiple: true
- schema:
about: generate extension schema files (typically not necessary)
args:
- features:
long: features
help: additional cargo features to activate (default is '--no-default-features')
takes_value: true
multiple: true
- dump-schema:
about: dump the full extension SQL schema file
args:
Expand All @@ -94,6 +110,11 @@ subcommands:
takes_value: true
required: true
help: Where should the extension .sql file be written?
- features:
long: features
help: additional cargo features to activate (default is '--no-default-features')
takes_value: true
multiple: true
- run:
about: compile/install extension to a pgx-managed Postgres instance and start psql
args:
Expand All @@ -110,6 +131,11 @@ subcommands:
short: r
long: release
help: compile for release mode (default is debug)
- features:
long: features
help: additional cargo features to activate (default is '--no-default-features')
takes_value: true
multiple: true
- connect:
about: connect, via psql, to a Postgres instance
args:
Expand All @@ -133,6 +159,11 @@ subcommands:
short: r
long: release
help: compile for release mode (default is debug)
- features:
long: features
help: additional cargo features to activate (default is '--no-default-features')
takes_value: true
multiple: true
- get:
about: get a property from the extension control file
args:
Expand Down
15 changes: 11 additions & 4 deletions cargo-pgx/src/commands/install.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ pub(crate) fn install_extension(
pg_config: &PgConfig,
is_release: bool,
base_directory: Option<PathBuf>,
additional_features: Vec<&str>,
) -> Result<(), std::io::Error> {
let base_directory = base_directory.unwrap_or("/".into());
let (control_file, extname) = find_control_file();
Expand All @@ -27,7 +28,7 @@ pub(crate) fn install_extension(
)
}

build_extension(major_version, is_release);
build_extension(major_version, is_release, &*additional_features);

println!();
println!("installing extension");
Expand All @@ -50,7 +51,7 @@ pub(crate) fn install_extension(
}

{
handle_result!(crate::generate_schema(), "failed to generate SQL schema");
handle_result!(crate::generate_schema(&*additional_features), "failed to generate SQL schema");
}

copy_sql_files(&extdir, &extname, &base_directory);
Expand Down Expand Up @@ -83,9 +84,15 @@ fn copy_file(src: PathBuf, dest: PathBuf, msg: &str) {
);
}

fn build_extension(major_version: u16, is_release: bool) {
let features = std::env::var("PGX_BUILD_FEATURES").unwrap_or(format!("pg{}", major_version));
fn build_extension(major_version: u16, is_release: bool, additional_features: &[&str],) {
let mut features = std::env::var("PGX_BUILD_FEATURES").unwrap_or(format!("pg{}", major_version));
let flags = std::env::var("PGX_BUILD_FLAGS").unwrap_or_default();
if !additional_features.is_empty() {
use std::fmt::Write;
let mut additional_features = additional_features.join(" ");
let _ = write!(&mut additional_features, " {}", features);
features = additional_features
}
let mut command = Command::new("cargo");
command.arg("build");
if is_release {
Expand Down
2 changes: 1 addition & 1 deletion cargo-pgx/src/commands/new.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ pub(crate) fn create_crate_template(

let cwd = std::env::current_dir().unwrap();
std::env::set_current_dir(&path)?;
crate::generate_schema()?;
crate::generate_schema(&[])?;
std::env::set_current_dir(cwd)?;

Ok(())
Expand Down
3 changes: 2 additions & 1 deletion cargo-pgx/src/commands/package.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ use std::path::PathBuf;
pub(crate) fn package_extension(
pg_config: &PgConfig,
is_debug: bool,
additional_features: Vec<&str>,
) -> Result<(), std::io::Error> {
let base_path = build_base_path(pg_config, is_debug)?;

Expand All @@ -20,7 +21,7 @@ pub(crate) fn package_extension(
if !base_path.exists() {
std::fs::create_dir_all(&base_path)?;
}
install_extension(pg_config, !is_debug, Some(base_path))
install_extension(pg_config, !is_debug, Some(base_path), additional_features)
}

fn build_base_path(pg_config: &PgConfig, is_debug: bool) -> Result<PathBuf, std::io::Error> {
Expand Down
3 changes: 2 additions & 1 deletion cargo-pgx/src/commands/run.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,12 +14,13 @@ pub(crate) fn run_psql(
pg_config: &PgConfig,
dbname: &str,
is_release: bool,
additional_features: Vec<&str>,
) -> Result<(), std::io::Error> {
// stop postgres
stop_postgres(pg_config)?;

// install the extension
install_extension(pg_config, is_release, None)?;
install_extension(pg_config, is_release, None, additional_features)?;

// restart postgres
start_postgres(pg_config)?;
Expand Down
119 changes: 115 additions & 4 deletions cargo-pgx/src/commands/schema.rs
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,11 @@ enum DeriveMacros {
PostgresHash,
}

pub(crate) fn generate_schema() -> Result<(), std::io::Error> {
pub(crate) fn generate_schema(features: &[&str]) -> Result<(), std::io::Error> {
let mut features: HashSet<String> = features.into_iter().map(|s| s.to_string()).collect();
if let Ok(pg_features) = std::env::var("PGX_BUILD_FEATURES") {
features.extend(pg_features.split(' ').map(|s| s.to_string()))
}
let path = PathBuf::from_str("./src").unwrap();
let files = find_rs_files(&path, Vec::new());
let default_schema = get_property("schema").unwrap_or_else(|| "public".to_string());
Expand All @@ -66,7 +70,7 @@ pub(crate) fn generate_schema() -> Result<(), std::io::Error> {

let mut created = Vec::new();
files.iter().for_each(|f: &DirEntry| {
let statemets = generate_sql(f, default_schema.clone());
let statemets = generate_sql(f, default_schema.clone(), &features);
let (did_write, filename) = write_sql_file(f, statemets);

// strip the leading ./sql/ from the filenames we generated
Expand Down Expand Up @@ -239,7 +243,7 @@ fn parse_extern_args(att: &Attribute) -> BTreeSet<ExternArgs> {
.collect()
}

fn generate_sql(rs_file: &DirEntry, default_schema: String) -> Vec<String> {
fn generate_sql(rs_file: &DirEntry, default_schema: String, features: &HashSet<String>) -> Vec<String> {
let mut sql = Vec::new();
let file = std::fs::read_to_string(rs_file.path()).unwrap();
let ast = syn::parse_file(file.as_str()).unwrap();
Expand All @@ -253,6 +257,7 @@ fn generate_sql(rs_file: &DirEntry, default_schema: String) -> Vec<String> {
ast.items,
&mut schema_stack,
&default_schema,
features,
);

sql
Expand All @@ -265,6 +270,7 @@ fn walk_items(
items: Vec<Item>,
schema_stack: &mut Vec<String>,
default_schema: &str,
features: &HashSet<String>,
) {
let mut sql = Vec::new();
let mut postgres_enums = Vec::new();
Expand All @@ -275,10 +281,14 @@ fn walk_items(
.expect("couldn't determine the current schema")
.clone();
for item in items {
if !is_active(&item, features) {
continue
}
if let Item::Mod(module) = item {
module.attrs;
if let Some((_, items)) = module.content {
schema_stack.push(module.ident.to_string());
walk_items(rs_file, &mut sql, items, schema_stack, default_schema);
walk_items(rs_file, &mut sql, items, schema_stack, default_schema, features);
schema_stack.pop();
}
} else if let Item::Struct(strct) = item {
Expand Down Expand Up @@ -327,13 +337,15 @@ fn walk_items(
vec![parse_item(eq(&strct.ident))],
schema_stack,
default_schema,
features,
);
walk_items(
rs_file,
&mut operator_sql,
vec![parse_item(ne(&strct.ident))],
schema_stack,
default_schema,
features,
);
}

Expand All @@ -344,34 +356,39 @@ fn walk_items(
vec![parse_item(lt(&strct.ident))],
schema_stack,
default_schema,
features,
);
walk_items(
rs_file,
&mut operator_sql,
vec![parse_item(gt(&strct.ident))],
schema_stack,
default_schema,
features,
);
walk_items(
rs_file,
&mut operator_sql,
vec![parse_item(le(&strct.ident))],
schema_stack,
default_schema,
features,
);
walk_items(
rs_file,
&mut operator_sql,
vec![parse_item(ge(&strct.ident))],
schema_stack,
default_schema,
features,
);
walk_items(
rs_file,
&mut operator_sql,
vec![parse_item(cmp(&strct.ident))],
schema_stack,
default_schema,
features,
);
}

Expand All @@ -382,6 +399,7 @@ fn walk_items(
vec![parse_item(hash(&strct.ident))],
schema_stack,
default_schema,
features,
);

let type_name = &strct.ident.to_string().to_lowercase();
Expand Down Expand Up @@ -635,6 +653,99 @@ fn walk_items(
all_sql.append(&mut operator_sql);
}

/// Decides whether `item` would be compiled in, given the set of active Cargo
/// `features`, by evaluating its `#[cfg(...)]` attributes.
///
/// Only `feature = "..."` predicates can be decided here. Every other
/// predicate (`unix`, `test`, `target_os = "..."`, bare literals, unknown
/// lists) evaluates to "unknown" and never causes the item to be filtered
/// out — we conservatively keep items we cannot reason about. An item is
/// inactive only when some `cfg` attribute provably evaluates to `false`.
fn is_active(item: &syn::Item, features: &HashSet<String>) -> bool {
    /// Evaluates one cfg predicate with three-valued logic:
    /// `Some(true)`/`Some(false)` when decidable from `features`,
    /// `None` when the predicate is outside our knowledge.
    fn eval_pred(meta: &syn::NestedMeta, features: &HashSet<String>) -> Option<bool> {
        let meta = match meta {
            syn::NestedMeta::Meta(meta) => meta,
            // A bare literal inside cfg — not a feature test; cannot tell.
            syn::NestedMeta::Lit(_) => return None,
        };

        match meta {
            // e.g. `unix`, `test` — cannot tell from the feature set.
            syn::Meta::Path(_) => None,
            syn::Meta::NameValue(nv) => {
                if !nv.path.is_ident("feature") {
                    // e.g. `target_os = "linux"` — cannot tell.
                    return None;
                }
                match &nv.lit {
                    syn::Lit::Str(s) => Some(features.contains(&*s.value())),
                    _ => None,
                }
            }
            syn::Meta::List(list) => {
                if list.path.is_ident("not") {
                    // `not(p)` — flip the inner result; unknown stays unknown.
                    eval_all(list.nested.iter(), features).map(|b| !b)
                } else if list.path.is_ident("any") {
                    eval_any(list.nested.iter(), features)
                } else if list.path.is_ident("all") {
                    eval_all(list.nested.iter(), features)
                } else {
                    // Unrecognized combinator — cannot tell.
                    None
                }
            }
        }
    }

    /// `all(...)`: false if any child is provably false; true only if every
    /// child is provably true; otherwise unknown.
    fn eval_all<'a>(
        metas: impl Iterator<Item = &'a syn::NestedMeta>,
        features: &HashSet<String>,
    ) -> Option<bool> {
        let mut result = Some(true);
        for meta in metas {
            match eval_pred(meta, features) {
                Some(false) => return Some(false),
                Some(true) => {}
                None => result = None,
            }
        }
        result
    }

    /// `any(...)`: true if any child is provably true; false only if every
    /// child is provably false; otherwise unknown.
    fn eval_any<'a>(
        metas: impl Iterator<Item = &'a syn::NestedMeta>,
        features: &HashSet<String>,
    ) -> Option<bool> {
        let mut result = Some(false);
        for meta in metas {
            match eval_pred(meta, features) {
                Some(true) => return Some(true),
                Some(false) => {}
                None => result = None,
            }
        }
        result
    }

    for attr in item_attrs(item) {
        if !attr.path.is_ident("cfg") {
            continue;
        }

        let meta = match attr.parse_meta() {
            Ok(syn::Meta::List(meta)) => meta,
            // Unparseable or non-list cfg — cannot tell; don't filter.
            _ => continue,
        };

        // `#[cfg(p)]` behaves like `all(p)` (cfg takes a single predicate,
        // but evaluating as `all` is correct either way).
        if eval_all(meta.nested.iter(), features) == Some(false) {
            return false;
        }
    }
    true
}

/// Returns an iterator over the attributes attached to `item`.
///
/// Covers every `syn::Item` variant that carries an `attrs` field; variants
/// without attributes (e.g. `Verbatim`) yield an empty iterator.
fn item_attrs(item: &syn::Item) -> impl Iterator<Item = &syn::Attribute> {
    let attrs: &[syn::Attribute] = match item {
        syn::Item::Const(i) => &i.attrs,
        syn::Item::Enum(i) => &i.attrs,
        syn::Item::ExternCrate(i) => &i.attrs,
        syn::Item::Fn(i) => &i.attrs,
        syn::Item::ForeignMod(i) => &i.attrs,
        syn::Item::Impl(i) => &i.attrs,
        syn::Item::Macro(i) => &i.attrs,
        syn::Item::Macro2(i) => &i.attrs,
        syn::Item::Mod(i) => &i.attrs,
        syn::Item::Static(i) => &i.attrs,
        syn::Item::Struct(i) => &i.attrs,
        syn::Item::Trait(i) => &i.attrs,
        syn::Item::TraitAlias(i) => &i.attrs,
        syn::Item::Type(i) => &i.attrs,
        syn::Item::Union(i) => &i.attrs,
        syn::Item::Use(i) => &i.attrs,
        _ => &[],
    };
    attrs.iter()
}


fn qualify_name(schema: &str, name: &str) -> String {
if "public" == schema {
name.to_owned()
Expand Down
Loading