WIP - OnConflict support for Insert mutations #503

Draft
wants to merge 10 commits into base: master
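For context, a minimal sketch of how the new argument is used, lifted from the tests added in this PR (the account table and its account_pkey constraint are defined in test/sql/mutation_insert_on_conflict.sql below):

select jsonb_pretty(graphql.resolve($$
mutation {
  insertIntoAccountCollection(
    objects: [
      { id: 1, email: "foo@barsley.com" }
    ]
    onConflict: {
      constraint: account_pkey,
      updateFields: [email]
    }
  ) {
    affectedCount
    records { id email }
  }
}
$$));

If a row with id 1 already exists, the insert falls back to updating only the listed fields, mirroring Postgres insert ... on conflict ... do update set behavior.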
5 changes: 4 additions & 1 deletion sql/load_sql_context.sql
@@ -281,11 +281,14 @@ select
array[]::text[]
),
'is_unique', pi.indisunique and pi.indpred is null,
'is_primary_key', pi.indisprimary
'is_primary_key', pi.indisprimary,
'name', pc_ix.relname
)
)
from
pg_catalog.pg_index pi
join pg_catalog.pg_class pc_ix
on pi.indexrelid = pc_ix.oid
where
pi.indrelid = pc.oid
),
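For reference, a sketch of how the index names surfaced by this change can be inspected straight from the catalogs; pg_index and pg_class are standard, and the account table comes from the tests further down:

select pc_ix.relname as index_name
from pg_catalog.pg_index pi
join pg_catalog.pg_class pc_ix
  on pi.indexrelid = pc_ix.oid
where pi.indrelid = 'account'::regclass
  and pi.indisunique;

For that table this returns account_pkey, the name the GraphQL layer can now expose as an onConflict constraint target.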
156 changes: 142 additions & 14 deletions src/builder.rs
@@ -4,16 +4,21 @@ use crate::parser_util::*;
use crate::sql_types::*;
use graphql_parser::query::*;
use serde::Serialize;
use std::collections::HashMap;
use std::collections::{HashMap, HashSet};
use std::hash::Hash;
use std::ops::Deref;
use std::str::FromStr;
use std::sync::Arc;

#[derive(Clone, Debug)]
pub struct InsertBuilder {
pub alias: String,
pub struct OnConflictBuilder {
pub constraint: Index, // Could probably get away with a name ref
pub update_fields: HashSet<Arc<Column>>, // Could probably get away with a name ref
pub filter: FilterBuilder,
}

#[derive(Clone, Debug)]
pub struct InsertBuilder {
// args
pub objects: Vec<InsertRowBuilder>,

@@ -22,6 +27,8 @@ pub struct InsertBuilder {

//fields
pub selections: Vec<InsertSelection>,

pub on_conflict: Option<OnConflictBuilder>,
}

#[derive(Clone, Debug)]
@@ -176,6 +183,117 @@ where
parse_node_id(node_id_base64_encoded_json_string)
}

fn read_argument_on_conflict<'a, T>(
field: &__Field,
query_field: &graphql_parser::query::Field<'a, T>,
variables: &serde_json::Value,
variable_definitions: &Vec<VariableDefinition<'a, T>>,
) -> Result<Option<OnConflictBuilder>, String>
where
T: Text<'a> + Eq + AsRef<str>,
{
let conflict_type: OnConflictType = match field.get_arg("onConflict") {
None => return Ok(None),
Some(x) => match x.type_().unmodified_type() {
__Type::OnConflictInput(insert_on_conflict) => insert_on_conflict,
_ => return Err("Could not locate Insert Entity type".to_string()),
},
};

let validated: gson::Value = read_argument(
"onConflict",
field,
query_field,
variables,
variable_definitions,
)?;

let on_conflict_builder = match validated {
gson::Value::Absent | gson::Value::Null => None,
gson::Value::Object(contents) => {
let constraint = match contents
.get("constraint")
.expect("OnConflict revalidation error. Expected constraint")
{
gson::Value::String(ix_name) => conflict_type
.table
.indexes
.iter()
.find(|ix| &ix.name == ix_name)
.expect("OnConflict revalidation error. constraint: unknown constraint name"),
_ => {
return Err(
"OnConflict revalidation error. Expected constraint as String".to_string(),
)
}
};

// TODO: Filter reading logic is partially duplicated from read_argument_filter
// ideally this should be refactored
let filter_gson = contents
.get("filter")
.expect("onConflict revalidation error");

let filter = match filter_gson {
gson::Value::Null | gson::Value::Absent => FilterBuilder { elems: vec![] },
gson::Value::Object(_) => {
let filter_type = conflict_type
.input_fields()
.expect("Failed to unwrap input fields on OnConflict type")
.iter()
.find(|in_f| in_f.name() == "filter")
.expect("Failed to get filter input_field on onConflict type")
.type_()
.unmodified_type();

if !matches!(filter_type, __Type::FilterEntity(_)) {
return Err("Could not locate Filter Entity type".to_string());
}
let filter_field_map = input_field_map(&filter_type);
let filter_elems = create_filters(&filter_gson, &filter_field_map)?;
FilterBuilder {
elems: filter_elems,
}
}
_ => return Err("OnConflict revalidation error. invalid filter object".to_string()),
};

let update_fields = match contents
.get("updateFields")
.expect("OnConflict revalidation error. Expected updateFields")
{
gson::Value::Array(col_names) => {
let mut update_columns: HashSet<Arc<Column>> = HashSet::new();
for col_name in col_names {
match col_name {
gson::Value::String(c) => {
let col = conflict_type.table.columns.iter().find(|column| &column.name == c).expect("OnConflict revalidation error. updateFields: unknown column name");
update_columns.insert(Arc::clone(col));
}
_ => return Err("OnConflict revalidation error. Expected updateFields to be column names".to_string()),
}
}
update_columns
}
_ => {
return Err(
"OnConflict revalidation error. Expected updateFields to be an array"
.to_string(),
)
}
};

Some(OnConflictBuilder {
constraint: constraint.clone(),
update_fields,
filter,
})
}
_ => return Err("Insert re-validation errror".to_string()),
};
Ok(on_conflict_builder)
}

fn read_argument_objects<'a, T>(
field: &__Field,
query_field: &graphql_parser::query::Field<'a, T>,
@@ -272,12 +390,27 @@ where
.name()
.ok_or("Encountered type without name in connection builder")?;
let field_map = field_map(&type_);
let alias = alias_or_name(query_field);

match &type_ {
__Type::InsertResponse(xtype) => {
// Raise for disallowed arguments
restrict_allowed_arguments(&["objects"], query_field)?;
let allowed_args = field
.args
.iter()
.map(|iv| iv.name())
.collect::<HashSet<String>>();

match allowed_args.contains("onConflict") {
true => restrict_allowed_arguments(&["objects", "onConflict"], query_field)?,
false => restrict_allowed_arguments(&["objects"], query_field)?,
}

let on_conflict: Option<OnConflictBuilder> = match allowed_args.contains("onConflict") {
true => {
read_argument_on_conflict(field, query_field, variables, variable_definitions)?
}
false => None,
};

let objects: Vec<InsertRowBuilder> =
read_argument_objects(field, query_field, variables, variable_definitions)?;
@@ -320,10 +453,10 @@ where
}
}
Ok(InsertBuilder {
alias,
table: Arc::clone(&xtype.table),
objects,
selections: builder_fields,
on_conflict,
})
}
_ => Err(format!(
@@ -335,8 +468,6 @@ where

#[derive(Clone, Debug)]
pub struct UpdateBuilder {
pub alias: String,

// args
pub filter: FilterBuilder,
pub set: SetBuilder,
@@ -438,7 +569,6 @@ where
.name()
.ok_or("Encountered type without name in update builder")?;
let field_map = field_map(&type_);
let alias = alias_or_name(query_field);

match &type_ {
__Type::UpdateResponse(xtype) => {
@@ -490,7 +620,6 @@ where
}
}
Ok(UpdateBuilder {
alias,
filter,
set,
at_most,
@@ -507,8 +636,6 @@ where

#[derive(Clone, Debug)]
pub struct DeleteBuilder {
pub alias: String,

// args
pub filter: FilterBuilder,
pub at_most: i64,
@@ -544,7 +671,6 @@ where
.name()
.ok_or("Encountered type without name in delete builder")?;
let field_map = field_map(&type_);
let alias = alias_or_name(query_field);

match &type_ {
__Type::DeleteResponse(xtype) => {
@@ -594,7 +720,6 @@ where
}
}
Ok(DeleteBuilder {
alias,
filter,
at_most,
table: Arc::clone(&xtype.table),
@@ -1060,11 +1185,14 @@ where
variable_definitions,
)?;


let filter_type = field
.get_arg("filter")
.expect("failed to get filter argument")
.type_()
.unmodified_type();

if !matches!(filter_type, __Type::FilterEntity(_)) {
return Err("Could not locate Filter Entity type".to_string());
}
180 changes: 163 additions & 17 deletions src/graphql.rs
@@ -515,6 +515,7 @@ pub enum __Type {
// Mutation
Mutation(MutationType),
InsertInput(InsertInputType),
OnConflictInput(OnConflictType),
InsertResponse(InsertResponseType),
UpdateInput(UpdateInputType),
UpdateResponse(UpdateResponseType),
@@ -593,6 +594,7 @@ impl ___Type for __Type {
Self::Node(x) => x.kind(),
Self::NodeInterface(x) => x.kind(),
Self::InsertInput(x) => x.kind(),
Self::OnConflictInput(x) => x.kind(),
Self::InsertResponse(x) => x.kind(),
Self::UpdateInput(x) => x.kind(),
Self::UpdateResponse(x) => x.kind(),
@@ -628,6 +630,7 @@ impl ___Type for __Type {
Self::Node(x) => x.name(),
Self::NodeInterface(x) => x.name(),
Self::InsertInput(x) => x.name(),
Self::OnConflictInput(x) => x.name(),
Self::InsertResponse(x) => x.name(),
Self::UpdateInput(x) => x.name(),
Self::UpdateResponse(x) => x.name(),
@@ -663,6 +666,7 @@ impl ___Type for __Type {
Self::Node(x) => x.description(),
Self::NodeInterface(x) => x.description(),
Self::InsertInput(x) => x.description(),
Self::OnConflictInput(x) => x.description(),
Self::InsertResponse(x) => x.description(),
Self::UpdateInput(x) => x.description(),
Self::UpdateResponse(x) => x.description(),
@@ -699,6 +703,7 @@ impl ___Type for __Type {
Self::Node(x) => x.fields(_include_deprecated),
Self::NodeInterface(x) => x.fields(_include_deprecated),
Self::InsertInput(x) => x.fields(_include_deprecated),
Self::OnConflictInput(x) => x.fields(_include_deprecated),
Self::InsertResponse(x) => x.fields(_include_deprecated),
Self::UpdateInput(x) => x.fields(_include_deprecated),
Self::UpdateResponse(x) => x.fields(_include_deprecated),
@@ -735,6 +740,7 @@ impl ___Type for __Type {
Self::Node(x) => x.interfaces(),
Self::NodeInterface(x) => x.interfaces(),
Self::InsertInput(x) => x.interfaces(),
Self::OnConflictInput(x) => x.interfaces(),
Self::InsertResponse(x) => x.interfaces(),
Self::UpdateInput(x) => x.interfaces(),
Self::UpdateResponse(x) => x.interfaces(),
@@ -780,6 +786,7 @@ impl ___Type for __Type {
Self::Node(x) => x.enum_values(_include_deprecated),
Self::NodeInterface(x) => x.enum_values(_include_deprecated),
Self::InsertInput(x) => x.enum_values(_include_deprecated),
Self::OnConflictInput(x) => x.enum_values(_include_deprecated),
Self::InsertResponse(x) => x.enum_values(_include_deprecated),
Self::UpdateInput(x) => x.enum_values(_include_deprecated),
Self::UpdateResponse(x) => x.enum_values(_include_deprecated),
@@ -816,6 +823,7 @@ impl ___Type for __Type {
Self::Node(x) => x.input_fields(),
Self::NodeInterface(x) => x.input_fields(),
Self::InsertInput(x) => x.input_fields(),
Self::OnConflictInput(x) => x.input_fields(),
Self::InsertResponse(x) => x.input_fields(),
Self::UpdateInput(x) => x.input_fields(),
Self::UpdateResponse(x) => x.input_fields(),
@@ -962,6 +970,12 @@ pub struct InsertResponseType {
pub schema: Arc<__Schema>,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct OnConflictType {
pub table: Arc<Table>,
pub schema: Arc<__Schema>,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct UpdateResponseType {
pub table: Arc<Table>,
@@ -1095,6 +1109,8 @@ impl ConnectionType {
pub enum EnumSource {
Enum(Arc<Enum>),
FilterIs,
TableColumns(Arc<Table>),
OnConflictTarget(Arc<Table>),
}

#[derive(Clone, Debug, Eq, PartialEq, Hash)]
@@ -1420,28 +1436,43 @@ impl ___Type for MutationType {
let table_base_type_name = self.schema.graphql_table_base_type_name(table);

if self.schema.graphql_table_insert_types_are_valid(table) {
f.push(__Field {
name_: format!("insertInto{}Collection", table_base_type_name),
type_: __Type::InsertResponse(InsertResponseType {
table: Arc::clone(table),
schema: Arc::clone(&self.schema),
}),
args: vec![__InputValue {
name_: "objects".to_string(),
type_: __Type::NonNull(NonNullType {
type_: Box::new(__Type::List(ListType {
type_: Box::new(__Type::NonNull(NonNullType {
type_: Box::new(__Type::InsertInput(InsertInputType {
table: Arc::clone(table),
schema: Arc::clone(&self.schema),
})),
let mut args = vec![__InputValue {
name_: "objects".to_string(),
type_: __Type::NonNull(NonNullType {
type_: Box::new(__Type::List(ListType {
type_: Box::new(__Type::NonNull(NonNullType {
type_: Box::new(__Type::InsertInput(InsertInputType {
table: Arc::clone(table),
schema: Arc::clone(&self.schema),
})),
})),
})),
}),
description: None,
default_value: None,
sql_type: None,
}];

if table.has_upsert_support() {
args.push(__InputValue {
name_: "onConflict".to_string(),
type_: __Type::OnConflictInput(OnConflictType {
table: Arc::clone(table),
schema: Arc::clone(&self.schema),
}),
description: None,
default_value: None,
sql_type: None,
}],
});
}

f.push(__Field {
name_: format!("insertInto{}Collection", table_base_type_name),
type_: __Type::InsertResponse(InsertResponseType {
table: Arc::clone(table),
schema: Arc::clone(&self.schema),
}),
args,
description: Some(format!(
"Adds one or more `{}` records to the collection",
table_base_type_name
@@ -1629,6 +1660,14 @@ impl ___Type for EnumType {
)
}
EnumSource::FilterIs => Some("FilterIs".to_string()),
EnumSource::TableColumns(table) => Some(format!(
"{}Field",
self.schema.graphql_table_base_type_name(&table)
)),
EnumSource::OnConflictTarget(table) => Some(format!(
"{}OnConflictConstraint",
self.schema.graphql_table_base_type_name(&table)
)),
}
}

@@ -1667,6 +1706,29 @@ impl ___Type for EnumType {
},
]
}
EnumSource::TableColumns(table) => table
.columns
.iter()
// TODO: is this the right thing to filter on?
.filter(|x| x.permissions.is_selectable)
.map(|col| __EnumValue {
name: self.schema.graphql_column_field_name(col),
description: None,
deprecation_reason: None,
})
.collect(),
EnumSource::OnConflictTarget(table) => {
table
.on_conflict_indexes()
.iter()
.map(|ix| __EnumValue {
// TODO, apply name restrictions
name: ix.name.clone(),
description: None,
deprecation_reason: None,
})
.collect()
}
})
}
}
@@ -3100,6 +3162,75 @@ impl ___Type for InsertInputType {
}
}

impl ___Type for OnConflictType {
fn kind(&self) -> __TypeKind {
__TypeKind::INPUT_OBJECT
}

fn name(&self) -> Option<String> {
Some(format!(
"{}OnConflictInput",
self.schema.graphql_table_base_type_name(&self.table)
))
}

fn fields(&self, _include_deprecated: bool) -> Option<Vec<__Field>> {
None
}

fn input_fields(&self) -> Option<Vec<__InputValue>> {
Some(vec![
__InputValue {
name_: "constraint".to_string(),
// The conflict target constraint is always required
type_: __Type::NonNull(NonNullType {
type_: Box::new(__Type::Enum(EnumType {
enum_: EnumSource::OnConflictTarget(Arc::clone(&self.table)),
schema: Arc::clone(&self.schema),
})),
}),
description: Some(
"A unique constraint that may conflict with the inserted records".to_string(),
),
default_value: None,
sql_type: None,
},
__InputValue {
name_: "updateFields".to_string(),
// updateFields is required and must be a list of column enum values
type_: __Type::NonNull(NonNullType {
type_: Box::new(__Type::List(ListType {
type_: Box::new(__Type::NonNull(NonNullType {
type_: Box::new(__Type::Enum(EnumType {
enum_: EnumSource::TableColumns(Arc::clone(&self.table)),
schema: Arc::clone(&self.schema),
})),
})),
})),
}),
description: Some("Fields to be updated if conflict occurs".to_string()),
default_value: None,
sql_type: None,
},
__InputValue {
name_: "filter".to_string(),
type_: __Type::FilterEntity(FilterEntityType {
table: Arc::clone(&self.table),
schema: self.schema.clone(),
}),
description: Some(
"Filters to apply to the results set when querying from the collection"
.to_string(),
),
default_value: None,
sql_type: None,
},
])
}
}

impl ___Type for InsertResponseType {
fn kind(&self) -> __TypeKind {
__TypeKind::OBJECT
@@ -3320,7 +3451,6 @@ impl ___Type for FuncCallResponseType {
}

use std::str::FromStr;
use std::string::ToString;

#[derive(Clone, Copy, Debug)]
pub enum FilterOp {
@@ -4160,6 +4290,22 @@ impl __Schema {
table: Arc::clone(table),
schema: Arc::clone(&schema_rc),
}));

// Used exclusively by onConflict
if table.has_upsert_support() {
types_.push(__Type::OnConflictInput(OnConflictType {
table: Arc::clone(table),
schema: Arc::clone(&schema_rc),
}));
types_.push(__Type::Enum(EnumType {
enum_: EnumSource::TableColumns(Arc::clone(table)),
schema: Arc::clone(&schema_rc),
}));
types_.push(__Type::Enum(EnumType {
enum_: EnumSource::OnConflictTarget(Arc::clone(table)),
schema: Arc::clone(&schema_rc),
}));
}
}

if self.graphql_table_update_types_are_valid(table) {
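A quick way to see the resulting input type is standard GraphQL introspection through graphql.resolve; the AccountOnConflictInput name is an assumption, derived from the {}OnConflictInput format above and the insertIntoAccountCollection field used in the tests:

select jsonb_pretty(graphql.resolve($$
{
  __type(name: "AccountOnConflictInput") {
    inputFields {
      name
      type { kind }
    }
  }
}
$$));

Per input_fields above, the result should list constraint (non-null enum), updateFields (non-null list of non-null column enums), and filter.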
19 changes: 13 additions & 6 deletions src/parser_util.rs
@@ -412,6 +412,9 @@ pub fn validate_arg_from_type(type_: &__Type, value: &gson::Value) -> Result<gso
.map(|val| GsonValue::String(val.clone()))
.unwrap_or_else(|| value.clone()),
EnumSource::FilterIs => value.clone(),
// TODO(or): Do I need to check directives here?
EnumSource::TableColumns(_e) => value.clone(),
EnumSource::OnConflictTarget(_e) => value.clone(),
}
}
None => return Err(format!("Invalid input for {} type", enum_name)),
@@ -469,11 +472,12 @@ pub fn validate_arg_from_type(type_: &__Type, value: &gson::Value) -> Result<gso
_ => out_elem,
}
}
__Type::InsertInput(_) => validate_arg_from_input_object(type_, value)?,
__Type::UpdateInput(_) => validate_arg_from_input_object(type_, value)?,
__Type::OrderByEntity(_) => validate_arg_from_input_object(type_, value)?,
__Type::FilterType(_) => validate_arg_from_input_object(type_, value)?,
__Type::FilterEntity(_) => validate_arg_from_input_object(type_, value)?,
__Type::InsertInput(_)
| __Type::UpdateInput(_)
| __Type::OrderByEntity(_)
| __Type::FilterType(_)
| __Type::FilterEntity(_)
| __Type::OnConflictInput(_) => validate_arg_from_input_object(type_, value)?,
_ => {
return Err(format!(
"Invalid Type used as input argument {}",
@@ -525,7 +529,10 @@ pub fn validate_arg_from_input_object(

match input_obj.get(&obj_field_key) {
None => {
validate_arg_from_type(&obj_field_type, &GsonValue::Null)?;
// If no key was provided, use "Absent" so all arguments
// always exist in the validated input data
validate_arg_from_type(&obj_field_type, &GsonValue::Absent)?;
out_map.insert(obj_field_key, GsonValue::Absent);
}
Some(x) => {
let out_val = validate_arg_from_type(&obj_field_type, x)?;
31 changes: 31 additions & 0 deletions src/sql_types.rs
@@ -417,6 +417,7 @@ pub struct Index {
pub column_names: Vec<String>,
pub is_unique: bool,
pub is_primary_key: bool,
pub name: String,
}

#[derive(Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
@@ -544,13 +545,39 @@ impl Table {
column_names: column_names.clone(),
is_unique: true,
is_primary_key: true,
name: "NOT REQUIRED".to_string(),
})
}
} else {
None
}
}

pub fn on_conflict_indexes(&self) -> Vec<&Index> {
// Indexes that are valid targets for an on conflict clause
// must be unique, real (not comment directives), and must
// not contain serial or generated columns because we don't
// allow those to be set in insert statements
let unique_indexes = self.indexes.iter().filter(|x| x.is_unique);

let allowed_column_names = self
.columns
.iter()
.filter(|x| x.permissions.is_insertable)
.filter(|x| !x.is_generated)
.filter(|x| !x.is_serial)
.map(|x| &x.name)
.collect::<HashSet<&String>>();

unique_indexes
.filter(|uix| {
uix.column_names
.iter()
.all(|col_name| allowed_column_names.contains(col_name))
})
.collect::<Vec<&Index>>()
}

pub fn primary_key_columns(&self) -> Vec<&Arc<Column>> {
self.primary_key()
.map(|x| x.column_names)
@@ -565,6 +592,10 @@ impl Table {
.collect::<Vec<&Arc<Column>>>()
}

pub fn has_upsert_support(&self) -> bool {
!self.on_conflict_indexes().is_empty()
}

pub fn is_any_column_selectable(&self) -> bool {
self.columns.iter().any(|x| x.permissions.is_selectable)
}
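A sketch of the eligibility rule implemented by on_conflict_indexes, using hypothetical tables (document and event are illustrative only, not part of this PR):

-- eligible: every unique index covers only plain, insertable columns
create table document(
    id int primary key,
    slug text not null unique
);

-- not eligible: the only unique index covers a serial column, and serial
-- columns are excluded because they cannot be set in insert mutations
create table event(
    id serial primary key,
    payload jsonb
);

Under that rule has_upsert_support() would return true for document and false for event, so only document's insert field would gain the onConflict argument.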
37 changes: 35 additions & 2 deletions src/transpile.rs
@@ -2,7 +2,6 @@ use crate::builder::*;
use crate::graphql::*;
use crate::sql_types::{Column, ForeignKey, ForeignKeyTableInfo, Function, Table, TypeDetails};
use itertools::Itertools;
use pgrx::pg_sys::PgBuiltInOids;
use pgrx::prelude::*;
use pgrx::spi::SpiClient;
use pgrx::{direct_function_call, JsonB};
@@ -310,11 +309,45 @@ impl MutationEntrypoint<'_> for InsertBuilder {

let values_clause = values_rows_clause.join(", ");

let insert_quoted_block_name = rand_block_name();
let on_conflict_clause = match &self.on_conflict {
Some(on_conflict) => {
let quoted_constraint_name = quote_ident(&on_conflict.constraint.name);
let do_update_set_clause = on_conflict
.update_fields
.iter()
.map(|col| {
format!(
"{} = excluded.{}",
quote_ident(&col.name),
quote_ident(&col.name),
)
})
.join(", ");

let conflict_where_clause = on_conflict.filter.to_where_clause(
&insert_quoted_block_name,
&self.table,
param_context,
)?;

format!(
"
on conflict on constraint {quoted_constraint_name}
do update set {do_update_set_clause}
where {conflict_where_clause}
",
)
}
None => "".to_string(),
};

Ok(format!(
"
with affected as (
insert into {quoted_schema}.{quoted_table}({referenced_columns_clause})
insert into {quoted_schema}.{quoted_table} as {insert_quoted_block_name} ({referenced_columns_clause})
values {values_clause}
{on_conflict_clause}
returning {selectable_columns_clause}
)
select
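Roughly, the first mutation in the tests below would transpile to a statement of this shape; the block alias, parameter numbers, and column lists are illustrative placeholders, and the empty filter is assumed to render an always-true predicate:

with affected as (
    insert into "public"."account" as "abc" ("id", "email", "priority", "status")
    values ($1, $2, $3, $4), ($5, $6, $7, $8)
    on conflict on constraint "account_pkey"
    do update set "email" = excluded."email", "priority" = excluded."priority", "status" = excluded."status"
    where true
    returning "id", "email", "priority", "status"
)
select ...

The do update set list maps one-to-one from updateFields, and the where clause comes from the optional filter, so only a subset of conflicting rows can be updated.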
36 changes: 24 additions & 12 deletions test/expected/inflection_types.out
@@ -20,19 +20,22 @@ begin;
'$.data.__schema.types[*].name ? (@ starts with "blog")'
)
);
jsonb_pretty
---------------------------
jsonb_pretty
---------------------------------
"blog_post"
"blog_postConnection"
"blog_postDeleteResponse"
"blog_postEdge"
"blog_postField"
"blog_postFilter"
"blog_postInsertInput"
"blog_postInsertResponse"
"blog_postOnConflictConstraint"
"blog_postOnConflictInput"
"blog_postOrderBy"
"blog_postUpdateInput"
"blog_postUpdateResponse"
(10 rows)
(13 rows)

-- Inflection off, Overrides: on
comment on table blog_post is e'@graphql({"name": "BlogZZZ"})';
@@ -50,19 +53,22 @@ begin;
'$.data.__schema.types[*].name ? (@ starts with "Blog")'
)
);
jsonb_pretty
-------------------------
jsonb_pretty
-------------------------------
"BlogZZZ"
"BlogZZZConnection"
"BlogZZZDeleteResponse"
"BlogZZZEdge"
"BlogZZZField"
"BlogZZZFilter"
"BlogZZZInsertInput"
"BlogZZZInsertResponse"
"BlogZZZOnConflictConstraint"
"BlogZZZOnConflictInput"
"BlogZZZOrderBy"
"BlogZZZUpdateInput"
"BlogZZZUpdateResponse"
(10 rows)
(13 rows)

rollback to savepoint a;
-- Inflection on, Overrides: off
@@ -81,19 +87,22 @@ begin;
'$.data.__schema.types[*].name ? (@ starts with "Blog")'
)
);
jsonb_pretty
--------------------------
jsonb_pretty
--------------------------------
"BlogPost"
"BlogPostConnection"
"BlogPostDeleteResponse"
"BlogPostEdge"
"BlogPostField"
"BlogPostFilter"
"BlogPostInsertInput"
"BlogPostInsertResponse"
"BlogPostOnConflictConstraint"
"BlogPostOnConflictInput"
"BlogPostOrderBy"
"BlogPostUpdateInput"
"BlogPostUpdateResponse"
(10 rows)
(13 rows)

-- Inflection on, Overrides: on
comment on table blog_post is e'@graphql({"name": "BlogZZZ"})';
@@ -111,18 +120,21 @@ begin;
'$.data.__schema.types[*].name ? (@ starts with "Blog")'
)
);
jsonb_pretty
-------------------------
jsonb_pretty
-------------------------------
"BlogZZZ"
"BlogZZZConnection"
"BlogZZZDeleteResponse"
"BlogZZZEdge"
"BlogZZZField"
"BlogZZZFilter"
"BlogZZZInsertInput"
"BlogZZZInsertResponse"
"BlogZZZOnConflictConstraint"
"BlogZZZOnConflictInput"
"BlogZZZOrderBy"
"BlogZZZUpdateInput"
"BlogZZZUpdateResponse"
(10 rows)
(13 rows)

rollback;
224 changes: 224 additions & 0 deletions test/expected/mutation_insert_on_conflict.out
@@ -0,0 +1,224 @@
begin;
create table account(
id int primary key,
email varchar(255) not null,
priority int,
status text default 'active'
);
/*
Literals
*/
select jsonb_pretty(graphql.resolve($$
mutation {
insertIntoAccountCollection(
objects: [
{ id: 1, email: "foo@barsley.com", priority: 1 },
{ id: 2, email: "bar@foosworth.com" }
]
onConflict: {
constraint: account_pkey,
updateFields: [email, priority, status],
}
) {
affectedCount
records {
id
email
priority
}
}
}
$$));
jsonb_pretty
---------------------------------------------------
{ +
"data": { +
"insertIntoAccountCollection": { +
"records": [ +
{ +
"id": 1, +
"email": "foo@barsley.com", +
"priority": 1 +
}, +
{ +
"id": 2, +
"email": "bar@foosworth.com",+
"priority": null +
} +
], +
"affectedCount": 2 +
} +
} +
}
(1 row)

-- Email should update. Priority should not
-- 1 row affected
select jsonb_pretty(graphql.resolve($$
mutation {
insertIntoAccountCollection(
objects: [
{ id: 1, email: "new@email.com", priority: 2 },
]
onConflict: {
constraint: account_pkey,
updateFields: [email, status],
}
) {
affectedCount
records {
id
email
}
}
}
$$));
jsonb_pretty
----------------------------------------------
{ +
"data": { +
"insertIntoAccountCollection": { +
"records": [ +
{ +
"id": 1, +
"email": "new@email.com"+
} +
], +
"affectedCount": 1 +
} +
} +
}
(1 row)

-- Email and priority should update
-- 2 rows affected
select jsonb_pretty(graphql.resolve($$
mutation {
insertIntoAccountCollection(
objects: [
{ id: 1, email: "new@email.com", priority: 2 },
{ id: 2, email: "new@email.com"},
]
onConflict: {
constraint: account_pkey,
updateFields: [email, status],
}
) {
affectedCount
records {
id
email
priority
}
}
}
$$));
jsonb_pretty
-----------------------------------------------
{ +
"data": { +
"insertIntoAccountCollection": { +
"records": [ +
{ +
"id": 1, +
"email": "new@email.com",+
"priority": 1 +
}, +
{ +
"id": 2, +
"email": "new@email.com",+
"priority": null +
} +
], +
"affectedCount": 2 +
} +
} +
}
(1 row)

-- Filter prevents second row update
-- 1 row affected
select jsonb_pretty(graphql.resolve($$
mutation {
insertIntoAccountCollection(
objects: [
{ id: 1, email: "third@email.com"},
{ id: 2, email: "new@email.com"},
]
onConflict: {
constraint: account_pkey,
updateFields: [email, status],
filter: {
id: $ifilt
}
}
) {
affectedCount
records {
id
email
priority
}
}
}
$$,
variables:= '{"ifilt": {"eq": 2}}'
));
jsonb_pretty
-----------------------------------------------
{ +
"data": { +
"insertIntoAccountCollection": { +
"records": [ +
{ +
"id": 2, +
"email": "new@email.com",+
"priority": null +
} +
], +
"affectedCount": 1 +
} +
} +
}
(1 row)

-- Variable Filter
-- Only row id=2 updated due to where clause
select jsonb_pretty(graphql.resolve($$
mutation AccountsFiltered($ifilt: IntFilter!)
insertIntoAccountCollection(
objects: [
{ id: 1, email: "fourth@email.com"},
{ id: 2, email: "fourth@email.com"},
]
onConflict: {
constraint: account_pkey,
updateFields: [email, status],
filter: {
id: $ifilt
}
}
) {
affectedCount
records {
id
email
priority
}
}
}
$$,
variables:= '{"ifilt": {"eq": 2}}'
));
jsonb_pretty
--------------------------------------------------------------------------------------------------------------------------------
{ +
"errors": [ +
{ +
"message": "query parse error: Parse error at 3:7\nUnexpected `insertIntoAccountCollection[Name]`\nExpected `{`\n"+
} +
] +
}
(1 row)

rollback;
12 changes: 12 additions & 0 deletions test/expected/resolve___schema.out
@@ -165,6 +165,10 @@ begin;
"kind": "OBJECT", +
"name": "BlogPostEdge" +
}, +
{ +
"kind": "ENUM", +
"name": "BlogPostField" +
}, +
{ +
"kind": "INPUT_OBJECT", +
"name": "BlogPostFilter" +
@@ -177,6 +181,14 @@ begin;
"kind": "OBJECT", +
"name": "BlogPostInsertResponse" +
}, +
{ +
"kind": "ENUM", +
"name": "BlogPostOnConflictConstraint" +
}, +
{ +
"kind": "INPUT_OBJECT", +
"name": "BlogPostOnConflictInput" +
}, +
{ +
"kind": "INPUT_OBJECT", +
"name": "BlogPostOrderBy" +
141 changes: 141 additions & 0 deletions test/expected/resolve_graphiql_schema.out

Large diffs are not rendered by default.

140 changes: 140 additions & 0 deletions test/sql/mutation_insert_on_conflict.sql
@@ -0,0 +1,140 @@
begin;

create table account(
id int primary key,
email varchar(255) not null,
priority int,
status text default 'active'
);

/*
Literals
*/

select jsonb_pretty(graphql.resolve($$
mutation {
insertIntoAccountCollection(
objects: [
{ id: 1, email: "foo@barsley.com", priority: 1 },
{ id: 2, email: "bar@foosworth.com" }
]
onConflict: {
constraint: account_pkey,
updateFields: [email, priority, status],
}
) {
affectedCount
records {
id
email
priority
}
}
}
$$));

-- Email should update. Priority should not
-- 1 row affected
select jsonb_pretty(graphql.resolve($$
mutation {
insertIntoAccountCollection(
objects: [
{ id: 1, email: "new@email.com", priority: 2 },
]
onConflict: {
constraint: account_pkey,
updateFields: [email, status],
}
) {
affectedCount
records {
id
email
}
}
}
$$));

-- Email and priority should update
-- 2 rows affected
select jsonb_pretty(graphql.resolve($$
mutation {
insertIntoAccountCollection(
objects: [
{ id: 1, email: "new@email.com", priority: 2 },
{ id: 2, email: "new@email.com"},
]
onConflict: {
constraint: account_pkey,
updateFields: [email, status],
}
) {
affectedCount
records {
id
email
priority
}
}
}
$$));

-- Filter prevents second row update
-- 1 row affected
select jsonb_pretty(graphql.resolve($$
mutation {
insertIntoAccountCollection(
objects: [
{ id: 1, email: "third@email.com"},
{ id: 2, email: "new@email.com"},
]
onConflict: {
constraint: account_pkey,
updateFields: [email, status],
filter: {
id: $ifilt
}
}
) {
affectedCount
records {
id
email
priority
}
}
}
$$,
variables:= '{"ifilt": {"eq": 2}}'
));

-- Variable Filter
-- Only row id=2 updated due to where clause
select jsonb_pretty(graphql.resolve($$
mutation AccountsFiltered($ifilt: IntFilter!)
insertIntoAccountCollection(
objects: [
{ id: 1, email: "fourth@email.com"},
{ id: 2, email: "fourth@email.com"},
]
onConflict: {
constraint: account_pkey,
updateFields: [email, status],
filter: {
id: $ifilt
}
}
) {
affectedCount
records {
id
email
priority
}
}
}
$$,
variables:= '{"ifilt": {"eq": 2}}'
));

rollback;