
fix(D1): numerical filters and aggregations + emulated referential actions #4970

Merged
17 commits · merged Aug 8, 2024

Commits
c9d1b37
feat(driver-adapters): add support for arg_types (Napi, Wasm) in "Query"
jkomyno Jul 31, 2024
b627392
test(driver-adapters): fixed D1 tests related to Int64 (de)serialisation
jkomyno Jul 31, 2024
46841fc
DRIVER_ADAPTERS_BRANCH=feat/d1-arg-types retrigger CI/CD
jkomyno Jul 31, 2024
d742a1e
DRIVER_ADAPTERS_BRANCH=feat-d1-arg-types retrigger CI/CD
jkomyno Aug 6, 2024
dd8fb32
Merge branch 'main' of github.com:prisma/prisma-engines
jkomyno Aug 6, 2024
286ce7b
Merge branch 'main' into integration/fix-d1-int64
jkomyno Aug 6, 2024
966bb2c
chore: add logs to "Extract branch name" CI step
jkomyno Aug 6, 2024
61431bb
DRIVER_ADAPTERS_BRANCH=feat/d1-arg-types chore: retrigger CI/CD
jkomyno Aug 6, 2024
a125647
chore: add logs to "Extract branch name" CI step
jkomyno Aug 6, 2024
a17704b
DRIVER_ADAPTERS_BRANCH=feat/d1-arg-types chore: retrigger CI/CD
jkomyno Aug 6, 2024
372fa9b
fix: use head ref rather than merge branch to get the original commit…
jkomyno Aug 6, 2024
f1fdaaf
DRIVER_ADAPTERS_BRANCH=feat/d1-arg-types chore: retrigger CI/CD
jkomyno Aug 6, 2024
8c92345
feat(driver-adapters): map every "QuaintValue" enum value to "JSArgType"
jkomyno Aug 7, 2024
39886f4
chore(driver-adapters): use camelCase for "Query" deserialisation in …
jkomyno Aug 7, 2024
e5ebd17
DRIVER_ADAPTERS_BRANCH=feat/d1-arg-types chore: retrigger CI/CD
jkomyno Aug 7, 2024
1ba2653
feat(driver-adapters): discard the optionality in JSArgType, simplify…
jkomyno Aug 8, 2024
3fe108c
DRIVER_ADAPTERS_BRANCH=feat/d1-arg-types chore: retrigger CI/CD
jkomyno Aug 8, 2024
5 changes: 5 additions & 0 deletions .github/workflows/test-driver-adapters-template.yml
@@ -31,6 +31,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
# using head ref rather than merge branch to get original commit message
ref: ${{ github.event.pull_request.head.sha }}
- name: "Setup Node.js"
uses: actions/setup-node@v4
with:
@@ -55,7 +58,9 @@ jobs:
- name: Extract Branch Name
id: extract-branch
run: |
echo "Extracting branch name from: $(git show -s --format=%s)"
branch="$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)"
echo "branch=$branch"
if [ -n "$branch" ]; then
echo "Using $branch branch of driver adapters"
echo "DRIVER_ADAPTERS_BRANCH=$branch" >> "$GITHUB_ENV"
3 changes: 3 additions & 0 deletions .github/workflows/wasm-benchmarks.yml
@@ -23,6 +23,7 @@ jobs:
- name: Checkout PR branch
uses: actions/checkout@v4
with:
# using head ref rather than merge branch to get original commit message
ref: ${{ github.event.pull_request.head.sha }}

- uses: ./.github/workflows/include/rust-wasm-setup
@@ -48,7 +49,9 @@

- name: Extract Branch Name
run: |
echo "Extracting branch name from: $(git show -s --format=%s)"
branch="$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)"
echo "branch=$branch"
if [ -n "$branch" ]; then
echo "Using $branch branch of driver adapters"
echo "DRIVER_ADAPTERS_BRANCH=$branch" >> "$GITHUB_ENV"
@@ -33,13 +33,7 @@ mod one2one_req {
schema.to_owned()
}

#[connector_test(schema(required), exclude(Sqlite("cfd1")))]
/// On D1, this fails with:
///
/// ```diff
/// - {"data":{"updateManyParent":{"count":1}}}
/// + {"data":{"updateManyParent":{"count":2}}}
/// ```
#[connector_test(schema(required))]
async fn update_parent_cascade(runner: Runner) -> TestResult<()> {
insta::assert_snapshot!(
run_query!(&runner, r#"mutation {
@@ -176,14 +170,7 @@ mod one2one_opt {
schema.to_owned()
}

#[connector_test(schema(optional), exclude(Sqlite("cfd1")))]
// Updating the parent updates the child FK as well.
// On D1, this fails with:
//
// ```diff
// - {"data":{"updateManyParent":{"count":1}}}
// + {"data":{"updateManyParent":{"count":2}}}
// ```
#[connector_test(schema(optional))]
async fn update_parent_cascade(runner: Runner) -> TestResult<()> {
insta::assert_snapshot!(
run_query!(&runner, r#"mutation {
@@ -255,13 +255,7 @@ mod one2many_req {
Ok(())
}

#[connector_test(exclude(Sqlite("cfd1")))]
/// Updating the parent succeeds if no child is connected or if the linking fields aren't part of the update payload.
///
/// ```diff
/// - {"data":{"updateManyParent":{"count":1}}}
/// + {"data":{"updateManyParent":{"count":2}}}
/// ```
#[connector_test]
async fn update_parent(runner: Runner) -> TestResult<()> {
create_test_data(&runner).await?;
run_query!(
@@ -383,13 +377,7 @@ mod one2many_opt {
Ok(())
}

#[connector_test(exclude(Sqlite("cfd1")))]
/// Updating the parent succeeds if no child is connected or if the linking fields aren't part of the update payload.
///
/// ```diff
/// - {"data":{"updateManyParent":{"count":1}}}
/// + {"data":{"updateManyParent":{"count":2}}}
/// ```
#[connector_test]
async fn update_parent(runner: Runner) -> TestResult<()> {
create_test_data(&runner).await?;
run_query!(
@@ -65,13 +65,7 @@ mod one2one_opt {
Ok(())
}

#[connector_test(exclude(Sqlite("cfd1")))]
// On D1, this fails with:
//
// ```diff
// - {"data":{"updateManyParent":{"count":1}}}
// + {"data":{"updateManyParent":{"count":2}}}
// ```
#[connector_test]
async fn update_many_parent(runner: Runner) -> TestResult<()> {
insta::assert_snapshot!(
run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#),
@@ -457,13 +451,7 @@ mod one2many_opt {
Ok(())
}

#[connector_test(exclude(Sqlite("cfd1")))]
// On D1, this fails with:
//
// ```diff
// - {"data":{"updateManyParent":{"count":1}}}
// + {"data":{"updateManyParent":{"count":2}}}
// ```
#[connector_test]
async fn update_many_parent(runner: Runner) -> TestResult<()> {
insta::assert_snapshot!(
run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#),
@@ -52,13 +52,7 @@ mod aggr_group_by_having {
Ok(())
}

#[connector_test(exclude(Sqlite("cfd1")))]
// On D1, this fails with:
//
// ```diff
// - {"data":{"groupByTestModel":[{"string":"group1","_count":{"int":2}}]}}
// + {"data":{"groupByTestModel":[]}}
// ```
#[connector_test]
async fn having_count_scalar_filter(runner: Runner) -> TestResult<()> {
create_row(&runner, r#"{ id: 1, int: 1, string: "group1" }"#).await?;
create_row(&runner, r#"{ id: 2, int: 2, string: "group1" }"#).await?;
@@ -133,13 +127,7 @@ mod aggr_group_by_having {
Ok(())
}

#[connector_test(exclude(Sqlite("cfd1")))]
// On D1, this fails with:
//
// ```diff
// - {"data":{"groupByTestModel":[{"string":"group1","_sum":{"float":16.0,"int":16}}]}}
// + {"data":{"groupByTestModel":[]}}
// ```
#[connector_test]
async fn having_sum_scalar_filter(runner: Runner) -> TestResult<()> {
create_row(&runner, r#"{ id: 1, float: 10, int: 10, string: "group1" }"#).await?;
create_row(&runner, r#"{ id: 2, float: 6, int: 6, string: "group1" }"#).await?;
@@ -208,13 +196,7 @@ mod aggr_group_by_having {
Ok(())
}

#[connector_test(exclude(Sqlite("cfd1")))]
// On D1, this fails with:
//
// ```diff
// - {"data":{"groupByTestModel":[{"string":"group1","_min":{"float":0.0,"int":0}},{"string":"group2","_min":{"float":0.0,"int":0}}]}}
// + {"data":{"groupByTestModel":[]}}
// ```
#[connector_test]
async fn having_min_scalar_filter(runner: Runner) -> TestResult<()> {
create_row(&runner, r#"{ id: 1, float: 10, int: 10, string: "group1" }"#).await?;
create_row(&runner, r#"{ id: 2, float: 0, int: 0, string: "group1" }"#).await?;
@@ -282,13 +264,7 @@ mod aggr_group_by_having {
Ok(())
}

#[connector_test(exclude(Sqlite("cfd1")))]
// On D1, this fails with:
//
// ```diff
// - {"data":{"groupByTestModel":[{"string":"group1","_max":{"float":10.0,"int":10}},{"string":"group2","_max":{"float":10.0,"int":10}}]}}
// + {"data":{"groupByTestModel":[]}}
// ```
#[connector_test]
async fn having_max_scalar_filter(runner: Runner) -> TestResult<()> {
create_row(&runner, r#"{ id: 1, float: 10, int: 10, string: "group1" }"#).await?;
create_row(&runner, r#"{ id: 2, float: 0, int: 0, string: "group1" }"#).await?;
@@ -356,13 +332,7 @@ mod aggr_group_by_having {
Ok(())
}

#[connector_test(exclude(Sqlite("cfd1")))]
// On D1, this fails with:
//
// ```diff
// - {"data":{"groupByTestModel":[{"string":"group1","_count":{"string":2}}]}}
// + {"data":{"groupByTestModel":[]}}
// ```
#[connector_test]
async fn having_count_non_numerical_field(runner: Runner) -> TestResult<()> {
create_row(&runner, r#"{ id: 1, float: 10, int: 10, string: "group1" }"#).await?;
create_row(&runner, r#"{ id: 2, float: 0, int: 0, string: "group1" }"#).await?;
@@ -380,16 +350,7 @@
Ok(())
}

#[connector_test(exclude(Sqlite("cfd1")))]
// On D1, this panics with:
//
// ```
// assertion `left == right` failed: Query result: {"data":{"groupByTestModel":[]}} is not part of the expected results: ["{\"data\":{\"groupByTestModel\":[{\"string\":\"group1\"},{\"string\":\"group2\"}]}}", "{\"data\":{\"groupByTestModel\":[{\"string\":\"group2\"},{\"string\":\"group1\"}]}}"] for connector SQLite (cfd1)
// left: false
// right: true
// note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
// FAILED
// ```
#[connector_test]
async fn having_without_aggr_sel(runner: Runner) -> TestResult<()> {
create_row(&runner, r#"{ id: 1, float: 10, int: 10, string: "group1" }"#).await?;
create_row(&runner, r#"{ id: 2, float: 0, int: 0, string: "group1" }"#).await?;
@@ -140,7 +140,7 @@ mod nested_create_many {
// Each DB allows a certain amount of params per single query, and a certain number of rows.
// We create 1000 nested records.
// "Nested createMany" should "allow creating a large number of records (horizontal partitioning check)"
#[connector_test(exclude(Sqlite("cfd1")))]
#[connector_test]
async fn allow_create_large_number_records(runner: Runner) -> TestResult<()> {
let records: Vec<_> = (1..=1000).map(|i| format!(r#"{{ id: {i}, str1: "{i}" }}"#)).collect();

@@ -16,7 +16,7 @@ mod delete_many_rels {
// On D1, this fails with:
//
// ```diff
// - {"data":{"deleteManyParent":{"count":1}}}
// - {"data":{"deleteManyParent":{"count":2}}}
// + {"data":{"deleteManyParent":{"count":3}}}
// ```
async fn p1_c1(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> {
91 changes: 91 additions & 0 deletions query-engine/driver-adapters/src/conversion/js_arg_type.rs
@@ -0,0 +1,91 @@
#[derive(Debug, PartialEq)]
pub enum JSArgType {
/// 32-bit signed integer.
Int32,
/// 64-bit signed integer.
Int64,
/// 32-bit floating point.
Float,
/// 64-bit floating point.
Double,
/// String value.
Text,
/// Database enum value.
Enum,
/// Database enum array (PostgreSQL specific).
EnumArray,
/// Bytes value.
Bytes,
/// Boolean value.
Boolean,
/// A single character.
Char,
/// An array value (PostgreSQL).
Array,
/// A numeric value.
Numeric,
/// A JSON value.
Json,
/// An XML value.
Xml,
/// A UUID value.
Uuid,
/// A datetime value.
DateTime,
/// A date value.
Date,
/// A time value.
Time,
}

impl core::fmt::Display for JSArgType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let s = match self {
JSArgType::Int32 => "Int32",
JSArgType::Int64 => "Int64",
JSArgType::Float => "Float",
JSArgType::Double => "Double",
JSArgType::Text => "Text",
JSArgType::Enum => "Enum",
JSArgType::EnumArray => "EnumArray",
JSArgType::Bytes => "Bytes",
JSArgType::Boolean => "Boolean",
JSArgType::Char => "Char",
JSArgType::Array => "Array",
JSArgType::Numeric => "Numeric",
JSArgType::Json => "Json",
JSArgType::Xml => "Xml",
JSArgType::Uuid => "Uuid",
JSArgType::DateTime => "DateTime",
JSArgType::Date => "Date",
JSArgType::Time => "Time",
};

write!(f, "{}", s)
}
}

pub fn value_to_js_arg_type(value: &quaint::Value) -> Option<JSArgType> {
match &value.typed {
quaint::ValueType::Int32(Some(_)) => Some(JSArgType::Int32),
quaint::ValueType::Int64(Some(_)) => Some(JSArgType::Int64),
quaint::ValueType::Float(Some(_)) => Some(JSArgType::Float),
quaint::ValueType::Double(Some(_)) => Some(JSArgType::Double),
quaint::ValueType::Text(Some(_)) => Some(JSArgType::Text),
quaint::ValueType::Enum(Some(_), _) => Some(JSArgType::Enum),
quaint::ValueType::EnumArray(Some(_), _) => Some(JSArgType::EnumArray),
quaint::ValueType::Bytes(Some(_)) => Some(JSArgType::Bytes),
quaint::ValueType::Boolean(Some(_)) => Some(JSArgType::Boolean),
quaint::ValueType::Char(Some(_)) => Some(JSArgType::Char),
quaint::ValueType::Array(Some(_)) => Some(JSArgType::Array),
quaint::ValueType::Numeric(Some(_)) => Some(JSArgType::Numeric),
quaint::ValueType::Json(Some(_)) => Some(JSArgType::Json),
quaint::ValueType::Xml(Some(_)) => Some(JSArgType::Xml),
quaint::ValueType::Uuid(Some(_)) => Some(JSArgType::Uuid),
quaint::ValueType::DateTime(Some(_)) => Some(JSArgType::DateTime),
quaint::ValueType::Date(Some(_)) => Some(JSArgType::Date),
quaint::ValueType::Time(Some(_)) => Some(JSArgType::Time),

_ => None,
}
}
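
For context, a minimal sketch of how the new `value_to_js_arg_type` helper behaves — not part of the diff above, and it assumes quaint's `ValueType::into_value()` helper, as used in the existing conversion tests: concrete values get a type tag, while NULLs of any type yield `None`.

```rust
// Sketch only — illustrative test, not code from this PR.
use quaint::ValueType;

#[test]
fn tags_concrete_values_but_not_nulls() {
    // A concrete Int64 is tagged, so the JS driver adapter (e.g. D1) can bind it
    // as a 64-bit integer instead of guessing from the JS runtime type.
    let int = ValueType::Int64(Some(42)).into_value();
    assert_eq!(value_to_js_arg_type(&int), Some(JSArgType::Int64));

    // A NULL of any type carries no tag; `value_to_js_arg_type` returns `None`
    // and the adapter falls back to its default handling.
    let null = ValueType::Int64(None).into_value();
    assert_eq!(value_to_js_arg_type(&null), None);
}
```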
2 changes: 2 additions & 0 deletions query-engine/driver-adapters/src/conversion/mod.rs
@@ -1,4 +1,5 @@
pub(crate) mod js_arg;
pub(crate) mod js_arg_type;
pub(crate) mod js_to_quaint;

#[cfg(feature = "mysql")]
@@ -9,3 +10,4 @@ pub(crate) mod postgres;
pub(crate) mod sqlite;

pub use js_arg::JSArg;
pub use js_arg_type::{value_to_js_arg_type, JSArgType};
2 changes: 1 addition & 1 deletion query-engine/driver-adapters/src/conversion/sqlite.rs
@@ -36,7 +36,7 @@ mod test {
#[rustfmt::skip]
fn test_value_to_js_arg() {
let test_cases = vec![
(
(
// This is different than how mysql or postgres processes integral BigInt values.
ValueType::Numeric(Some(1.into())),
JSArg::Value(Value::Number("1.0".parse().unwrap()))
14 changes: 13 additions & 1 deletion query-engine/driver-adapters/src/napi/conversion.rs
@@ -1,4 +1,4 @@
pub(crate) use crate::conversion::JSArg;
pub(crate) use crate::conversion::{JSArg, JSArgType};

use napi::bindgen_prelude::{FromNapiValue, ToNapiValue};
use napi::NapiValue;
@@ -12,6 +12,12 @@ impl FromNapiValue for JSArg {
}
}

impl FromNapiValue for JSArgType {
unsafe fn from_napi_value(_env: napi::sys::napi_env, _napi_value: napi::sys::napi_value) -> napi::Result<Self> {
unreachable!()
}
}

// ToNapiValue is the napi equivalent to serde::Serialize.
impl ToNapiValue for JSArg {
unsafe fn to_napi_value(env: napi::sys::napi_env, value: Self) -> napi::Result<napi::sys::napi_value> {
@@ -46,3 +52,9 @@ impl ToNapiValue for JSArg {
}
}
}

impl ToNapiValue for JSArgType {
unsafe fn to_napi_value(env: napi::sys::napi_env, value: Self) -> napi::Result<napi::sys::napi_value> {
ToNapiValue::to_napi_value(env, value.to_string())
}
}
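
`JSArgType` only crosses the N-API boundary in one direction (engine → driver adapter), which is why `ToNapiValue` serialises it as its `Display` string while `FromNapiValue` is deliberately `unreachable!()`. As a rough, illustrative sketch of the resulting query payload — the field names are assumptions based on the commit messages (`arg_types` support, camelCase serialisation), not taken from this diff:

```rust
// Hypothetical shape, for illustration only; the real `Query` struct lives in the
// driver-adapters crate and may differ.
struct Query {
    sql: String,
    args: Vec<JSArg>,          // argument values
    arg_types: Vec<JSArgType>, // one tag per argument, sent to JS as a plain string
                               // such as "Int64" (camelCased, e.g. `argTypes`, on the JS side)
}
```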