Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Batch transaction #5849

Merged
merged 30 commits into from
Jul 31, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
30 commits
Select commit Hold shift + click to select a range
8222855
Batch transaction boilerplate
davimacedo Jul 23, 2019
a46451f
Merge branch 'upstream' into batchTransaction
davimacedo Jul 23, 2019
47be348
Refactoring transaction boilerplate
davimacedo Jul 23, 2019
5404ba9
Independent sessions test
davimacedo Jul 23, 2019
c99fe13
Transactions - partial
davimacedo Jul 24, 2019
5ddab74
Merge branch 'upstream' into batchTransaction
davimacedo Jul 25, 2019
863b235
Missing only one test
davimacedo Jul 26, 2019
e1f7caf
All tests passing for mongo db
davimacedo Jul 26, 2019
b6b5cc7
Tests on Travis
davimacedo Jul 26, 2019
a8850f5
Merge branch 'upstream' into batchTransaction
davimacedo Jul 26, 2019
e1dbd71
Transactions on postgres
davimacedo Jul 26, 2019
142acde
Fix travis to restart mongodb
davimacedo Jul 26, 2019
fa087c3
Remove mongodb service and keep only mongodb runner
davimacedo Jul 26, 2019
801e859
MongoDB service back
davimacedo Jul 26, 2019
5ef981b
Initialize replicaset
davimacedo Jul 26, 2019
264fc84
Remove mongodb runner again
davimacedo Jul 26, 2019
1e5753b
Again only with mongodb-runner and removing cache
davimacedo Jul 26, 2019
93cc1c2
Trying with pretest and posttest
davimacedo Jul 26, 2019
cd6b2ed
WiredTiger
davimacedo Jul 26, 2019
1663733
Pretest and posttest again
davimacedo Jul 26, 2019
afa6ed3
Removing inexistent scripts
davimacedo Jul 26, 2019
560c9fd
wiredTiger
davimacedo Jul 26, 2019
145c28d
One more attempt
davimacedo Jul 26, 2019
ac3b22c
Merge branch 'upstream' into batchTransaction
davimacedo Jul 27, 2019
e3b42cd
Trying another way to run mongodb-runner
davimacedo Jul 29, 2019
97d8a58
Merge branch 'upstream' into batchTransaction
davimacedo Jul 29, 2019
dd214be
Fixing tests
davimacedo Jul 29, 2019
b9420e5
Merge branch 'upstream' into batchTransaction
davimacedo Jul 31, 2019
1499519
Include batch transaction on direct access
davimacedo Jul 31, 2019
20cb761
Add tests to direct access
davimacedo Jul 31, 2019
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 1 addition & 4 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
language: node_js
dist: trusty
services:
- mongodb
- postgresql
- redis-server
- docker
Expand All @@ -19,13 +18,12 @@ branches:
cache:
directories:
- "$HOME/.npm"
- "$HOME/.mongodb/versions"
stage: test
env:
global:
- COVERAGE_OPTION='./node_modules/.bin/nyc'
matrix:
- MONGODB_VERSION=4.0.4
- MONGODB_VERSION=4.0.4 MONGODB_TOPOLOGY=replicaset MONGODB_STORAGE_ENGINE=wiredTiger
- MONGODB_VERSION=3.6.9
- PARSE_SERVER_TEST_DB=postgres
- PARSE_SERVER_TEST_CACHE=redis
Expand All @@ -42,7 +40,6 @@ before_script:
- psql -c 'create database parse_server_postgres_adapter_test_database;' -U postgres
- psql -c 'CREATE EXTENSION postgis;' -U postgres -d parse_server_postgres_adapter_test_database
- psql -c 'CREATE EXTENSION postgis_topology;' -U postgres -d parse_server_postgres_adapter_test_database
- silent=1 mongodb-runner --start
- greenkeeper-lockfile-update
script:
- npm run lint
Expand Down
6 changes: 4 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -95,8 +95,10 @@
"lint": "flow && eslint --cache ./",
"build": "babel src/ -d lib/ --copy-files",
"watch": "babel --watch src/ -d lib/ --copy-files",
"test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=mmapv1 TESTING=1 jasmine",
"coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=mmapv1 TESTING=1 nyc jasmine",
"pretest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} mongodb-runner start",
"test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 jasmine",
"posttest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} mongodb-runner stop",
"coverage": "npm run pretest && cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 nyc jasmine && npm run posttest",
"start": "node ./bin/parse-server",
"prepare": "npm run build",
"postinstall": "node -p 'require(\"./postinstall.js\")()'"
Expand Down
3 changes: 2 additions & 1 deletion spec/.eslintrc.json
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,8 @@
"jequal": true,
"create": true,
"arrayContains": true,
"expectAsync": true
"expectAsync": true,
"databaseAdapter": true
},
"rules": {
"no-console": [0],
Expand Down
2 changes: 1 addition & 1 deletion spec/GridFSBucketStorageAdapter.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ describe('GridFSBucket and GridStore interop', () => {
beforeEach(async () => {
const gsAdapter = new GridStoreAdapter(databaseURI);
const db = await gsAdapter._connect();
db.dropDatabase();
await db.dropDatabase();
});

it('a file created in GridStore should be available in GridFS', async () => {
Expand Down
2 changes: 1 addition & 1 deletion spec/GridStoreAdapter.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ describe_only_db('mongo')('GridStoreAdapter', () => {
const config = Config.get(Parse.applicationId);
const gridStoreAdapter = new GridStoreAdapter(databaseURI);
const db = await gridStoreAdapter._connect();
db.dropDatabase();
await db.dropDatabase();
const filesController = new FilesController(
gridStoreAdapter,
Parse.applicationId,
Expand Down
269 changes: 268 additions & 1 deletion spec/ParseServerRESTController.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ const ParseServerRESTController = require('../lib/ParseServerRESTController')
.ParseServerRESTController;
const ParseServer = require('../lib/ParseServer').default;
const Parse = require('parse/node').Parse;
const TestUtils = require('../lib/TestUtils');

let RESTController;

Expand Down Expand Up @@ -40,7 +41,7 @@ describe('ParseServerRESTController', () => {
);
});

it('should handle a POST batch', done => {
it('should handle a POST batch without transaction', done => {
RESTController.request('POST', 'batch', {
requests: [
{
Expand Down Expand Up @@ -69,6 +70,272 @@ describe('ParseServerRESTController', () => {
);
});

it('should handle a POST batch with transaction=false', done => {
  // An explicit `transaction: false` must behave exactly like a plain batch:
  // one result entry per request, no transactional session involved.
  const requests = [
    { method: 'GET', path: '/classes/MyObject' },
    { method: 'POST', path: '/classes/MyObject', body: { key: 'value' } },
    { method: 'GET', path: '/classes/MyObject' },
  ];
  RESTController.request('POST', 'batch', { requests, transaction: false }).then(
    res => {
      expect(res.length).toBe(3);
      done();
    },
    err => {
      jfail(err);
      done();
    }
  );
});

if (
(process.env.MONGODB_VERSION === '4.0.4' &&
process.env.MONGODB_TOPOLOGY === 'replicaset' &&
process.env.MONGODB_STORAGE_ENGINE === 'wiredTiger') ||
process.env.PARSE_SERVER_TEST_DB === 'postgres'
) {
describe('transactions', () => {
beforeAll(async () => {
  // Transactions on MongoDB require the replica-set + WiredTiger test
  // configuration, so point the server at the replica-set URI in that case.
  const mongoSupportsTransactions =
    process.env.MONGODB_VERSION === '4.0.4' &&
    process.env.MONGODB_TOPOLOGY === 'replicaset' &&
    process.env.MONGODB_STORAGE_ENGINE === 'wiredTiger';
  if (mongoSupportsTransactions) {
    await reconfigureServer({
      databaseAdapter: undefined,
      databaseURI:
        'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase?replicaSet=replicaset',
    });
  }
});

// Start every spec from an empty database so state from a previous spec
// cannot leak into the session/rollback assertions below.
// NOTE(review): the `true` argument's meaning is defined in TestUtils —
// presumably a "fast"/permanent destroy variant; confirm there.
beforeEach(async () => {
await TestUtils.destroyAllDataPermanently(true);
});

it('should handle a batch request with transaction = true', done => {
  // The collection must exist before the batch runs (transactions only work
  // on pre-existing collections), so create and destroy a throwaway object.
  const myObject = new Parse.Object('MyObject');
  myObject
    .save()
    .then(() => myObject.destroy())
    .then(() => {
      // Record adapter-level creates so the session argument can be checked.
      spyOn(databaseAdapter, 'createObject').and.callThrough();
      return RESTController.request('POST', 'batch', {
        requests: [
          {
            method: 'POST',
            path: '/1/classes/MyObject',
            body: { key: 'value1' },
          },
          {
            method: 'POST',
            path: '/1/classes/MyObject',
            body: { key: 'value2' },
          },
        ],
        transaction: true,
      });
    })
    .then(response => {
      expect(response.length).toEqual(2);
      expect(response[0].success.objectId).toBeDefined();
      expect(response[0].success.createdAt).toBeDefined();
      expect(response[1].success.objectId).toBeDefined();
      expect(response[1].success.createdAt).toBeDefined();
      return new Parse.Query('MyObject').find();
    })
    .then(results => {
      // Both creates must share one transactional session: createObject's
      // 4th argument (index 3) carries the session handle.
      expect(databaseAdapter.createObject.calls.count()).toBe(2);
      expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toBe(
        databaseAdapter.createObject.calls.argsFor(1)[3]
      );
      expect(results.map(result => result.get('key')).sort()).toEqual([
        'value1',
        'value2',
      ]);
      done();
    })
    // Fix: the original left the inner promises floating with no rejection
    // handler, so any failure hung the spec until the Jasmine timeout
    // instead of failing fast with the actual error.
    .catch(done.fail);
});

it('should not save anything when one operation fails in a transaction', done => {
  // Pre-create the collection (transactions require it), then issue a batch
  // whose second write should be rejected (same key written as a string and
  // then as a number), forcing the whole transaction to roll back.
  const myObject = new Parse.Object('MyObject');
  myObject
    .save()
    .then(() => myObject.destroy())
    .then(() =>
      RESTController.request('POST', 'batch', {
        requests: [
          {
            method: 'POST',
            path: '/1/classes/MyObject',
            body: { key: 'value1' },
          },
          {
            method: 'POST',
            path: '/1/classes/MyObject',
            body: { key: 10 },
          },
        ],
        transaction: true,
      })
    )
    .then(
      // Fix: the original only handled the rejection path, so an unexpected
      // success silently timed out instead of failing the spec.
      () => done.fail('transactional batch should have been rejected'),
      error => {
        expect(error.message).toBeDefined();
        // Rollback check: neither object may have been persisted.
        return new Parse.Query('MyObject').find().then(results => {
          expect(results.length).toBe(0);
          done();
        });
      }
    )
    .catch(done.fail);
});

// Verifies transactional isolation: each transactional batch must get its
// own DB session, and non-transactional writes must get no session at all.
it('should generate separate session for each call', async () => {
const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections
await myObject.save();
await myObject.destroy();

const myObject2 = new Parse.Object('MyObject2'); // This is important because transaction only works on pre-existing collections
await myObject2.save();
await myObject2.destroy();

// Record every adapter-level create so session arguments can be inspected.
spyOn(databaseAdapter, 'createObject').and.callThrough();

// From inside the beforeSave of the second MyObject write, fire a nested,
// independent transactional batch on MyObject2 that is expected to be
// rejected (the same key is written as a string and then as a number) and
// roll back on its own without disturbing the outer transaction.
let myObjectCalls = 0;
Parse.Cloud.beforeSave('MyObject', async () => {
myObjectCalls++;
if (myObjectCalls === 2) {
try {
await RESTController.request('POST', 'batch', {
requests: [
{
method: 'POST',
path: '/1/classes/MyObject2',
body: { key: 'value1' },
},
{
method: 'POST',
path: '/1/classes/MyObject2',
body: { key: 10 },
},
],
transaction: true,
});
fail('should fail');
} catch (e) {
expect(e).toBeDefined();
}
}
});

// Outer transactional batch: must succeed despite the failed nested batch.
const response = await RESTController.request('POST', 'batch', {
requests: [
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value1' },
},
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value2' },
},
],
transaction: true,
});

expect(response.length).toEqual(2);
expect(response[0].success.objectId).toBeDefined();
expect(response[0].success.createdAt).toBeDefined();
expect(response[1].success.objectId).toBeDefined();
expect(response[1].success.createdAt).toBeDefined();

// A plain (non-transactional) batch on MyObject3, for contrast below.
await RESTController.request('POST', 'batch', {
requests: [
{
method: 'POST',
path: '/1/classes/MyObject3',
body: { key: 'value1' },
},
{
method: 'POST',
path: '/1/classes/MyObject3',
body: { key: 'value2' },
},
],
});

const query = new Parse.Query('MyObject');
const results = await query.find();
expect(results.map(result => result.get('key')).sort()).toEqual([
'value1',
'value2',
]);

// The nested MyObject2 batch failed, so its transaction must have rolled
// back and left nothing behind.
const query2 = new Parse.Query('MyObject2');
const results2 = await query2.find();
expect(results2.length).toEqual(0);

const query3 = new Parse.Query('MyObject3');
const results3 = await query3.find();
expect(results3.map(result => result.get('key')).sort()).toEqual([
'value1',
'value2',
]);

// 5 adapter creates expected: 2 for MyObject, 1 for MyObject2 (presumably
// the failing write never reaches the adapter — confirm in the adapter),
// and 2 for MyObject3. createObject's 4th argument (index 3) is the
// session handle each write ran under.
expect(databaseAdapter.createObject.calls.count()).toBe(5);
let transactionalSession;
let transactionalSession2;
let myObjectDBCalls = 0;
let myObject2DBCalls = 0;
let myObject3DBCalls = 0;
for (let i = 0; i < 5; i++) {
const args = databaseAdapter.createObject.calls.argsFor(i);
switch (args[0]) {
case 'MyObject':
myObjectDBCalls++;
// All MyObject creates share one session, distinct from MyObject2's.
if (!transactionalSession) {
transactionalSession = args[3];
} else {
expect(transactionalSession).toBe(args[3]);
}
if (transactionalSession2) {
expect(transactionalSession2).not.toBe(args[3]);
}
break;
case 'MyObject2':
myObject2DBCalls++;
transactionalSession2 = args[3];
if (transactionalSession) {
expect(transactionalSession).not.toBe(args[3]);
}
break;
case 'MyObject3':
myObject3DBCalls++;
// Non-transactional batch: no session handle is passed.
expect(args[3]).toEqual(null);
break;
}
}
expect(myObjectDBCalls).toEqual(2);
expect(myObject2DBCalls).toEqual(1);
expect(myObject3DBCalls).toEqual(2);
});
});
}

it('should handle a POST request', done => {
RESTController.request('POST', '/classes/MyObject', { key: 'value' })
.then(() => {
Expand Down
Loading