* feat: support knex
* fix: install dependencies in the correct directory
* fix: remove unused import
* feat: add sample for Knex

  Adds a small sample application for using Knex with PGAdapter and Cloud Spanner. Knex is a query
  builder built on top of the standard Node.js 'pg' driver, which is already supported. This means
  that Knex.js is indirectly also supported with PGAdapter.

* fix: run npm install
* test: remove deliberate test failure
* docs: add to readme + metadata
* docs: add meta tag
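As a side note to the point above that Knex builds on the standard `pg` driver: the following minimal sketch (not part of this commit) shows what a Knex connection to PGAdapter looks like when the client is set to `'pg'`. The host, port, and database name are assumed values for illustration only.

```typescript
import { knex, Knex } from 'knex';

// Because Knex delegates to the standard Node.js 'pg' driver when client is
// set to 'pg', connecting to PGAdapter is just a regular PostgreSQL connection.
// Host, port, and database name below are assumptions for illustration.
const db: Knex = knex({
  client: 'pg',
  connection: {
    host: 'localhost',
    port: 5432,
    database: 'my-database',
    ssl: false,
  },
});

async function ping(): Promise<void> {
  // The query travels through Knex -> pg -> PGAdapter -> Cloud Spanner.
  const result = await db.raw('select 1 as ok');
  console.log(result.rows[0].ok);
  await db.destroy();
}

ping().catch(err => {
  console.error(err);
  process.exit(1);
});
```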
Showing 12 changed files with 791 additions and 0 deletions.
@@ -0,0 +1,17 @@
<meta name='keywords' content='pgadapter, knex, knex.js, spanner, cloud spanner, node, node.js'>

# PGAdapter Spanner and Knex.js

PGAdapter has experimental support for [Knex.js](https://knexjs.org/) with the standard Node.js `pg`
driver. This sample application shows how to connect to PGAdapter with Knex, and how to execute
queries and transactions on Cloud Spanner.

The sample uses the Cloud Spanner emulator. You can run the sample on the emulator with this
command:

```shell
npm start
```

The sample application starts PGAdapter and the emulator in a Docker test container.
Docker must therefore be installed on your system to run this sample.
@@ -0,0 +1,20 @@
{
  "name": "knex-sample",
  "version": "0.0.1",
  "description": "Knex Query Builder Sample",
  "type": "commonjs",
  "devDependencies": {
    "@types/node": "^20.1.4",
    "ts-node": "10.9.1",
    "typescript": "5.2.2"
  },
  "dependencies": {
    "pg": "^8.9.0",
    "knex": "^3.0.1",
    "testcontainers": "^10.7.1",
    "yargs": "^17.5.1"
  },
  "scripts": {
    "start": "ts-node src/index.ts"
  }
}
@@ -0,0 +1,175 @@
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import {Knex} from "knex";
import {createDataModel, startPGAdapter} from './init';
import {Album, Concert, Singer, TicketSale, Track, Venue} from './model';
import {randomInt, randomUUID} from "crypto";
import {randomAlbumTitle, randomFirstName, randomLastName, randomTrackTitle} from "./random";

async function main() {
  // Start PGAdapter and the Spanner emulator in a Docker container.
  // Using a TestContainer to run PGAdapter is OK in development and test, but for production, it is
  // recommended to run PGAdapter as a side-car container.
  // See https://github.com/GoogleCloudPlatform/pgadapter/tree/postgresql-dialect/samples/cloud-run/nodejs
  // for a sample.
  const pgAdapter = await startPGAdapter();

  // Connect to PGAdapter with the standard PostgreSQL driver.
  const knex = require('knex')({
    client: 'pg',
    connection: {
      host: 'localhost',
      port: pgAdapter.getMappedPort(5432),
      database: 'knex-sample',
      ssl: false,
      jsonbSupport: true,
    }
  }) as Knex;

  // Create the sample tables (if they do not exist), and delete any existing test data before
  // running the sample.
  await createDataModel(knex);
  await deleteAllData(knex);

  // Create and then print some random data.
  await createRandomSingersAndAlbums(knex, 20);
  await printSingersAlbums(knex);

  // Create a Venue, Concert and TicketSale row.
  // The ticket_sales table uses an auto-generated primary key that is generated by a bit-reversed
  // sequence. The value can be returned to the application using a 'returning' clause.
  await createVenuesAndConcerts(knex);

  // Close the knex connection pool and shut down PGAdapter.
  await knex.destroy();
  await pgAdapter.stop();
}

async function createRandomSingersAndAlbums(knex: Knex, numSingers: number) {
  console.log("Creating random singers and albums...");
  const singers: Singer[] = new Array(numSingers);
  const albums: Album[] = [];
  const tracks: Track[] = [];

  await knex.transaction(async tx => {
    // Generate some random singers.
    for (let i=0; i<numSingers; i++) {
      singers[i] = {id: randomUUID(), first_name: randomFirstName(), last_name: randomLastName(),
        active: Math.random() < 0.5, created_at: new Date()} as Singer;

      // Generate some random albums.
      const numAlbums = randomInt(2, 10);
      for (let j=0; j<numAlbums; j++) {
        // Generate a random ID for the Album. This ID is also used for all the tracks of this
        // album, as the table "tracks" is interleaved in "albums".
        const album_id = randomUUID();
        albums.push({id: album_id, singer_id: singers[i].id, title: randomAlbumTitle(),
          marketing_budget: Math.random() * 1000000, created_at: new Date()} as Album);

        // Generate some random tracks.
        const numTracks = randomInt(5, 12);
        for (let k=0; k<numTracks; k++) {
          tracks.push({id: album_id, track_number: k+1, title: randomTrackTitle(),
            sample_rate: Math.random(), created_at: new Date()} as Track);
        }
      }
    }

    // Insert the data in batches of 50 elements.
    const batchSize = 50;
    for (let i=0; i<singers.length; i+=batchSize) {
      await tx.insert(singers.slice(i, i+batchSize)).into<Singer>('singers');
      process.stdout.write('.');
    }
    for (let i=0; i<albums.length; i+=batchSize) {
      await tx.insert(albums.slice(i, i+batchSize)).into<Album>('albums');
      process.stdout.write('.');
    }
    for (let i=0; i<tracks.length; i+=batchSize) {
      await tx.insert(tracks.slice(i, i+batchSize)).into<Track>('tracks');
      process.stdout.write('.');
    }
    console.log('');
  });
  console.log(`Finished creating ${singers.length} singers, ${albums.length} albums, and ${tracks.length} tracks.`);
}

async function printSingersAlbums(knex: Knex) {
  const singers = await knex.select('*').from<Singer>('singers').orderBy('last_name');
  for (const singer of singers) {
    console.log(`Singer ${singer.full_name} has albums:`);
    const albums = await knex.select('*')
        .from<Album>('albums')
        .where('singer_id', singer.id)
        .orderBy('title');
    for (const album of albums) {
      console.log(`\t${album.title}`);
    }
  }
}

async function createVenuesAndConcerts(knex: Knex) {
  console.log("Creating venues and concerts...");
  await knex.transaction(async tx => {
    const singer = await tx.select<Singer>('*').from('singers').first();
    const venue = {
      id: randomUUID(),
      name: 'Avenue Park',
      description: '{"Capacity": 5000, "Location": "New York", "Country": "US"}'
    } as Venue;
    await tx.insert<Venue>(venue).into('venues');
    const concert = {
      id: randomUUID(),
      name: 'Avenue Park Open',
      singer_id: singer!.id,
      venue_id: venue.id,
      start_time: new Date('2023-02-01T20:00:00-05:00'),
      end_time: new Date('2023-02-02T02:00:00-05:00'),
    } as Concert;
    await tx.insert<Concert>(concert).into('concerts');

    // TicketSale uses an auto-generated primary key, so we don't need to supply a value for it.
    // The primary key value is generated by a bit-reversed sequence.
    const ticketSale = {
      concert_id: concert.id,
      customer_name: `${randomFirstName()} ${randomLastName()}`,
      price: Math.random() * 1000,
      seats: ['A19', 'A20', 'A21'],
    } as TicketSale;
    // The generated ID can be returned.
    const rows = await tx.insert<TicketSale>(ticketSale).into('ticket_sales').returning('id');
    ticketSale.id = rows[0].id;
  });
  console.log("Finished creating venues and concerts");
}

async function deleteAllData(knex: Knex) {
  console.log("Deleting all existing test data...");
  await knex<TicketSale>('ticket_sales').delete();
  await knex<Concert>('concerts').delete();
  await knex<Venue>('venues').delete();
  await knex<Track>('tracks').delete();
  await knex<Album>('albums').delete();
  await knex<Singer>('singers').delete();
  console.log("Finished deleting all existing test data");
}

(async () => {
  await main();
})().catch(e => {
  console.error(e);
  process.exit(1);
});
@@ -0,0 +1,128 @@
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import {GenericContainer, PullPolicy, StartedTestContainer, TestContainer} from "testcontainers";
import {Knex} from "knex";

/**
 * Creates the data model that is needed for this sample application.
 *
 * The Cloud Spanner PostgreSQL dialect does not support all system tables (pg_catalog tables) that
 * are present in open-source PostgreSQL databases. Those tables are used by Knex migrations.
 * Migrations are therefore not supported.
 */
export async function createDataModel(knex: Knex) {
  console.log("Checking whether the tables already exist");
  const result: any = await knex.raw(`
    SELECT COUNT(1) AS c
    FROM information_schema.tables
    WHERE table_schema='public'
    AND table_name IN ('singers', 'albums', 'tracks', 'venues', 'concerts', 'ticket_sales')`);
  if (result.rows[0].c == '6') {
    console.log("Sample data model already exists, not creating any new tables");
    return;
  }
  console.log("Creating tables...");
  // Create the data model.
  await knex.raw(
      `
      create table if not exists singers (
        id varchar not null primary key,
        first_name varchar,
        last_name varchar not null,
        full_name varchar(300) generated always as (
          CASE WHEN first_name IS NULL THEN last_name
               WHEN last_name IS NULL THEN first_name
               ELSE first_name || ' ' || last_name
          END) stored,
        active boolean,
        created_at timestamptz,
        updated_at timestamptz
      );
      create table if not exists albums (
        id varchar not null primary key,
        title varchar not null,
        marketing_budget numeric,
        release_date date,
        cover_picture bytea,
        singer_id varchar not null,
        created_at timestamptz,
        updated_at timestamptz,
        constraint fk_albums_singers foreign key (singer_id) references singers (id)
      );
      create table if not exists tracks (
        id varchar not null,
        track_number bigint not null,
        title varchar not null,
        sample_rate float8 not null,
        created_at timestamptz,
        updated_at timestamptz,
        primary key (id, track_number)
      ) interleave in parent albums on delete cascade;
      create table if not exists venues (
        id varchar not null primary key,
        name varchar not null,
        description varchar not null,
        created_at timestamptz,
        updated_at timestamptz
      );
      create table if not exists concerts (
        id varchar not null primary key,
        venue_id varchar not null,
        singer_id varchar not null,
        name varchar not null,
        start_time timestamptz not null,
        end_time timestamptz not null,
        created_at timestamptz,
        updated_at timestamptz,
        constraint fk_concerts_venues foreign key (venue_id) references venues (id),
        constraint fk_concerts_singers foreign key (singer_id) references singers (id),
        constraint chk_end_time_after_start_time check (end_time > start_time)
      );
      -- Create a bit-reversed sequence that will be used to generate identifiers for the ticket_sales table.
      -- See also https://cloud.google.com/spanner/docs/reference/postgresql/data-definition-language#create_sequence
      -- Note that the 'bit_reversed_positive' keyword is required for Spanner,
      -- and is automatically skipped for open-source PostgreSQL.
      create sequence if not exists ticket_sale_seq
          bit_reversed_positive
          skip range 1 1000
          start counter with 50000;
      create table if not exists ticket_sales (
        id bigint not null primary key default nextval('ticket_sale_seq'),
        concert_id varchar not null,
        customer_name varchar not null,
        price decimal not null,
        seats text[],
        created_at timestamptz,
        updated_at timestamptz,
        constraint fk_ticket_sales_concerts foreign key (concert_id) references concerts (id)
      );
      `);
  console.log("Finished creating tables");
}

export async function startPGAdapter(): Promise<StartedTestContainer> {
  console.log("Pulling PGAdapter and Spanner emulator");
  const container: TestContainer = new GenericContainer("gcr.io/cloud-spanner-pg-adapter/pgadapter-emulator")
      .withPullPolicy(PullPolicy.alwaysPull())
      .withExposedPorts(5432);
  console.log("Starting PGAdapter and Spanner emulator");
  return await container.start();
}