diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 272cbd7..b5c68e5 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -4,7 +4,6 @@ about: Create a report to help us improve
title: ''
labels: ''
assignees: ''
-
---
**Describe the bug**
@@ -12,6 +11,7 @@ A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
+
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
@@ -24,15 +24,17 @@ A clear and concise description of what you expected to happen.
If applicable, add screenshots to help explain your problem.
**Desktop (please complete the following information):**
- - OS: [e.g. iOS]
- - Browser [e.g. chrome, safari]
- - Version [e.g. 22]
+
+- OS: [e.g. iOS]
+- Browser [e.g. chrome, safari]
+- Version [e.g. 22]
**Smartphone (please complete the following information):**
- - Device: [e.g. iPhone6]
- - OS: [e.g. iOS8.1]
- - Browser [e.g. stock browser, safari]
- - Version [e.g. 22]
+
+- Device: [e.g. iPhone6]
+- OS: [e.g. iOS8.1]
+- Browser [e.g. stock browser, safari]
+- Version [e.g. 22]
**Additional context**
-Add any other context about the problem here.
\ No newline at end of file
+Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
index 24473de..2f28cea 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -4,7 +4,6 @@ about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''
-
---
**Is your feature request related to a problem? Please describe.**
@@ -17,4 +16,4 @@ A clear and concise description of what you want to happen.
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
-Add any other context or screenshots about the feature request here.
\ No newline at end of file
+Add any other context or screenshots about the feature request here.
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 3014fee..6858cf4 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -1,21 +1,21 @@
## Purpose
-
-
## Tasks
+
-- [ ]
+- [ ]
## Verify
+
--
+-
## Before
-
-
+
## After
-
\ No newline at end of file
+
+
diff --git a/.husky/pre-commit b/.husky/pre-commit
new file mode 100644
index 0000000..5ee7abd
--- /dev/null
+++ b/.husky/pre-commit
@@ -0,0 +1 @@
+pnpm exec lint-staged
diff --git a/.prettierrc.json b/.prettierrc.json
new file mode 100644
index 0000000..e74ed9f
--- /dev/null
+++ b/.prettierrc.json
@@ -0,0 +1,6 @@
+{
+    "trailingComma": "es5",
+    "tabWidth": 4,
+    "semi": false,
+    "singleQuote": true
+}
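The Prettier options above (4-space indentation, no semicolons, single quotes, ES5-style trailing commas) are what drive the purely stylistic rewrites in the README and pnpm-lock.yaml hunks further down. A minimal sketch of applying them locally, assuming the prettier devDependency added later in package.json is installed:

```sh
# Reformat the working tree using the new .prettierrc.json settings
pnpm exec prettier --write .
```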
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 5fb5268..8bec075 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -39,4 +39,4 @@ Before you submit your issue, please [search the issue archive](https://github.c
If you find a bug in the source code, you can help us by [submitting an issue to our GitHub issue tracker](https://github.com/Brayden/starbasedb/issues). Even better, you can submit a Pull Request with a fix!
-_More contribution guidelines around code contribution and PR guidelines coming soon._
\ No newline at end of file
+_More contribution guidelines around code contribution and PR guidelines coming soon._
diff --git a/README.md b/README.md
index 42e5540..1bbf96d 100644
--- a/README.md
+++ b/README.md
@@ -179,46 +179,52 @@ curl --location --request POST 'https://starbasedb.YOUR-ID-HERE.workers.dev/quer
Below is an example HTML script function showing how you can connect via Web Sockets.
```javascript
-let socket;
+let socket
function connectWebSocket() {
- logMessage("Connecting to WebSocket...");
-
- socket = new WebSocket('wss://starbasedb.YOUR-ID-HERE.workers.dev/socket?token=ABC123');
+ logMessage('Connecting to WebSocket...')
- socket.onopen = function() {
- logMessage("WebSocket connection opened.");
- };
+ socket = new WebSocket(
+ 'wss://starbasedb.YOUR-ID-HERE.workers.dev/socket?token=ABC123'
+ )
- socket.onmessage = function(event) {
- logMessage("Received: " + event.data);
- };
+ socket.onopen = function () {
+ logMessage('WebSocket connection opened.')
+ }
+
+ socket.onmessage = function (event) {
+ logMessage('Received: ' + event.data)
+ }
- socket.onclose = function(event) {
- logMessage(`WebSocket closed with code: ${event.code}, reason: ${event.reason}`);
- };
+ socket.onclose = function (event) {
+ logMessage(
+ `WebSocket closed with code: ${event.code}, reason: ${event.reason}`
+ )
+ }
- socket.onerror = function(error) {
- logMessage("WebSocket error: " + error.message);
- };
+ socket.onerror = function (error) {
+ logMessage('WebSocket error: ' + error.message)
+ }
}
function sendMessage() {
- const message = document.getElementById('messageInput').value;
+ const message = document.getElementById('messageInput').value
if (socket && socket.readyState === WebSocket.OPEN) {
- logMessage("Sending: " + message);
-
- socket.send(JSON.stringify({
- sql: message,
- params: [],
- action: 'query'
- }));
+ logMessage('Sending: ' + message)
+
+ socket.send(
+ JSON.stringify({
+ sql: message,
+ params: [],
+ action: 'query',
+ })
+ )
} else {
- logMessage("WebSocket is not open.");
+ logMessage('WebSocket is not open.')
}
}
-window.onload = connectWebSocket;
+window.onload = connectWebSocket
```
SQL Dump
@@ -259,7 +265,6 @@ curl --location 'https://starbasedb.YOUR-ID-HERE.workers.dev/import/dump' \
-
Contributing
We welcome contributions! Please refer to our Contribution Guide for more details.
diff --git a/package.json b/package.json
index 2dd1d79..200e82f 100644
--- a/package.json
+++ b/package.json
@@ -1,27 +1,36 @@
{
- "name": "durable-object-starter",
- "version": "0.0.0",
- "private": true,
- "scripts": {
- "deploy": "wrangler deploy",
- "dev": "wrangler dev",
- "start": "wrangler dev",
- "cf-typegen": "wrangler types"
- },
- "devDependencies": {
- "@cloudflare/workers-types": "^4.20241216.0",
- "@types/pg": "^8.11.10",
- "typescript": "^5.7.2",
- "wrangler": "^3.96.0"
- },
- "dependencies": {
- "@libsql/client": "^0.14.0",
- "@outerbase/sdk": "2.0.0-rc.3",
- "hono": "^4.6.14",
- "jose": "^5.9.6",
- "mongodb": "^6.11.0",
- "mysql2": "^3.11.4",
- "node-sql-parser": "^4.18.0",
- "pg": "^8.13.1"
- }
-}
\ No newline at end of file
+ "name": "durable-object-starter",
+ "version": "0.0.0",
+ "private": true,
+ "scripts": {
+ "deploy": "wrangler deploy",
+ "dev": "wrangler dev",
+ "start": "wrangler dev",
+ "cf-typegen": "wrangler types",
+ "prepare": "husky"
+ },
+ "devDependencies": {
+ "@cloudflare/workers-types": "^4.20241216.0",
+ "@types/pg": "^8.11.10",
+ "husky": "^9.1.7",
+ "lint-staged": "^15.2.11",
+ "prettier": "3.4.2",
+ "typescript": "^5.7.2",
+ "wrangler": "^3.96.0"
+ },
+ "dependencies": {
+ "@libsql/client": "^0.14.0",
+ "@outerbase/sdk": "2.0.0-rc.3",
+ "hono": "^4.6.14",
+ "jose": "^5.9.6",
+ "mongodb": "^6.11.0",
+ "mysql2": "^3.11.4",
+ "node-sql-parser": "^4.18.0",
+ "pg": "^8.13.1"
+ },
+ "lint-staged": {
+ "*.{js,jsx,ts,tsx,json,css,md}": [
+ "prettier --write"
+ ]
+ }
+}
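Taken together, these additions wire up automatic formatting: `pnpm install` runs the `prepare` script, which invokes husky to set up the `.husky/pre-commit` hook; the hook runs `pnpm exec lint-staged`, which applies `prettier --write` to staged files matching the configured glob. A hedged walkthrough of the resulting flow (the file name `example.js` is hypothetical):

```sh
pnpm install                      # "prepare" runs husky, which sets up the .husky/ git hooks
echo "const x = 1;" > example.js  # hypothetical file, used only for illustration
git add example.js
git commit -m "demo"              # .husky/pre-commit runs `pnpm exec lint-staged`,
                                  # which runs `prettier --write` on the staged example.js
```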
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 318941e..ffee8bc 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -1,1474 +1,2592 @@
lockfileVersion: '9.0'
settings:
- autoInstallPeers: true
- excludeLinksFromLockfile: false
+ autoInstallPeers: true
+ excludeLinksFromLockfile: false
importers:
-
- .:
- dependencies:
- '@libsql/client':
- specifier: ^0.14.0
- version: 0.14.0
- '@outerbase/sdk':
- specifier: 2.0.0-rc.3
- version: 2.0.0-rc.3
- hono:
- specifier: ^4.6.14
- version: 4.6.14
- jose:
- specifier: ^5.9.6
- version: 5.9.6
- mongodb:
- specifier: ^6.11.0
- version: 6.12.0
- mysql2:
- specifier: ^3.11.4
- version: 3.11.5
- node-sql-parser:
- specifier: ^4.18.0
- version: 4.18.0
- pg:
- specifier: ^8.13.1
- version: 8.13.1
- devDependencies:
- '@cloudflare/workers-types':
- specifier: ^4.20241216.0
- version: 4.20241216.0
- '@types/pg':
- specifier: ^8.11.10
- version: 8.11.10
- typescript:
- specifier: ^5.7.2
- version: 5.7.2
- wrangler:
- specifier: ^3.96.0
- version: 3.96.0(@cloudflare/workers-types@4.20241216.0)
+ .:
+ dependencies:
+ '@libsql/client':
+ specifier: ^0.14.0
+ version: 0.14.0
+ '@outerbase/sdk':
+ specifier: 2.0.0-rc.3
+ version: 2.0.0-rc.3
+ hono:
+ specifier: ^4.6.14
+ version: 4.6.14
+ jose:
+ specifier: ^5.9.6
+ version: 5.9.6
+ mongodb:
+ specifier: ^6.11.0
+ version: 6.12.0
+ mysql2:
+ specifier: ^3.11.4
+ version: 3.11.5
+ node-sql-parser:
+ specifier: ^4.18.0
+ version: 4.18.0
+ pg:
+ specifier: ^8.13.1
+ version: 8.13.1
+ devDependencies:
+ '@cloudflare/workers-types':
+ specifier: ^4.20241216.0
+ version: 4.20241216.0
+ '@types/pg':
+ specifier: ^8.11.10
+ version: 8.11.10
+ husky:
+ specifier: ^9.1.7
+ version: 9.1.7
+ lint-staged:
+ specifier: ^15.2.11
+ version: 15.2.11
+ prettier:
+ specifier: 3.4.2
+ version: 3.4.2
+ typescript:
+ specifier: ^5.7.2
+ version: 5.7.2
+ wrangler:
+ specifier: ^3.96.0
+ version: 3.96.0(@cloudflare/workers-types@4.20241216.0)
packages:
+ '@cloudflare/kv-asset-handler@0.3.4':
+ resolution:
+ {
+ integrity: sha512-YLPHc8yASwjNkmcDMQMY35yiWjoKAKnhUbPRszBRS0YgH+IXtsMp61j+yTcnCE3oO2DgP0U3iejLC8FTtKDC8Q==,
+ }
+ engines: { node: '>=16.13' }
+
+ '@cloudflare/workerd-darwin-64@1.20241205.0':
+ resolution:
+ {
+ integrity: sha512-TArEZkSZkHJyEwnlWWkSpCI99cF6lJ14OVeEoI9Um/+cD9CKZLM9vCmsLeKglKheJ0KcdCnkA+DbeD15t3VaWg==,
+ }
+ engines: { node: '>=16' }
+ cpu: [x64]
+ os: [darwin]
+
+ '@cloudflare/workerd-darwin-arm64@1.20241205.0':
+ resolution:
+ {
+ integrity: sha512-u5eqKa9QRdA8MugfgCoD+ADDjY6EpKbv3hSYJETmmUh17l7WXjWBzv4pUvOKIX67C0UzMUy4jZYwC53MymhX3w==,
+ }
+ engines: { node: '>=16' }
+ cpu: [arm64]
+ os: [darwin]
+
+ '@cloudflare/workerd-linux-64@1.20241205.0':
+ resolution:
+ {
+ integrity: sha512-OYA7S5zpumMamWEW+IhhBU6YojIEocyE5X/YFPiTOCrDE3dsfr9t6oqNE7hxGm1VAAu+Irtl+a/5LwmBOU681w==,
+ }
+ engines: { node: '>=16' }
+ cpu: [x64]
+ os: [linux]
+
+ '@cloudflare/workerd-linux-arm64@1.20241205.0':
+ resolution:
+ {
+ integrity: sha512-qAzecONjFJGIAVJZKExQ5dlbic0f3d4A+GdKa+H6SoUJtPaWiE3K6WuePo4JOT7W3/Zfh25McmX+MmpMUUcM5Q==,
+ }
+ engines: { node: '>=16' }
+ cpu: [arm64]
+ os: [linux]
+
+ '@cloudflare/workerd-windows-64@1.20241205.0':
+ resolution:
+ {
+ integrity: sha512-BEab+HiUgCdl6GXAT7EI2yaRtDPiRJlB94XLvRvXi1ZcmQqsrq6awGo6apctFo4WUL29V7c09LxmN4HQ3X2Tvg==,
+ }
+ engines: { node: '>=16' }
+ cpu: [x64]
+ os: [win32]
+
+ '@cloudflare/workers-types@4.20241216.0':
+ resolution:
+ {
+ integrity: sha512-PGIINXS+aE9vD2GYyWXfRG+VyxxceRkGDCoPxqwUweh1Bfv75HVotyL/adJ7mRVwh3XZDifGBdTaLReTT+Fcog==,
+ }
+
+ '@cspotcode/source-map-support@0.8.1':
+ resolution:
+ {
+ integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==,
+ }
+ engines: { node: '>=12' }
+
+ '@esbuild-plugins/node-globals-polyfill@0.2.3':
+ resolution:
+ {
+ integrity: sha512-r3MIryXDeXDOZh7ih1l/yE9ZLORCd5e8vWg02azWRGj5SPTuoh69A2AIyn0Z31V/kHBfZ4HgWJ+OK3GTTwLmnw==,
+ }
+ peerDependencies:
+ esbuild: '*'
+
+ '@esbuild-plugins/node-modules-polyfill@0.2.2':
+ resolution:
+ {
+ integrity: sha512-LXV7QsWJxRuMYvKbiznh+U1ilIop3g2TeKRzUxOG5X3YITc8JyyTa90BmLwqqv0YnX4v32CSlG+vsziZp9dMvA==,
+ }
+ peerDependencies:
+ esbuild: '*'
+
+ '@esbuild/android-arm64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-KBMWvEZooR7+kzY0BtbTQn0OAYY7CsiydT63pVEaPtVYF0hXbUaOyZog37DKxK7NF3XacBJOpYT4adIJh+avxA==,
+ }
+ engines: { node: '>=12' }
+ cpu: [arm64]
+ os: [android]
+
+ '@esbuild/android-arm@0.17.19':
+ resolution:
+ {
+ integrity: sha512-rIKddzqhmav7MSmoFCmDIb6e2W57geRsM94gV2l38fzhXMwq7hZoClug9USI2pFRGL06f4IOPHHpFNOkWieR8A==,
+ }
+ engines: { node: '>=12' }
+ cpu: [arm]
+ os: [android]
+
+ '@esbuild/android-x64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-uUTTc4xGNDT7YSArp/zbtmbhO0uEEK9/ETW29Wk1thYUJBz3IVnvgEiEwEa9IeLyvnpKrWK64Utw2bgUmDveww==,
+ }
+ engines: { node: '>=12' }
+ cpu: [x64]
+ os: [android]
+
+ '@esbuild/darwin-arm64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg==,
+ }
+ engines: { node: '>=12' }
+ cpu: [arm64]
+ os: [darwin]
+
+ '@esbuild/darwin-x64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-IJM4JJsLhRYr9xdtLytPLSH9k/oxR3boaUIYiHkAawtwNOXKE8KoU8tMvryogdcT8AU+Bflmh81Xn6Q0vTZbQw==,
+ }
+ engines: { node: '>=12' }
+ cpu: [x64]
+ os: [darwin]
+
+ '@esbuild/freebsd-arm64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-pBwbc7DufluUeGdjSU5Si+P3SoMF5DQ/F/UmTSb8HXO80ZEAJmrykPyzo1IfNbAoaqw48YRpv8shwd1NoI0jcQ==,
+ }
+ engines: { node: '>=12' }
+ cpu: [arm64]
+ os: [freebsd]
+
+ '@esbuild/freebsd-x64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-4lu+n8Wk0XlajEhbEffdy2xy53dpR06SlzvhGByyg36qJw6Kpfk7cp45DR/62aPH9mtJRmIyrXAS5UWBrJT6TQ==,
+ }
+ engines: { node: '>=12' }
+ cpu: [x64]
+ os: [freebsd]
+
+ '@esbuild/linux-arm64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-ct1Tg3WGwd3P+oZYqic+YZF4snNl2bsnMKRkb3ozHmnM0dGWuxcPTTntAF6bOP0Sp4x0PjSF+4uHQ1xvxfRKqg==,
+ }
+ engines: { node: '>=12' }
+ cpu: [arm64]
+ os: [linux]
+
+ '@esbuild/linux-arm@0.17.19':
+ resolution:
+ {
+ integrity: sha512-cdmT3KxjlOQ/gZ2cjfrQOtmhG4HJs6hhvm3mWSRDPtZ/lP5oe8FWceS10JaSJC13GBd4eH/haHnqf7hhGNLerA==,
+ }
+ engines: { node: '>=12' }
+ cpu: [arm]
+ os: [linux]
+
+ '@esbuild/linux-ia32@0.17.19':
+ resolution:
+ {
+ integrity: sha512-w4IRhSy1VbsNxHRQpeGCHEmibqdTUx61Vc38APcsRbuVgK0OPEnQ0YD39Brymn96mOx48Y2laBQGqgZ0j9w6SQ==,
+ }
+ engines: { node: '>=12' }
+ cpu: [ia32]
+ os: [linux]
+
+ '@esbuild/linux-loong64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-2iAngUbBPMq439a+z//gE+9WBldoMp1s5GWsUSgqHLzLJ9WoZLZhpwWuym0u0u/4XmZ3gpHmzV84PonE+9IIdQ==,
+ }
+ engines: { node: '>=12' }
+ cpu: [loong64]
+ os: [linux]
+
+ '@esbuild/linux-mips64el@0.17.19':
+ resolution:
+ {
+ integrity: sha512-LKJltc4LVdMKHsrFe4MGNPp0hqDFA1Wpt3jE1gEyM3nKUvOiO//9PheZZHfYRfYl6AwdTH4aTcXSqBerX0ml4A==,
+ }
+ engines: { node: '>=12' }
+ cpu: [mips64el]
+ os: [linux]
+
+ '@esbuild/linux-ppc64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-/c/DGybs95WXNS8y3Ti/ytqETiW7EU44MEKuCAcpPto3YjQbyK3IQVKfF6nbghD7EcLUGl0NbiL5Rt5DMhn5tg==,
+ }
+ engines: { node: '>=12' }
+ cpu: [ppc64]
+ os: [linux]
+
+ '@esbuild/linux-riscv64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-FC3nUAWhvFoutlhAkgHf8f5HwFWUL6bYdvLc/TTuxKlvLi3+pPzdZiFKSWz/PF30TB1K19SuCxDTI5KcqASJqA==,
+ }
+ engines: { node: '>=12' }
+ cpu: [riscv64]
+ os: [linux]
+
+ '@esbuild/linux-s390x@0.17.19':
+ resolution:
+ {
+ integrity: sha512-IbFsFbxMWLuKEbH+7sTkKzL6NJmG2vRyy6K7JJo55w+8xDk7RElYn6xvXtDW8HCfoKBFK69f3pgBJSUSQPr+4Q==,
+ }
+ engines: { node: '>=12' }
+ cpu: [s390x]
+ os: [linux]
+
+ '@esbuild/linux-x64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-68ngA9lg2H6zkZcyp22tsVt38mlhWde8l3eJLWkyLrp4HwMUr3c1s/M2t7+kHIhvMjglIBrFpncX1SzMckomGw==,
+ }
+ engines: { node: '>=12' }
+ cpu: [x64]
+ os: [linux]
+
+ '@esbuild/netbsd-x64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-CwFq42rXCR8TYIjIfpXCbRX0rp1jo6cPIUPSaWwzbVI4aOfX96OXY8M6KNmtPcg7QjYeDmN+DD0Wp3LaBOLf4Q==,
+ }
+ engines: { node: '>=12' }
+ cpu: [x64]
+ os: [netbsd]
+
+ '@esbuild/openbsd-x64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-cnq5brJYrSZ2CF6c35eCmviIN3k3RczmHz8eYaVlNasVqsNY+JKohZU5MKmaOI+KkllCdzOKKdPs762VCPC20g==,
+ }
+ engines: { node: '>=12' }
+ cpu: [x64]
+ os: [openbsd]
+
+ '@esbuild/sunos-x64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-vCRT7yP3zX+bKWFeP/zdS6SqdWB8OIpaRq/mbXQxTGHnIxspRtigpkUcDMlSCOejlHowLqII7K2JKevwyRP2rg==,
+ }
+ engines: { node: '>=12' }
+ cpu: [x64]
+ os: [sunos]
+
+ '@esbuild/win32-arm64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-yYx+8jwowUstVdorcMdNlzklLYhPxjniHWFKgRqH7IFlUEa0Umu3KuYplf1HUZZ422e3NU9F4LGb+4O0Kdcaag==,
+ }
+ engines: { node: '>=12' }
+ cpu: [arm64]
+ os: [win32]
+
+ '@esbuild/win32-ia32@0.17.19':
+ resolution:
+ {
+ integrity: sha512-eggDKanJszUtCdlVs0RB+h35wNlb5v4TWEkq4vZcmVt5u/HiDZrTXe2bWFQUez3RgNHwx/x4sk5++4NSSicKkw==,
+ }
+ engines: { node: '>=12' }
+ cpu: [ia32]
+ os: [win32]
+
+ '@esbuild/win32-x64@0.17.19':
+ resolution:
+ {
+ integrity: sha512-lAhycmKnVOuRYNtRtatQR1LPQf2oYCkRGkSFnseDAKPl8lu5SOsK/e1sXe5a0Pc5kHIHe6P2I/ilntNv2xf3cA==,
+ }
+ engines: { node: '>=12' }
+ cpu: [x64]
+ os: [win32]
+
+ '@fastify/busboy@2.1.1':
+ resolution:
+ {
+ integrity: sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==,
+ }
+ engines: { node: '>=14' }
+
+ '@jridgewell/resolve-uri@3.1.2':
+ resolution:
+ {
+ integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==,
+ }
+ engines: { node: '>=6.0.0' }
+
+ '@jridgewell/sourcemap-codec@1.5.0':
+ resolution:
+ {
+ integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==,
+ }
+
+ '@jridgewell/trace-mapping@0.3.9':
+ resolution:
+ {
+ integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==,
+ }
+
+ '@libsql/client@0.14.0':
+ resolution:
+ {
+ integrity: sha512-/9HEKfn6fwXB5aTEEoMeFh4CtG0ZzbncBb1e++OCdVpgKZ/xyMsIVYXm0w7Pv4RUel803vE6LwniB3PqD72R0Q==,
+ }
+
+ '@libsql/core@0.14.0':
+ resolution:
+ {
+ integrity: sha512-nhbuXf7GP3PSZgdCY2Ecj8vz187ptHlZQ0VRc751oB2C1W8jQUXKKklvt7t1LJiUTQBVJuadF628eUk+3cRi4Q==,
+ }
+
+ '@libsql/darwin-arm64@0.4.7':
+ resolution:
+ {
+ integrity: sha512-yOL742IfWUlUevnI5PdnIT4fryY3LYTdLm56bnY0wXBw7dhFcnjuA7jrH3oSVz2mjZTHujxoITgAE7V6Z+eAbg==,
+ }
+ cpu: [arm64]
+ os: [darwin]
+
+ '@libsql/darwin-x64@0.4.7':
+ resolution:
+ {
+ integrity: sha512-ezc7V75+eoyyH07BO9tIyJdqXXcRfZMbKcLCeF8+qWK5nP8wWuMcfOVywecsXGRbT99zc5eNra4NEx6z5PkSsA==,
+ }
+ cpu: [x64]
+ os: [darwin]
+
+ '@libsql/hrana-client@0.7.0':
+ resolution:
+ {
+ integrity: sha512-OF8fFQSkbL7vJY9rfuegK1R7sPgQ6kFMkDamiEccNUvieQ+3urzfDFI616oPl8V7T9zRmnTkSjMOImYCAVRVuw==,
+ }
+
+ '@libsql/isomorphic-fetch@0.3.1':
+ resolution:
+ {
+ integrity: sha512-6kK3SUK5Uu56zPq/Las620n5aS9xJq+jMBcNSOmjhNf/MUvdyji4vrMTqD7ptY7/4/CAVEAYDeotUz60LNQHtw==,
+ }
+ engines: { node: '>=18.0.0' }
+
+ '@libsql/isomorphic-ws@0.1.5':
+ resolution:
+ {
+ integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==,
+ }
+
+ '@libsql/linux-arm64-gnu@0.4.7':
+ resolution:
+ {
+ integrity: sha512-WlX2VYB5diM4kFfNaYcyhw5y+UJAI3xcMkEUJZPtRDEIu85SsSFrQ+gvoKfcVh76B//ztSeEX2wl9yrjF7BBCA==,
+ }
+ cpu: [arm64]
+ os: [linux]
+
+ '@libsql/linux-arm64-musl@0.4.7':
+ resolution:
+ {
+ integrity: sha512-6kK9xAArVRlTCpWeqnNMCoXW1pe7WITI378n4NpvU5EJ0Ok3aNTIC2nRPRjhro90QcnmLL1jPcrVwO4WD1U0xw==,
+ }
+ cpu: [arm64]
+ os: [linux]
+
+ '@libsql/linux-x64-gnu@0.4.7':
+ resolution:
+ {
+ integrity: sha512-CMnNRCmlWQqqzlTw6NeaZXzLWI8bydaXDke63JTUCvu8R+fj/ENsLrVBtPDlxQ0wGsYdXGlrUCH8Qi9gJep0yQ==,
+ }
+ cpu: [x64]
+ os: [linux]
+
+ '@libsql/linux-x64-musl@0.4.7':
+ resolution:
+ {
+ integrity: sha512-nI6tpS1t6WzGAt1Kx1n1HsvtBbZ+jHn0m7ogNNT6pQHZQj7AFFTIMeDQw/i/Nt5H38np1GVRNsFe99eSIMs9XA==,
+ }
+ cpu: [x64]
+ os: [linux]
+
+ '@libsql/win32-x64-msvc@0.4.7':
+ resolution:
+ {
+ integrity: sha512-7pJzOWzPm6oJUxml+PCDRzYQ4A1hTMHAciTAHfFK4fkbDZX33nWPVG7Y3vqdKtslcwAzwmrNDc6sXy2nwWnbiw==,
+ }
+ cpu: [x64]
+ os: [win32]
+
+ '@mongodb-js/saslprep@1.1.9':
+ resolution:
+ {
+ integrity: sha512-tVkljjeEaAhCqTzajSdgbQ6gE6f3oneVwa3iXR6csiEwXXOFsiC6Uh9iAjAhXPtqa/XMDHWjjeNH/77m/Yq2dw==,
+ }
+
+ '@neon-rs/load@0.0.4':
+ resolution:
+ {
+ integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==,
+ }
+
+ '@outerbase/sdk@2.0.0-rc.3':
+ resolution:
+ {
+ integrity: sha512-bmV4hlzs5sz01IDWNHdJC2ZD4ezM4UEwG1fEQi59yByHRtPOVDjK7Z5iQ8e1MbR0814vdhv9hMcUKP8SJDA7vQ==,
+ }
+ hasBin: true
+
+ '@types/node-forge@1.3.11':
+ resolution:
+ {
+ integrity: sha512-FQx220y22OKNTqaByeBGqHWYz4cl94tpcxeFdvBo3wjG6XPBuZ0BNgNZRV5J5TFmmcsJ4IzsLkmGRiQbnYsBEQ==,
+ }
+
+ '@types/node@22.10.2':
+ resolution:
+ {
+ integrity: sha512-Xxr6BBRCAOQixvonOye19wnzyDiUtTeqldOOmj3CkeblonbccA12PFwlufvRdrpjXxqnmUaeiU5EOA+7s5diUQ==,
+ }
+
+ '@types/pg@8.11.10':
+ resolution:
+ {
+ integrity: sha512-LczQUW4dbOQzsH2RQ5qoeJ6qJPdrcM/DcMLoqWQkMLMsq83J5lAX3LXjdkWdpscFy67JSOWDnh7Ny/sPFykmkg==,
+ }
+
+ '@types/webidl-conversions@7.0.3':
+ resolution:
+ {
+ integrity: sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==,
+ }
+
+ '@types/whatwg-url@11.0.5':
+ resolution:
+ {
+ integrity: sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==,
+ }
+
+ '@types/ws@8.5.13':
+ resolution:
+ {
+ integrity: sha512-osM/gWBTPKgHV8XkTunnegTRIsvF6owmf5w+JtAfOw472dptdm0dlGv4xCt6GwQRcC2XVOvvRE/0bAoQcL2QkA==,
+ }
+
+ acorn-walk@8.3.4:
+ resolution:
+ {
+ integrity: sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==,
+ }
+ engines: { node: '>=0.4.0' }
+
+ acorn@8.14.0:
+ resolution:
+ {
+ integrity: sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==,
+ }
+ engines: { node: '>=0.4.0' }
+ hasBin: true
+
+ ansi-escapes@7.0.0:
+ resolution:
+ {
+ integrity: sha512-GdYO7a61mR0fOlAsvC9/rIHf7L96sBc6dEWzeOu+KAea5bZyQRPIpojrVoI4AXGJS/ycu/fBTdLrUkA4ODrvjw==,
+ }
+ engines: { node: '>=18' }
+
+ ansi-regex@6.1.0:
+ resolution:
+ {
+ integrity: sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==,
+ }
+ engines: { node: '>=12' }
+
+ ansi-styles@6.2.1:
+ resolution:
+ {
+ integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==,
+ }
+ engines: { node: '>=12' }
+
+ as-table@1.0.55:
+ resolution:
+ {
+ integrity: sha512-xvsWESUJn0JN421Xb9MQw6AsMHRCUknCe0Wjlxvjud80mU4E6hQf1A6NzQKcYNmYw62MfzEtXc+badstZP3JpQ==,
+ }
+
+ aws-ssl-profiles@1.1.2:
+ resolution:
+ {
+ integrity: sha512-NZKeq9AfyQvEeNlN0zSYAaWrmBffJh3IELMZfRpJVWgrpEbtEpnjvzqBPf+mxoI287JohRDoa+/nsfqqiZmF6g==,
+ }
+ engines: { node: '>= 6.0.0' }
+
+ big-integer@1.6.52:
+ resolution:
+ {
+ integrity: sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==,
+ }
+ engines: { node: '>=0.6' }
+
+ blake3-wasm@2.1.5:
+ resolution:
+ {
+ integrity: sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g==,
+ }
+
+ braces@3.0.3:
+ resolution:
+ {
+ integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==,
+ }
+ engines: { node: '>=8' }
+
+ bson@6.10.1:
+ resolution:
+ {
+ integrity: sha512-P92xmHDQjSKPLHqFxefqMxASNq/aWJMEZugpCjf+AF/pgcUpMMQCg7t7+ewko0/u8AapvF3luf/FoehddEK+sA==,
+ }
+ engines: { node: '>=16.20.1' }
+
+ capnp-ts@0.7.0:
+ resolution:
+ {
+ integrity: sha512-XKxXAC3HVPv7r674zP0VC3RTXz+/JKhfyw94ljvF80yynK6VkTnqE3jMuN8b3dUVmmc43TjyxjW4KTsmB3c86g==,
+ }
+
+ chalk@5.3.0:
+ resolution:
+ {
+ integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==,
+ }
+ engines: { node: ^12.17.0 || ^14.13 || >=16.0.0 }
+
+ chokidar@4.0.2:
+ resolution:
+ {
+ integrity: sha512-/b57FK+bblSU+dfewfFe0rT1YjVDfOmeLQwCAuC+vwvgLkXboATqqmy+Ipux6JrF6L5joe5CBnFOw+gLWH6yKg==,
+ }
+ engines: { node: '>= 14.16.0' }
+
+ cli-cursor@5.0.0:
+ resolution:
+ {
+ integrity: sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==,
+ }
+ engines: { node: '>=18' }
+
+ cli-truncate@4.0.0:
+ resolution:
+ {
+ integrity: sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA==,
+ }
+ engines: { node: '>=18' }
+
+ colorette@2.0.20:
+ resolution:
+ {
+ integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==,
+ }
+
+ commander@12.1.0:
+ resolution:
+ {
+ integrity: sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==,
+ }
+ engines: { node: '>=18' }
+
+ cookie@0.7.2:
+ resolution:
+ {
+ integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==,
+ }
+ engines: { node: '>= 0.6' }
+
+ cross-spawn@7.0.6:
+ resolution:
+ {
+ integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==,
+ }
+ engines: { node: '>= 8' }
+
+ data-uri-to-buffer@2.0.2:
+ resolution:
+ {
+ integrity: sha512-ND9qDTLc6diwj+Xe5cdAgVTbLVdXbtxTJRXRhli8Mowuaan+0EJOtdqJ0QCHNSSPyoXGx9HX2/VMnKeC34AChA==,
+ }
+
+ data-uri-to-buffer@4.0.1:
+ resolution:
+ {
+ integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==,
+ }
+ engines: { node: '>= 12' }
+
+ date-fns@4.1.0:
+ resolution:
+ {
+ integrity: sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==,
+ }
+
+ debug@4.4.0:
+ resolution:
+ {
+ integrity: sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==,
+ }
+ engines: { node: '>=6.0' }
+ peerDependencies:
+ supports-color: '*'
+ peerDependenciesMeta:
+ supports-color:
+ optional: true
+
+ defu@6.1.4:
+ resolution:
+ {
+ integrity: sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==,
+ }
+
+ denque@2.1.0:
+ resolution:
+ {
+ integrity: sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==,
+ }
+ engines: { node: '>=0.10' }
+
+ detect-libc@2.0.2:
+ resolution:
+ {
+ integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==,
+ }
+ engines: { node: '>=8' }
+
+ emoji-regex@10.4.0:
+ resolution:
+ {
+ integrity: sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==,
+ }
+
+ environment@1.1.0:
+ resolution:
+ {
+ integrity: sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==,
+ }
+ engines: { node: '>=18' }
+
+ esbuild@0.17.19:
+ resolution:
+ {
+ integrity: sha512-XQ0jAPFkK/u3LcVRcvVHQcTIqD6E2H1fvZMA5dQPSOWb3suUbWbfbRf94pjc0bNzRYLfIrDRQXr7X+LHIm5oHw==,
+ }
+ engines: { node: '>=12' }
+ hasBin: true
+
+ escape-string-regexp@4.0.0:
+ resolution:
+ {
+ integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==,
+ }
+ engines: { node: '>=10' }
+
+ estree-walker@0.6.1:
+ resolution:
+ {
+ integrity: sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==,
+ }
+
+ eventemitter3@5.0.1:
+ resolution:
+ {
+ integrity: sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==,
+ }
+
+ execa@8.0.1:
+ resolution:
+ {
+ integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==,
+ }
+ engines: { node: '>=16.17' }
+
+ exit-hook@2.2.1:
+ resolution:
+ {
+ integrity: sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw==,
+ }
+ engines: { node: '>=6' }
+
+ fetch-blob@3.2.0:
+ resolution:
+ {
+ integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==,
+ }
+ engines: { node: ^12.20 || >= 14.13 }
+
+ fill-range@7.1.1:
+ resolution:
+ {
+ integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==,
+ }
+ engines: { node: '>=8' }
+
+ formdata-polyfill@4.0.10:
+ resolution:
+ {
+ integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==,
+ }
+ engines: { node: '>=12.20.0' }
+
+ fsevents@2.3.3:
+ resolution:
+ {
+ integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==,
+ }
+ engines: { node: ^8.16.0 || ^10.6.0 || >=11.0.0 }
+ os: [darwin]
+
+ function-bind@1.1.2:
+ resolution:
+ {
+ integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==,
+ }
+
+ generate-function@2.3.1:
+ resolution:
+ {
+ integrity: sha512-eeB5GfMNeevm/GRYq20ShmsaGcmI81kIX2K9XQx5miC8KdHaC6Jm0qQ8ZNeGOi7wYB8OsdxKs+Y2oVuTFuVwKQ==,
+ }
+
+ get-east-asian-width@1.3.0:
+ resolution:
+ {
+ integrity: sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ==,
+ }
+ engines: { node: '>=18' }
+
+ get-source@2.0.12:
+ resolution:
+ {
+ integrity: sha512-X5+4+iD+HoSeEED+uwrQ07BOQr0kEDFMVqqpBuI+RaZBpBpHCuXxo70bjar6f0b0u/DQJsJ7ssurpP0V60Az+w==,
+ }
+
+ get-stream@8.0.1:
+ resolution:
+ {
+ integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==,
+ }
+ engines: { node: '>=16' }
+
+ glob-to-regexp@0.4.1:
+ resolution:
+ {
+ integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==,
+ }
+
+ handlebars@4.7.8:
+ resolution:
+ {
+ integrity: sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==,
+ }
+ engines: { node: '>=0.4.7' }
+ hasBin: true
+
+ hasown@2.0.2:
+ resolution:
+ {
+ integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==,
+ }
+ engines: { node: '>= 0.4' }
+
+ hono@4.6.14:
+ resolution:
+ {
+ integrity: sha512-j4VkyUp2xazGJ8eCCLN1Vm/bxdvm/j5ZuU9AIjLu9vapn2M44p9L3Ktr9Vnb2RN2QtcR/wVjZVMlT5k7GJQgPw==,
+ }
+ engines: { node: '>=16.9.0' }
+
+ human-signals@5.0.0:
+ resolution:
+ {
+ integrity: sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==,
+ }
+ engines: { node: '>=16.17.0' }
+
+ husky@9.1.7:
+ resolution:
+ {
+ integrity: sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA==,
+ }
+ engines: { node: '>=18' }
+ hasBin: true
+
+ iconv-lite@0.6.3:
+ resolution:
+ {
+ integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==,
+ }
+ engines: { node: '>=0.10.0' }
+
+ is-core-module@2.16.0:
+ resolution:
+ {
+ integrity: sha512-urTSINYfAYgcbLb0yDQ6egFm6h3Mo1DcF9EkyXSRjjzdHbsulg01qhwWuXdOoUBuTkbQ80KDboXa0vFJ+BDH+g==,
+ }
+ engines: { node: '>= 0.4' }
+
+ is-fullwidth-code-point@4.0.0:
+ resolution:
+ {
+ integrity: sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==,
+ }
+ engines: { node: '>=12' }
+
+ is-fullwidth-code-point@5.0.0:
+ resolution:
+ {
+ integrity: sha512-OVa3u9kkBbw7b8Xw5F9P+D/T9X+Z4+JruYVNapTjPYZYUznQ5YfWeFkOj606XYYW8yugTfC8Pj0hYqvi4ryAhA==,
+ }
+ engines: { node: '>=18' }
+
+ is-number@7.0.0:
+ resolution:
+ {
+ integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==,
+ }
+ engines: { node: '>=0.12.0' }
+
+ is-property@1.0.2:
+ resolution:
+ {
+ integrity: sha512-Ks/IoX00TtClbGQr4TWXemAnktAQvYB7HzcCxDGqEZU6oCmb2INHuOoKxbtR+HFkmYWBKv/dOZtGRiAjDhj92g==,
+ }
+
+ is-stream@3.0.0:
+ resolution:
+ {
+ integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==,
+ }
+ engines: { node: ^12.20.0 || ^14.13.1 || >=16.0.0 }
+
+ isexe@2.0.0:
+ resolution:
+ {
+ integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==,
+ }
+
+ itty-time@1.0.6:
+ resolution:
+ {
+ integrity: sha512-+P8IZaLLBtFv8hCkIjcymZOp4UJ+xW6bSlQsXGqrkmJh7vSiMFSlNne0mCYagEE0N7HDNR5jJBRxwN0oYv61Rw==,
+ }
+
+ jose@5.9.6:
+ resolution:
+ {
+ integrity: sha512-AMlnetc9+CV9asI19zHmrgS/WYsWUwCn2R7RzlbJWD7F9eWYUTGyBmU9o6PxngtLGOiDGPRu+Uc4fhKzbpteZQ==,
+ }
+
+ js-base64@3.7.7:
+ resolution:
+ {
+ integrity: sha512-7rCnleh0z2CkXhH67J8K1Ytz0b2Y+yxTPL+/KOJoa20hfnVQ/3/T6W/KflYI4bRHRagNeXeU2bkNGI3v1oS/lw==,
+ }
+
+ libsql@0.4.7:
+ resolution:
+ {
+ integrity: sha512-T9eIRCs6b0J1SHKYIvD8+KCJMcWZ900iZyxdnSCdqxN12Z1ijzT+jY5nrk72Jw4B0HGzms2NgpryArlJqvc3Lw==,
+ }
+ os: [darwin, linux, win32]
+
+ lilconfig@3.1.3:
+ resolution:
+ {
+ integrity: sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==,
+ }
+ engines: { node: '>=14' }
+
+ lint-staged@15.2.11:
+ resolution:
+ {
+ integrity: sha512-Ev6ivCTYRTGs9ychvpVw35m/bcNDuBN+mnTeObCL5h+boS5WzBEC6LHI4I9F/++sZm1m+J2LEiy0gxL/R9TBqQ==,
+ }
+ engines: { node: '>=18.12.0' }
+ hasBin: true
+
+ listr2@8.2.5:
+ resolution:
+ {
+ integrity: sha512-iyAZCeyD+c1gPyE9qpFu8af0Y+MRtmKOncdGoA2S5EY8iFq99dmmvkNnHiWo+pj0s7yH7l3KPIgee77tKpXPWQ==,
+ }
+ engines: { node: '>=18.0.0' }
+
+ log-update@6.1.0:
+ resolution:
+ {
+ integrity: sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==,
+ }
+ engines: { node: '>=18' }
+
+ long@5.2.3:
+ resolution:
+ {
+ integrity: sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==,
+ }
+
+ lru-cache@7.18.3:
+ resolution:
+ {
+ integrity: sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==,
+ }
+ engines: { node: '>=12' }
+
+ lru.min@1.1.1:
+ resolution:
+ {
+ integrity: sha512-FbAj6lXil6t8z4z3j0E5mfRlPzxkySotzUHwRXjlpRh10vc6AI6WN62ehZj82VG7M20rqogJ0GLwar2Xa05a8Q==,
+ }
+ engines: { bun: '>=1.0.0', deno: '>=1.30.0', node: '>=8.0.0' }
+
+ magic-string@0.25.9:
+ resolution:
+ {
+ integrity: sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==,
+ }
+
+ memory-pager@1.5.0:
+ resolution:
+ {
+ integrity: sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==,
+ }
+
+ merge-stream@2.0.0:
+ resolution:
+ {
+ integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==,
+ }
+
+ micromatch@4.0.8:
+ resolution:
+ {
+ integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==,
+ }
+ engines: { node: '>=8.6' }
+
+ mime@3.0.0:
+ resolution:
+ {
+ integrity: sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==,
+ }
+ engines: { node: '>=10.0.0' }
+ hasBin: true
+
+ mimic-fn@4.0.0:
+ resolution:
+ {
+ integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==,
+ }
+ engines: { node: '>=12' }
+
+ mimic-function@5.0.1:
+ resolution:
+ {
+ integrity: sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==,
+ }
+ engines: { node: '>=18' }
+
+ miniflare@3.20241205.0:
+ resolution:
+ {
+ integrity: sha512-Z0cTtIf6ZrcAJ3SrOI9EUM3s4dkGhNeU6Ubl8sroYhsPVD+rtz3m5+p6McHFWCkcMff1o60X5XEKVTmkz0gbpA==,
+ }
+ engines: { node: '>=16.13' }
+ hasBin: true
+
+ minimist@1.2.8:
+ resolution:
+ {
+ integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==,
+ }
+
+ mongodb-connection-string-url@3.0.1:
+ resolution:
+ {
+ integrity: sha512-XqMGwRX0Lgn05TDB4PyG2h2kKO/FfWJyCzYQbIhXUxz7ETt0I/FqHjUeqj37irJ+Dl1ZtU82uYyj14u2XsZKfg==,
+ }
+
+ mongodb@6.12.0:
+ resolution:
+ {
+ integrity: sha512-RM7AHlvYfS7jv7+BXund/kR64DryVI+cHbVAy9P61fnb1RcWZqOW1/Wj2YhqMCx+MuYhqTRGv7AwHBzmsCKBfA==,
+ }
+ engines: { node: '>=16.20.1' }
+ peerDependencies:
+ '@aws-sdk/credential-providers': ^3.188.0
+ '@mongodb-js/zstd': ^1.1.0 || ^2.0.0
+ gcp-metadata: ^5.2.0
+ kerberos: ^2.0.1
+ mongodb-client-encryption: '>=6.0.0 <7'
+ snappy: ^7.2.2
+ socks: ^2.7.1
+ peerDependenciesMeta:
+ '@aws-sdk/credential-providers':
+ optional: true
+ '@mongodb-js/zstd':
+ optional: true
+ gcp-metadata:
+ optional: true
+ kerberos:
+ optional: true
+ mongodb-client-encryption:
+ optional: true
+ snappy:
+ optional: true
+ socks:
+ optional: true
+
+ ms@2.1.3:
+ resolution:
+ {
+ integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==,
+ }
+
+ mustache@4.2.0:
+ resolution:
+ {
+ integrity: sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==,
+ }
+ hasBin: true
+
+ mysql2@3.11.5:
+ resolution:
+ {
+ integrity: sha512-0XFu8rUmFN9vC0ME36iBvCUObftiMHItrYFhlCRvFWbLgpNqtC4Br/NmZX1HNCszxT0GGy5QtP+k3Q3eCJPaYA==,
+ }
+ engines: { node: '>= 8.0' }
+
+ named-placeholders@1.1.3:
+ resolution:
+ {
+ integrity: sha512-eLoBxg6wE/rZkJPhU/xRX1WTpkFEwDJEN96oxFrTsqBdbT5ec295Q+CoHrL9IT0DipqKhmGcaZmwOt8OON5x1w==,
+ }
+ engines: { node: '>=12.0.0' }
+
+ nanoid@3.3.8:
+ resolution:
+ {
+ integrity: sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==,
+ }
+ engines: { node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1 }
+ hasBin: true
+
+ neo-async@2.6.2:
+ resolution:
+ {
+ integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==,
+ }
+
+ node-domexception@1.0.0:
+ resolution:
+ {
+ integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==,
+ }
+ engines: { node: '>=10.5.0' }
+
+ node-fetch@3.3.2:
+ resolution:
+ {
+ integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==,
+ }
+ engines: { node: ^12.20.0 || ^14.13.1 || >=16.0.0 }
+
+ node-forge@1.3.1:
+ resolution:
+ {
+ integrity: sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==,
+ }
+ engines: { node: '>= 6.13.0' }
+
+ node-sql-parser@4.18.0:
+ resolution:
+ {
+ integrity: sha512-2YEOR5qlI1zUFbGMLKNfsrR5JUvFg9LxIRVE+xJe962pfVLH0rnItqLzv96XVs1Y1UIR8FxsXAuvX/lYAWZ2BQ==,
+ }
+ engines: { node: '>=8' }
+
+ npm-run-path@5.3.0:
+ resolution:
+ {
+ integrity: sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==,
+ }
+ engines: { node: ^12.20.0 || ^14.13.1 || >=16.0.0 }
+
+ obuf@1.1.2:
+ resolution:
+ {
+ integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==,
+ }
+
+ ohash@1.1.4:
+ resolution:
+ {
+ integrity: sha512-FlDryZAahJmEF3VR3w1KogSEdWX3WhA5GPakFx4J81kEAiHyLMpdLLElS8n8dfNadMgAne/MywcvmogzscVt4g==,
+ }
+
+ onetime@6.0.0:
+ resolution:
+ {
+ integrity: sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==,
+ }
+ engines: { node: '>=12' }
+
+ onetime@7.0.0:
+ resolution:
+ {
+ integrity: sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==,
+ }
+ engines: { node: '>=18' }
+
+ path-key@3.1.1:
+ resolution:
+ {
+ integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==,
+ }
+ engines: { node: '>=8' }
+
+ path-key@4.0.0:
+ resolution:
+ {
+ integrity: sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==,
+ }
+ engines: { node: '>=12' }
+
+ path-parse@1.0.7:
+ resolution:
+ {
+ integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==,
+ }
+
+ path-to-regexp@6.3.0:
+ resolution:
+ {
+ integrity: sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==,
+ }
+
+ pathe@1.1.2:
+ resolution:
+ {
+ integrity: sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==,
+ }
+
+ pg-cloudflare@1.1.1:
+ resolution:
+ {
+ integrity: sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==,
+ }
+
+ pg-connection-string@2.7.0:
+ resolution:
+ {
+ integrity: sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA==,
+ }
+
+ pg-int8@1.0.1:
+ resolution:
+ {
+ integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==,
+ }
+ engines: { node: '>=4.0.0' }
+
+ pg-numeric@1.0.2:
+ resolution:
+ {
+ integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==,
+ }
+ engines: { node: '>=4' }
+
+ pg-pool@3.7.0:
+ resolution:
+ {
+ integrity: sha512-ZOBQForurqh4zZWjrgSwwAtzJ7QiRX0ovFkZr2klsen3Nm0aoh33Ls0fzfv3imeH/nw/O27cjdz5kzYJfeGp/g==,
+ }
+ peerDependencies:
+ pg: '>=8.0'
+
+ pg-protocol@1.7.0:
+ resolution:
+ {
+ integrity: sha512-hTK/mE36i8fDDhgDFjy6xNOG+LCorxLG3WO17tku+ij6sVHXh1jQUJ8hYAnRhNla4QVD2H8er/FOjc/+EgC6yQ==,
+ }
+
+ pg-types@2.2.0:
+ resolution:
+ {
+ integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==,
+ }
+ engines: { node: '>=4' }
+
+ pg-types@4.0.2:
+ resolution:
+ {
+ integrity: sha512-cRL3JpS3lKMGsKaWndugWQoLOCoP+Cic8oseVcbr0qhPzYD5DWXK+RZ9LY9wxRf7RQia4SCwQlXk0q6FCPrVng==,
+ }
+ engines: { node: '>=10' }
+
+ pg@8.13.1:
+ resolution:
+ {
+ integrity: sha512-OUir1A0rPNZlX//c7ksiu7crsGZTKSOXJPgtNiHGIlC9H0lO+NC6ZDYksSgBYY/thSWhnSRBv8w1lieNNGATNQ==,
+ }
+ engines: { node: '>= 8.0.0' }
+ peerDependencies:
+ pg-native: '>=3.0.1'
+ peerDependenciesMeta:
+ pg-native:
+ optional: true
+
+ pgpass@1.0.5:
+ resolution:
+ {
+ integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==,
+ }
+
+ picomatch@2.3.1:
+ resolution:
+ {
+ integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==,
+ }
+ engines: { node: '>=8.6' }
+
+ pidtree@0.6.0:
+ resolution:
+ {
+ integrity: sha512-eG2dWTVw5bzqGRztnHExczNxt5VGsE6OwTeCG3fdUf9KBsZzO3R5OIIIzWR+iZA0NtZ+RDVdaoE2dK1cn6jH4g==,
+ }
+ engines: { node: '>=0.10' }
+ hasBin: true
+
+ postgres-array@2.0.0:
+ resolution:
+ {
+ integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==,
+ }
+ engines: { node: '>=4' }
+
+ postgres-array@3.0.2:
+ resolution:
+ {
+ integrity: sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==,
+ }
+ engines: { node: '>=12' }
+
+ postgres-bytea@1.0.0:
+ resolution:
+ {
+ integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==,
+ }
+ engines: { node: '>=0.10.0' }
+
+ postgres-bytea@3.0.0:
+ resolution:
+ {
+ integrity: sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==,
+ }
+ engines: { node: '>= 6' }
+
+ postgres-date@1.0.7:
+ resolution:
+ {
+ integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==,
+ }
+ engines: { node: '>=0.10.0' }
+
+ postgres-date@2.1.0:
+ resolution:
+ {
+ integrity: sha512-K7Juri8gtgXVcDfZttFKVmhglp7epKb1K4pgrkLxehjqkrgPhfG6OO8LHLkfaqkbpjNRnra018XwAr1yQFWGcA==,
+ }
+ engines: { node: '>=12' }
+
+ postgres-interval@1.2.0:
+ resolution:
+ {
+ integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==,
+ }
+ engines: { node: '>=0.10.0' }
+
+ postgres-interval@3.0.0:
+ resolution:
+ {
+ integrity: sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==,
+ }
+ engines: { node: '>=12' }
+
+ postgres-range@1.1.4:
+ resolution:
+ {
+ integrity: sha512-i/hbxIE9803Alj/6ytL7UHQxRvZkI9O4Sy+J3HGc4F4oo/2eQAjTSNJ0bfxyse3bH0nuVesCk+3IRLaMtG3H6w==,
+ }
+
+ prettier@3.4.2:
+ resolution:
+ {
+ integrity: sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ==,
+ }
+ engines: { node: '>=14' }
+ hasBin: true
+
+ printable-characters@1.0.42:
+ resolution:
+ {
+ integrity: sha512-dKp+C4iXWK4vVYZmYSd0KBH5F/h1HoZRsbJ82AVKRO3PEo8L4lBS/vLwhVtpwwuYcoIsVY+1JYKR268yn480uQ==,
+ }
+
+ promise-limit@2.7.0:
+ resolution:
+ {
+ integrity: sha512-7nJ6v5lnJsXwGprnGXga4wx6d1POjvi5Qmf1ivTRxTjH4Z/9Czja/UCMLVmB9N93GeWOU93XaFaEt6jbuoagNw==,
+ }
+
+ punycode@2.3.1:
+ resolution:
+ {
+ integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==,
+ }
+ engines: { node: '>=6' }
+
+ readdirp@4.0.2:
+ resolution:
+ {
+ integrity: sha512-yDMz9g+VaZkqBYS/ozoBJwaBhTbZo3UNYQHNRw1D3UFQB8oHB4uS/tAODO+ZLjGWmUbKnIlOWO+aaIiAxrUWHA==,
+ }
+ engines: { node: '>= 14.16.0' }
+
+ resolve@1.22.9:
+ resolution:
+ {
+ integrity: sha512-QxrmX1DzraFIi9PxdG5VkRfRwIgjwyud+z/iBwfRRrVmHc+P9Q7u2lSSpQ6bjr2gy5lrqIiU9vb6iAeGf2400A==,
+ }
+ hasBin: true
+
+ restore-cursor@5.1.0:
+ resolution:
+ {
+ integrity: sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==,
+ }
+ engines: { node: '>=18' }
+
+ rfdc@1.4.1:
+ resolution:
+ {
+ integrity: sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==,
+ }
+
+ rollup-plugin-inject@3.0.2:
+ resolution:
+ {
+ integrity: sha512-ptg9PQwzs3orn4jkgXJ74bfs5vYz1NCZlSQMBUA0wKcGp5i5pA1AO3fOUEte8enhGUC+iapTCzEWw2jEFFUO/w==,
+ }
+ deprecated: This package has been deprecated and is no longer maintained. Please use @rollup/plugin-inject.
+
+ rollup-plugin-node-polyfills@0.2.1:
+ resolution:
+ {
+ integrity: sha512-4kCrKPTJ6sK4/gLL/U5QzVT8cxJcofO0OU74tnB19F40cmuAKSzH5/siithxlofFEjwvw1YAhPmbvGNA6jEroA==,
+ }
+
+ rollup-pluginutils@2.8.2:
+ resolution:
+ {
+ integrity: sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==,
+ }
+
+ safer-buffer@2.1.2:
+ resolution:
+ {
+ integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==,
+ }
+
+ selfsigned@2.4.1:
+ resolution:
+ {
+ integrity: sha512-th5B4L2U+eGLq1TVh7zNRGBapioSORUeymIydxgFpwww9d2qyKvtuPU2jJuHvYAwwqi2Y596QBL3eEqcPEYL8Q==,
+ }
+ engines: { node: '>=10' }
+
+ seq-queue@0.0.5:
+ resolution:
+ {
+ integrity: sha512-hr3Wtp/GZIc/6DAGPDcV4/9WoZhjrkXsi5B/07QgX8tsdc6ilr7BFM6PM6rbdAX1kFSDYeZGLipIZZKyQP0O5Q==,
+ }
+
+ shebang-command@2.0.0:
+ resolution:
+ {
+ integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==,
+ }
+ engines: { node: '>=8' }
+
+ shebang-regex@3.0.0:
+ resolution:
+ {
+ integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==,
+ }
+ engines: { node: '>=8' }
+
+ signal-exit@4.1.0:
+ resolution:
+ {
+ integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==,
+ }
+ engines: { node: '>=14' }
+
+ slice-ansi@5.0.0:
+ resolution:
+ {
+ integrity: sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==,
+ }
+ engines: { node: '>=12' }
+
+ slice-ansi@7.1.0:
+ resolution:
+ {
+ integrity: sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg==,
+ }
+ engines: { node: '>=18' }
+
+ source-map@0.6.1:
+ resolution:
+ {
+ integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==,
+ }
+ engines: { node: '>=0.10.0' }
+
+ sourcemap-codec@1.4.8:
+ resolution:
+ {
+ integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==,
+ }
+ deprecated: Please use @jridgewell/sourcemap-codec instead
+
+ sparse-bitfield@3.0.3:
+ resolution:
+ {
+ integrity: sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==,
+ }
+
+ split2@4.2.0:
+ resolution:
+ {
+ integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==,
+ }
+ engines: { node: '>= 10.x' }
+
+ sqlstring@2.3.3:
+ resolution:
+ {
+ integrity: sha512-qC9iz2FlN7DQl3+wjwn3802RTyjCx7sDvfQEXchwa6CWOx07/WVfh91gBmQ9fahw8snwGEWU3xGzOt4tFyHLxg==,
+ }
+ engines: { node: '>= 0.6' }
+
+ stacktracey@2.1.8:
+ resolution:
+ {
+ integrity: sha512-Kpij9riA+UNg7TnphqjH7/CzctQ/owJGNbFkfEeve4Z4uxT5+JapVLFXcsurIfN34gnTWZNJ/f7NMG0E8JDzTw==,
+ }
+
+ stoppable@1.1.0:
+ resolution:
+ {
+ integrity: sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==,
+ }
+ engines: { node: '>=4', npm: '>=6' }
+
+ string-argv@0.3.2:
+ resolution:
+ {
+ integrity: sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==,
+ }
+ engines: { node: '>=0.6.19' }
+
+ string-width@7.2.0:
+ resolution:
+ {
+ integrity: sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==,
+ }
+ engines: { node: '>=18' }
+
+ strip-ansi@7.1.0:
+ resolution:
+ {
+ integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==,
+ }
+ engines: { node: '>=12' }
+
+ strip-final-newline@3.0.0:
+ resolution:
+ {
+ integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==,
+ }
+ engines: { node: '>=12' }
+
+ supports-preserve-symlinks-flag@1.0.0:
+ resolution:
+ {
+ integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==,
+ }
+ engines: { node: '>= 0.4' }
+
+ to-regex-range@5.0.1:
+ resolution:
+ {
+ integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==,
+ }
+ engines: { node: '>=8.0' }
+
+ tr46@4.1.1:
+ resolution:
+ {
+ integrity: sha512-2lv/66T7e5yNyhAAC4NaKe5nVavzuGJQVVtRYLyQ2OI8tsJ61PMLlelehb0wi2Hx6+hT/OJUWZcw8MjlSRnxvw==,
+ }
+ engines: { node: '>=14' }
+
+ tslib@2.8.1:
+ resolution:
+ {
+ integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==,
+ }
+
+ typescript@5.7.2:
+ resolution:
+ {
+ integrity: sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==,
+ }
+ engines: { node: '>=14.17' }
+ hasBin: true
+
+ ufo@1.5.4:
+ resolution:
+ {
+ integrity: sha512-UsUk3byDzKd04EyoZ7U4DOlxQaD14JUKQl6/P7wiX4FNvUfm3XL246n9W5AmqwW5RSFJ27NAuM0iLscAOYUiGQ==,
+ }
+
+ uglify-js@3.19.3:
+ resolution:
+ {
+ integrity: sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==,
+ }
+ engines: { node: '>=0.8.0' }
+ hasBin: true
+
+ undici-types@6.20.0:
+ resolution:
+ {
+ integrity: sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==,
+ }
+
+ undici@5.28.4:
+ resolution:
+ {
+ integrity: sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==,
+ }
+ engines: { node: '>=14.0' }
+
+ unenv-nightly@2.0.0-20241204-140205-a5d5190:
+ resolution:
+ {
+ integrity: sha512-jpmAytLeiiW01pl5bhVn9wYJ4vtiLdhGe10oXlJBuQEX8mxjxO8BlEXGHU4vr4yEikjFP1wsomTHt/CLU8kUwg==,
+ }
+
+ web-streams-polyfill@3.3.3:
+ resolution:
+ {
+ integrity: sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==,
+ }
+ engines: { node: '>= 8' }
+
+ webidl-conversions@7.0.0:
+ resolution:
+ {
+ integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==,
+ }
+ engines: { node: '>=12' }
+
+ whatwg-url@13.0.0:
+ resolution:
+ {
+ integrity: sha512-9WWbymnqj57+XEuqADHrCJ2eSXzn8WXIW/YSGaZtb2WKAInQ6CHfaUUcTyyver0p8BDg5StLQq8h1vtZuwmOig==,
+ }
+ engines: { node: '>=16' }
+
+ which@2.0.2:
+ resolution:
+ {
+ integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==,
+ }
+ engines: { node: '>= 8' }
+ hasBin: true
+
+ wordwrap@1.0.0:
+ resolution:
+ {
+ integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==,
+ }
+
+ workerd@1.20241205.0:
+ resolution:
+ {
+ integrity: sha512-vso/2n0c5SdBDWiD+Sx5gM7unA6SiZXRVUHDqH1euoP/9mFVHZF8icoYsNLB87b/TX8zNgpae+I5N/xFpd9v0g==,
+ }
+ engines: { node: '>=16' }
+ hasBin: true
+
+ wrangler@3.96.0:
+ resolution:
+ {
+ integrity: sha512-KjbHTUnwTa5eKl3hzv2h6nHBfAsbUkdurL7f6Y288/Bdn6tcEis13jLVR/nw/eWa3tNCBG1xOMZJboUyzWcC1g==,
+ }
+ engines: { node: '>=16.17.0' }
+ hasBin: true
+ peerDependencies:
+ '@cloudflare/workers-types': ^4.20241205.0
+ peerDependenciesMeta:
+ '@cloudflare/workers-types':
+ optional: true
+
+ wrap-ansi@9.0.0:
+ resolution:
+ {
+ integrity: sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==,
+ }
+ engines: { node: '>=18' }
+
+ ws@8.18.0:
+ resolution:
+ {
+ integrity: sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==,
+ }
+ engines: { node: '>=10.0.0' }
+ peerDependencies:
+ bufferutil: ^4.0.1
+ utf-8-validate: '>=5.0.2'
+ peerDependenciesMeta:
+ bufferutil:
+ optional: true
+ utf-8-validate:
+ optional: true
+
+ xtend@4.0.2:
+ resolution:
+ {
+ integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==,
+ }
+ engines: { node: '>=0.4' }
+
+ xxhash-wasm@1.1.0:
+ resolution:
+ {
+ integrity: sha512-147y/6YNh+tlp6nd/2pWq38i9h6mz/EuQ6njIrmW8D1BS5nCqs0P6DG+m6zTGnNz5I+uhZ0SHxBs9BsPrwcKDA==,
+ }
+
+ yaml@2.6.1:
+ resolution:
+ {
+ integrity: sha512-7r0XPzioN/Q9kXBro/XPnA6kznR73DHq+GXh5ON7ZozRO6aMjbmiBuKste2wslTFkC5d1dw0GooOCepZXJ2SAg==,
+ }
+ engines: { node: '>= 14' }
+ hasBin: true
+
+ youch@3.3.4:
+ resolution:
+ {
+ integrity: sha512-UeVBXie8cA35DS6+nBkls68xaBBXCye0CNznrhszZjTbRVnJKQuNsyLKBTTL4ln1o1rh2PKtv35twV7irj5SEg==,
+ }
+
+ zod@3.24.1:
+ resolution:
+ {
+ integrity: sha512-muH7gBL9sI1nciMZV67X5fTKKBLtwpZ5VBp1vsOQzj1MhrBZ4wlVCm3gedKZWLp0Oyel8sIGfeiz54Su+OVT+A==,
+ }
- '@cloudflare/kv-asset-handler@0.3.4':
- resolution: {integrity: sha512-YLPHc8yASwjNkmcDMQMY35yiWjoKAKnhUbPRszBRS0YgH+IXtsMp61j+yTcnCE3oO2DgP0U3iejLC8FTtKDC8Q==}
- engines: {node: '>=16.13'}
-
- '@cloudflare/workerd-darwin-64@1.20241205.0':
- resolution: {integrity: sha512-TArEZkSZkHJyEwnlWWkSpCI99cF6lJ14OVeEoI9Um/+cD9CKZLM9vCmsLeKglKheJ0KcdCnkA+DbeD15t3VaWg==}
- engines: {node: '>=16'}
- cpu: [x64]
- os: [darwin]
-
- '@cloudflare/workerd-darwin-arm64@1.20241205.0':
- resolution: {integrity: sha512-u5eqKa9QRdA8MugfgCoD+ADDjY6EpKbv3hSYJETmmUh17l7WXjWBzv4pUvOKIX67C0UzMUy4jZYwC53MymhX3w==}
- engines: {node: '>=16'}
- cpu: [arm64]
- os: [darwin]
-
- '@cloudflare/workerd-linux-64@1.20241205.0':
- resolution: {integrity: sha512-OYA7S5zpumMamWEW+IhhBU6YojIEocyE5X/YFPiTOCrDE3dsfr9t6oqNE7hxGm1VAAu+Irtl+a/5LwmBOU681w==}
- engines: {node: '>=16'}
- cpu: [x64]
- os: [linux]
-
- '@cloudflare/workerd-linux-arm64@1.20241205.0':
- resolution: {integrity: sha512-qAzecONjFJGIAVJZKExQ5dlbic0f3d4A+GdKa+H6SoUJtPaWiE3K6WuePo4JOT7W3/Zfh25McmX+MmpMUUcM5Q==}
- engines: {node: '>=16'}
- cpu: [arm64]
- os: [linux]
-
- '@cloudflare/workerd-windows-64@1.20241205.0':
- resolution: {integrity: sha512-BEab+HiUgCdl6GXAT7EI2yaRtDPiRJlB94XLvRvXi1ZcmQqsrq6awGo6apctFo4WUL29V7c09LxmN4HQ3X2Tvg==}
- engines: {node: '>=16'}
- cpu: [x64]
- os: [win32]
-
- '@cloudflare/workers-types@4.20241216.0':
- resolution: {integrity: sha512-PGIINXS+aE9vD2GYyWXfRG+VyxxceRkGDCoPxqwUweh1Bfv75HVotyL/adJ7mRVwh3XZDifGBdTaLReTT+Fcog==}
-
- '@cspotcode/source-map-support@0.8.1':
- resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==}
- engines: {node: '>=12'}
-
- '@esbuild-plugins/node-globals-polyfill@0.2.3':
- resolution: {integrity: sha512-r3MIryXDeXDOZh7ih1l/yE9ZLORCd5e8vWg02azWRGj5SPTuoh69A2AIyn0Z31V/kHBfZ4HgWJ+OK3GTTwLmnw==}
- peerDependencies:
- esbuild: '*'
-
- '@esbuild-plugins/node-modules-polyfill@0.2.2':
- resolution: {integrity: sha512-LXV7QsWJxRuMYvKbiznh+U1ilIop3g2TeKRzUxOG5X3YITc8JyyTa90BmLwqqv0YnX4v32CSlG+vsziZp9dMvA==}
- peerDependencies:
- esbuild: '*'
-
- '@esbuild/android-arm64@0.17.19':
- resolution: {integrity: sha512-KBMWvEZooR7+kzY0BtbTQn0OAYY7CsiydT63pVEaPtVYF0hXbUaOyZog37DKxK7NF3XacBJOpYT4adIJh+avxA==}
- engines: {node: '>=12'}
- cpu: [arm64]
- os: [android]
-
- '@esbuild/android-arm@0.17.19':
- resolution: {integrity: sha512-rIKddzqhmav7MSmoFCmDIb6e2W57geRsM94gV2l38fzhXMwq7hZoClug9USI2pFRGL06f4IOPHHpFNOkWieR8A==}
- engines: {node: '>=12'}
- cpu: [arm]
- os: [android]
-
- '@esbuild/android-x64@0.17.19':
- resolution: {integrity: sha512-uUTTc4xGNDT7YSArp/zbtmbhO0uEEK9/ETW29Wk1thYUJBz3IVnvgEiEwEa9IeLyvnpKrWK64Utw2bgUmDveww==}
- engines: {node: '>=12'}
- cpu: [x64]
- os: [android]
-
- '@esbuild/darwin-arm64@0.17.19':
- resolution: {integrity: sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg==}
- engines: {node: '>=12'}
- cpu: [arm64]
- os: [darwin]
-
- '@esbuild/darwin-x64@0.17.19':
- resolution: {integrity: sha512-IJM4JJsLhRYr9xdtLytPLSH9k/oxR3boaUIYiHkAawtwNOXKE8KoU8tMvryogdcT8AU+Bflmh81Xn6Q0vTZbQw==}
- engines: {node: '>=12'}
- cpu: [x64]
- os: [darwin]
-
- '@esbuild/freebsd-arm64@0.17.19':
- resolution: {integrity: sha512-pBwbc7DufluUeGdjSU5Si+P3SoMF5DQ/F/UmTSb8HXO80ZEAJmrykPyzo1IfNbAoaqw48YRpv8shwd1NoI0jcQ==}
- engines: {node: '>=12'}
- cpu: [arm64]
- os: [freebsd]
-
- '@esbuild/freebsd-x64@0.17.19':
- resolution: {integrity: sha512-4lu+n8Wk0XlajEhbEffdy2xy53dpR06SlzvhGByyg36qJw6Kpfk7cp45DR/62aPH9mtJRmIyrXAS5UWBrJT6TQ==}
- engines: {node: '>=12'}
- cpu: [x64]
- os: [freebsd]
-
- '@esbuild/linux-arm64@0.17.19':
- resolution: {integrity: sha512-ct1Tg3WGwd3P+oZYqic+YZF4snNl2bsnMKRkb3ozHmnM0dGWuxcPTTntAF6bOP0Sp4x0PjSF+4uHQ1xvxfRKqg==}
- engines: {node: '>=12'}
- cpu: [arm64]
- os: [linux]
-
- '@esbuild/linux-arm@0.17.19':
- resolution: {integrity: sha512-cdmT3KxjlOQ/gZ2cjfrQOtmhG4HJs6hhvm3mWSRDPtZ/lP5oe8FWceS10JaSJC13GBd4eH/haHnqf7hhGNLerA==}
- engines: {node: '>=12'}
- cpu: [arm]
- os: [linux]
-
- '@esbuild/linux-ia32@0.17.19':
- resolution: {integrity: sha512-w4IRhSy1VbsNxHRQpeGCHEmibqdTUx61Vc38APcsRbuVgK0OPEnQ0YD39Brymn96mOx48Y2laBQGqgZ0j9w6SQ==}
- engines: {node: '>=12'}
- cpu: [ia32]
- os: [linux]
-
- '@esbuild/linux-loong64@0.17.19':
- resolution: {integrity: sha512-2iAngUbBPMq439a+z//gE+9WBldoMp1s5GWsUSgqHLzLJ9WoZLZhpwWuym0u0u/4XmZ3gpHmzV84PonE+9IIdQ==}
- engines: {node: '>=12'}
- cpu: [loong64]
- os: [linux]
-
- '@esbuild/linux-mips64el@0.17.19':
- resolution: {integrity: sha512-LKJltc4LVdMKHsrFe4MGNPp0hqDFA1Wpt3jE1gEyM3nKUvOiO//9PheZZHfYRfYl6AwdTH4aTcXSqBerX0ml4A==}
- engines: {node: '>=12'}
- cpu: [mips64el]
- os: [linux]
-
- '@esbuild/linux-ppc64@0.17.19':
- resolution: {integrity: sha512-/c/DGybs95WXNS8y3Ti/ytqETiW7EU44MEKuCAcpPto3YjQbyK3IQVKfF6nbghD7EcLUGl0NbiL5Rt5DMhn5tg==}
- engines: {node: '>=12'}
- cpu: [ppc64]
- os: [linux]
-
- '@esbuild/linux-riscv64@0.17.19':
- resolution: {integrity: sha512-FC3nUAWhvFoutlhAkgHf8f5HwFWUL6bYdvLc/TTuxKlvLi3+pPzdZiFKSWz/PF30TB1K19SuCxDTI5KcqASJqA==}
- engines: {node: '>=12'}
- cpu: [riscv64]
- os: [linux]
-
- '@esbuild/linux-s390x@0.17.19':
- resolution: {integrity: sha512-IbFsFbxMWLuKEbH+7sTkKzL6NJmG2vRyy6K7JJo55w+8xDk7RElYn6xvXtDW8HCfoKBFK69f3pgBJSUSQPr+4Q==}
- engines: {node: '>=12'}
- cpu: [s390x]
- os: [linux]
-
- '@esbuild/linux-x64@0.17.19':
- resolution: {integrity: sha512-68ngA9lg2H6zkZcyp22tsVt38mlhWde8l3eJLWkyLrp4HwMUr3c1s/M2t7+kHIhvMjglIBrFpncX1SzMckomGw==}
- engines: {node: '>=12'}
- cpu: [x64]
- os: [linux]
-
- '@esbuild/netbsd-x64@0.17.19':
- resolution: {integrity: sha512-CwFq42rXCR8TYIjIfpXCbRX0rp1jo6cPIUPSaWwzbVI4aOfX96OXY8M6KNmtPcg7QjYeDmN+DD0Wp3LaBOLf4Q==}
- engines: {node: '>=12'}
- cpu: [x64]
- os: [netbsd]
-
- '@esbuild/openbsd-x64@0.17.19':
- resolution: {integrity: sha512-cnq5brJYrSZ2CF6c35eCmviIN3k3RczmHz8eYaVlNasVqsNY+JKohZU5MKmaOI+KkllCdzOKKdPs762VCPC20g==}
- engines: {node: '>=12'}
- cpu: [x64]
- os: [openbsd]
-
- '@esbuild/sunos-x64@0.17.19':
- resolution: {integrity: sha512-vCRT7yP3zX+bKWFeP/zdS6SqdWB8OIpaRq/mbXQxTGHnIxspRtigpkUcDMlSCOejlHowLqII7K2JKevwyRP2rg==}
- engines: {node: '>=12'}
- cpu: [x64]
- os: [sunos]
-
- '@esbuild/win32-arm64@0.17.19':
- resolution: {integrity: sha512-yYx+8jwowUstVdorcMdNlzklLYhPxjniHWFKgRqH7IFlUEa0Umu3KuYplf1HUZZ422e3NU9F4LGb+4O0Kdcaag==}
- engines: {node: '>=12'}
- cpu: [arm64]
- os: [win32]
-
- '@esbuild/win32-ia32@0.17.19':
- resolution: {integrity: sha512-eggDKanJszUtCdlVs0RB+h35wNlb5v4TWEkq4vZcmVt5u/HiDZrTXe2bWFQUez3RgNHwx/x4sk5++4NSSicKkw==}
- engines: {node: '>=12'}
- cpu: [ia32]
- os: [win32]
-
- '@esbuild/win32-x64@0.17.19':
- resolution: {integrity: sha512-lAhycmKnVOuRYNtRtatQR1LPQf2oYCkRGkSFnseDAKPl8lu5SOsK/e1sXe5a0Pc5kHIHe6P2I/ilntNv2xf3cA==}
- engines: {node: '>=12'}
- cpu: [x64]
- os: [win32]
-
- '@fastify/busboy@2.1.1':
- resolution: {integrity: sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==}
- engines: {node: '>=14'}
-
- '@jridgewell/resolve-uri@3.1.2':
- resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==}
- engines: {node: '>=6.0.0'}
-
- '@jridgewell/sourcemap-codec@1.5.0':
- resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==}
-
- '@jridgewell/trace-mapping@0.3.9':
- resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==}
-
- '@libsql/client@0.14.0':
- resolution: {integrity: sha512-/9HEKfn6fwXB5aTEEoMeFh4CtG0ZzbncBb1e++OCdVpgKZ/xyMsIVYXm0w7Pv4RUel803vE6LwniB3PqD72R0Q==}
-
- '@libsql/core@0.14.0':
- resolution: {integrity: sha512-nhbuXf7GP3PSZgdCY2Ecj8vz187ptHlZQ0VRc751oB2C1W8jQUXKKklvt7t1LJiUTQBVJuadF628eUk+3cRi4Q==}
-
- '@libsql/darwin-arm64@0.4.7':
- resolution: {integrity: sha512-yOL742IfWUlUevnI5PdnIT4fryY3LYTdLm56bnY0wXBw7dhFcnjuA7jrH3oSVz2mjZTHujxoITgAE7V6Z+eAbg==}
- cpu: [arm64]
- os: [darwin]
-
- '@libsql/darwin-x64@0.4.7':
- resolution: {integrity: sha512-ezc7V75+eoyyH07BO9tIyJdqXXcRfZMbKcLCeF8+qWK5nP8wWuMcfOVywecsXGRbT99zc5eNra4NEx6z5PkSsA==}
- cpu: [x64]
- os: [darwin]
-
- '@libsql/hrana-client@0.7.0':
- resolution: {integrity: sha512-OF8fFQSkbL7vJY9rfuegK1R7sPgQ6kFMkDamiEccNUvieQ+3urzfDFI616oPl8V7T9zRmnTkSjMOImYCAVRVuw==}
-
- '@libsql/isomorphic-fetch@0.3.1':
- resolution: {integrity: sha512-6kK3SUK5Uu56zPq/Las620n5aS9xJq+jMBcNSOmjhNf/MUvdyji4vrMTqD7ptY7/4/CAVEAYDeotUz60LNQHtw==}
- engines: {node: '>=18.0.0'}
-
- '@libsql/isomorphic-ws@0.1.5':
- resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==}
-
- '@libsql/linux-arm64-gnu@0.4.7':
- resolution: {integrity: sha512-WlX2VYB5diM4kFfNaYcyhw5y+UJAI3xcMkEUJZPtRDEIu85SsSFrQ+gvoKfcVh76B//ztSeEX2wl9yrjF7BBCA==}
- cpu: [arm64]
- os: [linux]
-
- '@libsql/linux-arm64-musl@0.4.7':
- resolution: {integrity: sha512-6kK9xAArVRlTCpWeqnNMCoXW1pe7WITI378n4NpvU5EJ0Ok3aNTIC2nRPRjhro90QcnmLL1jPcrVwO4WD1U0xw==}
- cpu: [arm64]
- os: [linux]
-
- '@libsql/linux-x64-gnu@0.4.7':
- resolution: {integrity: sha512-CMnNRCmlWQqqzlTw6NeaZXzLWI8bydaXDke63JTUCvu8R+fj/ENsLrVBtPDlxQ0wGsYdXGlrUCH8Qi9gJep0yQ==}
- cpu: [x64]
- os: [linux]
-
- '@libsql/linux-x64-musl@0.4.7':
- resolution: {integrity: sha512-nI6tpS1t6WzGAt1Kx1n1HsvtBbZ+jHn0m7ogNNT6pQHZQj7AFFTIMeDQw/i/Nt5H38np1GVRNsFe99eSIMs9XA==}
- cpu: [x64]
- os: [linux]
-
- '@libsql/win32-x64-msvc@0.4.7':
- resolution: {integrity: sha512-7pJzOWzPm6oJUxml+PCDRzYQ4A1hTMHAciTAHfFK4fkbDZX33nWPVG7Y3vqdKtslcwAzwmrNDc6sXy2nwWnbiw==}
- cpu: [x64]
- os: [win32]
-
- '@mongodb-js/saslprep@1.1.9':
- resolution: {integrity: sha512-tVkljjeEaAhCqTzajSdgbQ6gE6f3oneVwa3iXR6csiEwXXOFsiC6Uh9iAjAhXPtqa/XMDHWjjeNH/77m/Yq2dw==}
-
- '@neon-rs/load@0.0.4':
- resolution: {integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==}
-
- '@outerbase/sdk@2.0.0-rc.3':
- resolution: {integrity: sha512-bmV4hlzs5sz01IDWNHdJC2ZD4ezM4UEwG1fEQi59yByHRtPOVDjK7Z5iQ8e1MbR0814vdhv9hMcUKP8SJDA7vQ==}
- hasBin: true
-
- '@types/node-forge@1.3.11':
- resolution: {integrity: sha512-FQx220y22OKNTqaByeBGqHWYz4cl94tpcxeFdvBo3wjG6XPBuZ0BNgNZRV5J5TFmmcsJ4IzsLkmGRiQbnYsBEQ==}
-
- '@types/node@22.10.2':
- resolution: {integrity: sha512-Xxr6BBRCAOQixvonOye19wnzyDiUtTeqldOOmj3CkeblonbccA12PFwlufvRdrpjXxqnmUaeiU5EOA+7s5diUQ==}
-
- '@types/pg@8.11.10':
- resolution: {integrity: sha512-LczQUW4dbOQzsH2RQ5qoeJ6qJPdrcM/DcMLoqWQkMLMsq83J5lAX3LXjdkWdpscFy67JSOWDnh7Ny/sPFykmkg==}
-
- '@types/webidl-conversions@7.0.3':
- resolution: {integrity: sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==}
-
- '@types/whatwg-url@11.0.5':
- resolution: {integrity: sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==}
-
- '@types/ws@8.5.13':
- resolution: {integrity: sha512-osM/gWBTPKgHV8XkTunnegTRIsvF6owmf5w+JtAfOw472dptdm0dlGv4xCt6GwQRcC2XVOvvRE/0bAoQcL2QkA==}
-
- acorn-walk@8.3.4:
- resolution: {integrity: sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==}
- engines: {node: '>=0.4.0'}
-
- acorn@8.14.0:
- resolution: {integrity: sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==}
- engines: {node: '>=0.4.0'}
- hasBin: true
-
- as-table@1.0.55:
- resolution: {integrity: sha512-xvsWESUJn0JN421Xb9MQw6AsMHRCUknCe0Wjlxvjud80mU4E6hQf1A6NzQKcYNmYw62MfzEtXc+badstZP3JpQ==}
-
- aws-ssl-profiles@1.1.2:
- resolution: {integrity: sha512-NZKeq9AfyQvEeNlN0zSYAaWrmBffJh3IELMZfRpJVWgrpEbtEpnjvzqBPf+mxoI287JohRDoa+/nsfqqiZmF6g==}
- engines: {node: '>= 6.0.0'}
-
- big-integer@1.6.52:
- resolution: {integrity: sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==}
- engines: {node: '>=0.6'}
-
- blake3-wasm@2.1.5:
- resolution: {integrity: sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g==}
-
- bson@6.10.1:
- resolution: {integrity: sha512-P92xmHDQjSKPLHqFxefqMxASNq/aWJMEZugpCjf+AF/pgcUpMMQCg7t7+ewko0/u8AapvF3luf/FoehddEK+sA==}
- engines: {node: '>=16.20.1'}
-
- capnp-ts@0.7.0:
- resolution: {integrity: sha512-XKxXAC3HVPv7r674zP0VC3RTXz+/JKhfyw94ljvF80yynK6VkTnqE3jMuN8b3dUVmmc43TjyxjW4KTsmB3c86g==}
-
- chokidar@4.0.2:
- resolution: {integrity: sha512-/b57FK+bblSU+dfewfFe0rT1YjVDfOmeLQwCAuC+vwvgLkXboATqqmy+Ipux6JrF6L5joe5CBnFOw+gLWH6yKg==}
- engines: {node: '>= 14.16.0'}
-
- cookie@0.7.2:
- resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==}
- engines: {node: '>= 0.6'}
-
- data-uri-to-buffer@2.0.2:
- resolution: {integrity: sha512-ND9qDTLc6diwj+Xe5cdAgVTbLVdXbtxTJRXRhli8Mowuaan+0EJOtdqJ0QCHNSSPyoXGx9HX2/VMnKeC34AChA==}
-
- data-uri-to-buffer@4.0.1:
- resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==}
- engines: {node: '>= 12'}
-
- date-fns@4.1.0:
- resolution: {integrity: sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==}
+snapshots:
+ '@cloudflare/kv-asset-handler@0.3.4':
+ dependencies:
+ mime: 3.0.0
- debug@4.4.0:
- resolution: {integrity: sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==}
- engines: {node: '>=6.0'}
- peerDependencies:
- supports-color: '*'
- peerDependenciesMeta:
- supports-color:
+ '@cloudflare/workerd-darwin-64@1.20241205.0':
optional: true
- defu@6.1.4:
- resolution: {integrity: sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==}
-
- denque@2.1.0:
- resolution: {integrity: sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==}
- engines: {node: '>=0.10'}
-
- detect-libc@2.0.2:
- resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==}
- engines: {node: '>=8'}
-
- esbuild@0.17.19:
- resolution: {integrity: sha512-XQ0jAPFkK/u3LcVRcvVHQcTIqD6E2H1fvZMA5dQPSOWb3suUbWbfbRf94pjc0bNzRYLfIrDRQXr7X+LHIm5oHw==}
- engines: {node: '>=12'}
- hasBin: true
-
- escape-string-regexp@4.0.0:
- resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==}
- engines: {node: '>=10'}
-
- estree-walker@0.6.1:
- resolution: {integrity: sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==}
-
- exit-hook@2.2.1:
- resolution: {integrity: sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw==}
- engines: {node: '>=6'}
-
- fetch-blob@3.2.0:
- resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==}
- engines: {node: ^12.20 || >= 14.13}
-
- formdata-polyfill@4.0.10:
- resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==}
- engines: {node: '>=12.20.0'}
-
- fsevents@2.3.3:
- resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
- engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
- os: [darwin]
-
- function-bind@1.1.2:
- resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==}
-
- generate-function@2.3.1:
- resolution: {integrity: sha512-eeB5GfMNeevm/GRYq20ShmsaGcmI81kIX2K9XQx5miC8KdHaC6Jm0qQ8ZNeGOi7wYB8OsdxKs+Y2oVuTFuVwKQ==}
-
- get-source@2.0.12:
- resolution: {integrity: sha512-X5+4+iD+HoSeEED+uwrQ07BOQr0kEDFMVqqpBuI+RaZBpBpHCuXxo70bjar6f0b0u/DQJsJ7ssurpP0V60Az+w==}
-
- glob-to-regexp@0.4.1:
- resolution: {integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==}
-
- handlebars@4.7.8:
- resolution: {integrity: sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==}
- engines: {node: '>=0.4.7'}
- hasBin: true
-
- hasown@2.0.2:
- resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==}
- engines: {node: '>= 0.4'}
-
- hono@4.6.14:
- resolution: {integrity: sha512-j4VkyUp2xazGJ8eCCLN1Vm/bxdvm/j5ZuU9AIjLu9vapn2M44p9L3Ktr9Vnb2RN2QtcR/wVjZVMlT5k7GJQgPw==}
- engines: {node: '>=16.9.0'}
-
- iconv-lite@0.6.3:
- resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==}
- engines: {node: '>=0.10.0'}
-
- is-core-module@2.16.0:
- resolution: {integrity: sha512-urTSINYfAYgcbLb0yDQ6egFm6h3Mo1DcF9EkyXSRjjzdHbsulg01qhwWuXdOoUBuTkbQ80KDboXa0vFJ+BDH+g==}
- engines: {node: '>= 0.4'}
-
- is-property@1.0.2:
- resolution: {integrity: sha512-Ks/IoX00TtClbGQr4TWXemAnktAQvYB7HzcCxDGqEZU6oCmb2INHuOoKxbtR+HFkmYWBKv/dOZtGRiAjDhj92g==}
-
- itty-time@1.0.6:
- resolution: {integrity: sha512-+P8IZaLLBtFv8hCkIjcymZOp4UJ+xW6bSlQsXGqrkmJh7vSiMFSlNne0mCYagEE0N7HDNR5jJBRxwN0oYv61Rw==}
-
- jose@5.9.6:
- resolution: {integrity: sha512-AMlnetc9+CV9asI19zHmrgS/WYsWUwCn2R7RzlbJWD7F9eWYUTGyBmU9o6PxngtLGOiDGPRu+Uc4fhKzbpteZQ==}
-
- js-base64@3.7.7:
- resolution: {integrity: sha512-7rCnleh0z2CkXhH67J8K1Ytz0b2Y+yxTPL+/KOJoa20hfnVQ/3/T6W/KflYI4bRHRagNeXeU2bkNGI3v1oS/lw==}
-
- libsql@0.4.7:
- resolution: {integrity: sha512-T9eIRCs6b0J1SHKYIvD8+KCJMcWZ900iZyxdnSCdqxN12Z1ijzT+jY5nrk72Jw4B0HGzms2NgpryArlJqvc3Lw==}
- cpu: [x64, arm64, wasm32]
- os: [darwin, linux, win32]
-
- long@5.2.3:
- resolution: {integrity: sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==}
-
- lru-cache@7.18.3:
- resolution: {integrity: sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==}
- engines: {node: '>=12'}
-
- lru.min@1.1.1:
- resolution: {integrity: sha512-FbAj6lXil6t8z4z3j0E5mfRlPzxkySotzUHwRXjlpRh10vc6AI6WN62ehZj82VG7M20rqogJ0GLwar2Xa05a8Q==}
- engines: {bun: '>=1.0.0', deno: '>=1.30.0', node: '>=8.0.0'}
-
- magic-string@0.25.9:
- resolution: {integrity: sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==}
-
- memory-pager@1.5.0:
- resolution: {integrity: sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==}
-
- mime@3.0.0:
- resolution: {integrity: sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==}
- engines: {node: '>=10.0.0'}
- hasBin: true
-
- miniflare@3.20241205.0:
- resolution: {integrity: sha512-Z0cTtIf6ZrcAJ3SrOI9EUM3s4dkGhNeU6Ubl8sroYhsPVD+rtz3m5+p6McHFWCkcMff1o60X5XEKVTmkz0gbpA==}
- engines: {node: '>=16.13'}
- hasBin: true
-
- minimist@1.2.8:
- resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==}
-
- mongodb-connection-string-url@3.0.1:
- resolution: {integrity: sha512-XqMGwRX0Lgn05TDB4PyG2h2kKO/FfWJyCzYQbIhXUxz7ETt0I/FqHjUeqj37irJ+Dl1ZtU82uYyj14u2XsZKfg==}
-
- mongodb@6.12.0:
- resolution: {integrity: sha512-RM7AHlvYfS7jv7+BXund/kR64DryVI+cHbVAy9P61fnb1RcWZqOW1/Wj2YhqMCx+MuYhqTRGv7AwHBzmsCKBfA==}
- engines: {node: '>=16.20.1'}
- peerDependencies:
- '@aws-sdk/credential-providers': ^3.188.0
- '@mongodb-js/zstd': ^1.1.0 || ^2.0.0
- gcp-metadata: ^5.2.0
- kerberos: ^2.0.1
- mongodb-client-encryption: '>=6.0.0 <7'
- snappy: ^7.2.2
- socks: ^2.7.1
- peerDependenciesMeta:
- '@aws-sdk/credential-providers':
- optional: true
- '@mongodb-js/zstd':
- optional: true
- gcp-metadata:
- optional: true
- kerberos:
- optional: true
- mongodb-client-encryption:
- optional: true
- snappy:
- optional: true
- socks:
+ '@cloudflare/workerd-darwin-arm64@1.20241205.0':
optional: true
- ms@2.1.3:
- resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
-
- mustache@4.2.0:
- resolution: {integrity: sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==}
- hasBin: true
-
- mysql2@3.11.5:
- resolution: {integrity: sha512-0XFu8rUmFN9vC0ME36iBvCUObftiMHItrYFhlCRvFWbLgpNqtC4Br/NmZX1HNCszxT0GGy5QtP+k3Q3eCJPaYA==}
- engines: {node: '>= 8.0'}
-
- named-placeholders@1.1.3:
- resolution: {integrity: sha512-eLoBxg6wE/rZkJPhU/xRX1WTpkFEwDJEN96oxFrTsqBdbT5ec295Q+CoHrL9IT0DipqKhmGcaZmwOt8OON5x1w==}
- engines: {node: '>=12.0.0'}
-
- nanoid@3.3.8:
- resolution: {integrity: sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==}
- engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
- hasBin: true
-
- neo-async@2.6.2:
- resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==}
-
- node-domexception@1.0.0:
- resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==}
- engines: {node: '>=10.5.0'}
-
- node-fetch@3.3.2:
- resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==}
- engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
-
- node-forge@1.3.1:
- resolution: {integrity: sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==}
- engines: {node: '>= 6.13.0'}
-
- node-sql-parser@4.18.0:
- resolution: {integrity: sha512-2YEOR5qlI1zUFbGMLKNfsrR5JUvFg9LxIRVE+xJe962pfVLH0rnItqLzv96XVs1Y1UIR8FxsXAuvX/lYAWZ2BQ==}
- engines: {node: '>=8'}
-
- obuf@1.1.2:
- resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==}
-
- ohash@1.1.4:
- resolution: {integrity: sha512-FlDryZAahJmEF3VR3w1KogSEdWX3WhA5GPakFx4J81kEAiHyLMpdLLElS8n8dfNadMgAne/MywcvmogzscVt4g==}
-
- path-parse@1.0.7:
- resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==}
-
- path-to-regexp@6.3.0:
- resolution: {integrity: sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==}
-
- pathe@1.1.2:
- resolution: {integrity: sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==}
-
- pg-cloudflare@1.1.1:
- resolution: {integrity: sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==}
-
- pg-connection-string@2.7.0:
- resolution: {integrity: sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA==}
-
- pg-int8@1.0.1:
- resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==}
- engines: {node: '>=4.0.0'}
-
- pg-numeric@1.0.2:
- resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==}
- engines: {node: '>=4'}
-
- pg-pool@3.7.0:
- resolution: {integrity: sha512-ZOBQForurqh4zZWjrgSwwAtzJ7QiRX0ovFkZr2klsen3Nm0aoh33Ls0fzfv3imeH/nw/O27cjdz5kzYJfeGp/g==}
- peerDependencies:
- pg: '>=8.0'
-
- pg-protocol@1.7.0:
- resolution: {integrity: sha512-hTK/mE36i8fDDhgDFjy6xNOG+LCorxLG3WO17tku+ij6sVHXh1jQUJ8hYAnRhNla4QVD2H8er/FOjc/+EgC6yQ==}
-
- pg-types@2.2.0:
- resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==}
- engines: {node: '>=4'}
-
- pg-types@4.0.2:
- resolution: {integrity: sha512-cRL3JpS3lKMGsKaWndugWQoLOCoP+Cic8oseVcbr0qhPzYD5DWXK+RZ9LY9wxRf7RQia4SCwQlXk0q6FCPrVng==}
- engines: {node: '>=10'}
-
- pg@8.13.1:
- resolution: {integrity: sha512-OUir1A0rPNZlX//c7ksiu7crsGZTKSOXJPgtNiHGIlC9H0lO+NC6ZDYksSgBYY/thSWhnSRBv8w1lieNNGATNQ==}
- engines: {node: '>= 8.0.0'}
- peerDependencies:
- pg-native: '>=3.0.1'
- peerDependenciesMeta:
- pg-native:
+ '@cloudflare/workerd-linux-64@1.20241205.0':
optional: true
- pgpass@1.0.5:
- resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==}
-
- postgres-array@2.0.0:
- resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==}
- engines: {node: '>=4'}
-
- postgres-array@3.0.2:
- resolution: {integrity: sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==}
- engines: {node: '>=12'}
-
- postgres-bytea@1.0.0:
- resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==}
- engines: {node: '>=0.10.0'}
-
- postgres-bytea@3.0.0:
- resolution: {integrity: sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==}
- engines: {node: '>= 6'}
-
- postgres-date@1.0.7:
- resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==}
- engines: {node: '>=0.10.0'}
-
- postgres-date@2.1.0:
- resolution: {integrity: sha512-K7Juri8gtgXVcDfZttFKVmhglp7epKb1K4pgrkLxehjqkrgPhfG6OO8LHLkfaqkbpjNRnra018XwAr1yQFWGcA==}
- engines: {node: '>=12'}
-
- postgres-interval@1.2.0:
- resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==}
- engines: {node: '>=0.10.0'}
-
- postgres-interval@3.0.0:
- resolution: {integrity: sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==}
- engines: {node: '>=12'}
+ '@cloudflare/workerd-linux-arm64@1.20241205.0':
+ optional: true
- postgres-range@1.1.4:
- resolution: {integrity: sha512-i/hbxIE9803Alj/6ytL7UHQxRvZkI9O4Sy+J3HGc4F4oo/2eQAjTSNJ0bfxyse3bH0nuVesCk+3IRLaMtG3H6w==}
+ '@cloudflare/workerd-windows-64@1.20241205.0':
+ optional: true
- printable-characters@1.0.42:
- resolution: {integrity: sha512-dKp+C4iXWK4vVYZmYSd0KBH5F/h1HoZRsbJ82AVKRO3PEo8L4lBS/vLwhVtpwwuYcoIsVY+1JYKR268yn480uQ==}
+ '@cloudflare/workers-types@4.20241216.0': {}
- promise-limit@2.7.0:
- resolution: {integrity: sha512-7nJ6v5lnJsXwGprnGXga4wx6d1POjvi5Qmf1ivTRxTjH4Z/9Czja/UCMLVmB9N93GeWOU93XaFaEt6jbuoagNw==}
+ '@cspotcode/source-map-support@0.8.1':
+ dependencies:
+ '@jridgewell/trace-mapping': 0.3.9
- punycode@2.3.1:
- resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==}
- engines: {node: '>=6'}
+ '@esbuild-plugins/node-globals-polyfill@0.2.3(esbuild@0.17.19)':
+ dependencies:
+ esbuild: 0.17.19
- readdirp@4.0.2:
- resolution: {integrity: sha512-yDMz9g+VaZkqBYS/ozoBJwaBhTbZo3UNYQHNRw1D3UFQB8oHB4uS/tAODO+ZLjGWmUbKnIlOWO+aaIiAxrUWHA==}
- engines: {node: '>= 14.16.0'}
+ '@esbuild-plugins/node-modules-polyfill@0.2.2(esbuild@0.17.19)':
+ dependencies:
+ esbuild: 0.17.19
+ escape-string-regexp: 4.0.0
+ rollup-plugin-node-polyfills: 0.2.1
- resolve@1.22.9:
- resolution: {integrity: sha512-QxrmX1DzraFIi9PxdG5VkRfRwIgjwyud+z/iBwfRRrVmHc+P9Q7u2lSSpQ6bjr2gy5lrqIiU9vb6iAeGf2400A==}
- hasBin: true
+ '@esbuild/android-arm64@0.17.19':
+ optional: true
- rollup-plugin-inject@3.0.2:
- resolution: {integrity: sha512-ptg9PQwzs3orn4jkgXJ74bfs5vYz1NCZlSQMBUA0wKcGp5i5pA1AO3fOUEte8enhGUC+iapTCzEWw2jEFFUO/w==}
- deprecated: This package has been deprecated and is no longer maintained. Please use @rollup/plugin-inject.
+ '@esbuild/android-arm@0.17.19':
+ optional: true
- rollup-plugin-node-polyfills@0.2.1:
- resolution: {integrity: sha512-4kCrKPTJ6sK4/gLL/U5QzVT8cxJcofO0OU74tnB19F40cmuAKSzH5/siithxlofFEjwvw1YAhPmbvGNA6jEroA==}
+ '@esbuild/android-x64@0.17.19':
+ optional: true
- rollup-pluginutils@2.8.2:
- resolution: {integrity: sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==}
+ '@esbuild/darwin-arm64@0.17.19':
+ optional: true
- safer-buffer@2.1.2:
- resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
+ '@esbuild/darwin-x64@0.17.19':
+ optional: true
- selfsigned@2.4.1:
- resolution: {integrity: sha512-th5B4L2U+eGLq1TVh7zNRGBapioSORUeymIydxgFpwww9d2qyKvtuPU2jJuHvYAwwqi2Y596QBL3eEqcPEYL8Q==}
- engines: {node: '>=10'}
+ '@esbuild/freebsd-arm64@0.17.19':
+ optional: true
- seq-queue@0.0.5:
- resolution: {integrity: sha512-hr3Wtp/GZIc/6DAGPDcV4/9WoZhjrkXsi5B/07QgX8tsdc6ilr7BFM6PM6rbdAX1kFSDYeZGLipIZZKyQP0O5Q==}
+ '@esbuild/freebsd-x64@0.17.19':
+ optional: true
- source-map@0.6.1:
- resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
- engines: {node: '>=0.10.0'}
+ '@esbuild/linux-arm64@0.17.19':
+ optional: true
- sourcemap-codec@1.4.8:
- resolution: {integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==}
- deprecated: Please use @jridgewell/sourcemap-codec instead
+ '@esbuild/linux-arm@0.17.19':
+ optional: true
- sparse-bitfield@3.0.3:
- resolution: {integrity: sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==}
+ '@esbuild/linux-ia32@0.17.19':
+ optional: true
- split2@4.2.0:
- resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==}
- engines: {node: '>= 10.x'}
+ '@esbuild/linux-loong64@0.17.19':
+ optional: true
- sqlstring@2.3.3:
- resolution: {integrity: sha512-qC9iz2FlN7DQl3+wjwn3802RTyjCx7sDvfQEXchwa6CWOx07/WVfh91gBmQ9fahw8snwGEWU3xGzOt4tFyHLxg==}
- engines: {node: '>= 0.6'}
+ '@esbuild/linux-mips64el@0.17.19':
+ optional: true
- stacktracey@2.1.8:
- resolution: {integrity: sha512-Kpij9riA+UNg7TnphqjH7/CzctQ/owJGNbFkfEeve4Z4uxT5+JapVLFXcsurIfN34gnTWZNJ/f7NMG0E8JDzTw==}
+ '@esbuild/linux-ppc64@0.17.19':
+ optional: true
- stoppable@1.1.0:
- resolution: {integrity: sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==}
- engines: {node: '>=4', npm: '>=6'}
+ '@esbuild/linux-riscv64@0.17.19':
+ optional: true
- supports-preserve-symlinks-flag@1.0.0:
- resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==}
- engines: {node: '>= 0.4'}
+ '@esbuild/linux-s390x@0.17.19':
+ optional: true
- tr46@4.1.1:
- resolution: {integrity: sha512-2lv/66T7e5yNyhAAC4NaKe5nVavzuGJQVVtRYLyQ2OI8tsJ61PMLlelehb0wi2Hx6+hT/OJUWZcw8MjlSRnxvw==}
- engines: {node: '>=14'}
+ '@esbuild/linux-x64@0.17.19':
+ optional: true
- tslib@2.8.1:
- resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==}
+ '@esbuild/netbsd-x64@0.17.19':
+ optional: true
- typescript@5.7.2:
- resolution: {integrity: sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==}
- engines: {node: '>=14.17'}
- hasBin: true
+ '@esbuild/openbsd-x64@0.17.19':
+ optional: true
- ufo@1.5.4:
- resolution: {integrity: sha512-UsUk3byDzKd04EyoZ7U4DOlxQaD14JUKQl6/P7wiX4FNvUfm3XL246n9W5AmqwW5RSFJ27NAuM0iLscAOYUiGQ==}
+ '@esbuild/sunos-x64@0.17.19':
+ optional: true
- uglify-js@3.19.3:
- resolution: {integrity: sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==}
- engines: {node: '>=0.8.0'}
- hasBin: true
+ '@esbuild/win32-arm64@0.17.19':
+ optional: true
- undici-types@6.20.0:
- resolution: {integrity: sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==}
+ '@esbuild/win32-ia32@0.17.19':
+ optional: true
- undici@5.28.4:
- resolution: {integrity: sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==}
- engines: {node: '>=14.0'}
+ '@esbuild/win32-x64@0.17.19':
+ optional: true
- unenv-nightly@2.0.0-20241204-140205-a5d5190:
- resolution: {integrity: sha512-jpmAytLeiiW01pl5bhVn9wYJ4vtiLdhGe10oXlJBuQEX8mxjxO8BlEXGHU4vr4yEikjFP1wsomTHt/CLU8kUwg==}
+ '@fastify/busboy@2.1.1': {}
- web-streams-polyfill@3.3.3:
- resolution: {integrity: sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==}
- engines: {node: '>= 8'}
+ '@jridgewell/resolve-uri@3.1.2': {}
- webidl-conversions@7.0.0:
- resolution: {integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==}
- engines: {node: '>=12'}
+ '@jridgewell/sourcemap-codec@1.5.0': {}
- whatwg-url@13.0.0:
- resolution: {integrity: sha512-9WWbymnqj57+XEuqADHrCJ2eSXzn8WXIW/YSGaZtb2WKAInQ6CHfaUUcTyyver0p8BDg5StLQq8h1vtZuwmOig==}
- engines: {node: '>=16'}
+ '@jridgewell/trace-mapping@0.3.9':
+ dependencies:
+ '@jridgewell/resolve-uri': 3.1.2
+ '@jridgewell/sourcemap-codec': 1.5.0
- wordwrap@1.0.0:
- resolution: {integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==}
+ '@libsql/client@0.14.0':
+ dependencies:
+ '@libsql/core': 0.14.0
+ '@libsql/hrana-client': 0.7.0
+ js-base64: 3.7.7
+ libsql: 0.4.7
+ promise-limit: 2.7.0
+ transitivePeerDependencies:
+ - bufferutil
+ - utf-8-validate
- workerd@1.20241205.0:
- resolution: {integrity: sha512-vso/2n0c5SdBDWiD+Sx5gM7unA6SiZXRVUHDqH1euoP/9mFVHZF8icoYsNLB87b/TX8zNgpae+I5N/xFpd9v0g==}
- engines: {node: '>=16'}
- hasBin: true
+ '@libsql/core@0.14.0':
+ dependencies:
+ js-base64: 3.7.7
- wrangler@3.96.0:
- resolution: {integrity: sha512-KjbHTUnwTa5eKl3hzv2h6nHBfAsbUkdurL7f6Y288/Bdn6tcEis13jLVR/nw/eWa3tNCBG1xOMZJboUyzWcC1g==}
- engines: {node: '>=16.17.0'}
- hasBin: true
- peerDependencies:
- '@cloudflare/workers-types': ^4.20241205.0
- peerDependenciesMeta:
- '@cloudflare/workers-types':
+ '@libsql/darwin-arm64@0.4.7':
optional: true
- ws@8.18.0:
- resolution: {integrity: sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==}
- engines: {node: '>=10.0.0'}
- peerDependencies:
- bufferutil: ^4.0.1
- utf-8-validate: '>=5.0.2'
- peerDependenciesMeta:
- bufferutil:
- optional: true
- utf-8-validate:
+ '@libsql/darwin-x64@0.4.7':
optional: true
- xtend@4.0.2:
- resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==}
- engines: {node: '>=0.4'}
-
- xxhash-wasm@1.1.0:
- resolution: {integrity: sha512-147y/6YNh+tlp6nd/2pWq38i9h6mz/EuQ6njIrmW8D1BS5nCqs0P6DG+m6zTGnNz5I+uhZ0SHxBs9BsPrwcKDA==}
-
- youch@3.3.4:
- resolution: {integrity: sha512-UeVBXie8cA35DS6+nBkls68xaBBXCye0CNznrhszZjTbRVnJKQuNsyLKBTTL4ln1o1rh2PKtv35twV7irj5SEg==}
-
- zod@3.24.1:
- resolution: {integrity: sha512-muH7gBL9sI1nciMZV67X5fTKKBLtwpZ5VBp1vsOQzj1MhrBZ4wlVCm3gedKZWLp0Oyel8sIGfeiz54Su+OVT+A==}
-
-snapshots:
-
- '@cloudflare/kv-asset-handler@0.3.4':
- dependencies:
- mime: 3.0.0
-
- '@cloudflare/workerd-darwin-64@1.20241205.0':
- optional: true
-
- '@cloudflare/workerd-darwin-arm64@1.20241205.0':
- optional: true
+ '@libsql/hrana-client@0.7.0':
+ dependencies:
+ '@libsql/isomorphic-fetch': 0.3.1
+ '@libsql/isomorphic-ws': 0.1.5
+ js-base64: 3.7.7
+ node-fetch: 3.3.2
+ transitivePeerDependencies:
+ - bufferutil
+ - utf-8-validate
+
+ '@libsql/isomorphic-fetch@0.3.1': {}
+
+ '@libsql/isomorphic-ws@0.1.5':
+ dependencies:
+ '@types/ws': 8.5.13
+ ws: 8.18.0
+ transitivePeerDependencies:
+ - bufferutil
+ - utf-8-validate
+
+ '@libsql/linux-arm64-gnu@0.4.7':
+ optional: true
- '@cloudflare/workerd-linux-64@1.20241205.0':
- optional: true
+ '@libsql/linux-arm64-musl@0.4.7':
+ optional: true
- '@cloudflare/workerd-linux-arm64@1.20241205.0':
- optional: true
+ '@libsql/linux-x64-gnu@0.4.7':
+ optional: true
- '@cloudflare/workerd-windows-64@1.20241205.0':
- optional: true
+ '@libsql/linux-x64-musl@0.4.7':
+ optional: true
- '@cloudflare/workers-types@4.20241216.0': {}
+ '@libsql/win32-x64-msvc@0.4.7':
+ optional: true
- '@cspotcode/source-map-support@0.8.1':
- dependencies:
- '@jridgewell/trace-mapping': 0.3.9
+ '@mongodb-js/saslprep@1.1.9':
+ dependencies:
+ sparse-bitfield: 3.0.3
- '@esbuild-plugins/node-globals-polyfill@0.2.3(esbuild@0.17.19)':
- dependencies:
- esbuild: 0.17.19
+ '@neon-rs/load@0.0.4': {}
- '@esbuild-plugins/node-modules-polyfill@0.2.2(esbuild@0.17.19)':
- dependencies:
- esbuild: 0.17.19
- escape-string-regexp: 4.0.0
- rollup-plugin-node-polyfills: 0.2.1
+ '@outerbase/sdk@2.0.0-rc.3':
+ dependencies:
+ handlebars: 4.7.8
- '@esbuild/android-arm64@0.17.19':
- optional: true
+ '@types/node-forge@1.3.11':
+ dependencies:
+ '@types/node': 22.10.2
- '@esbuild/android-arm@0.17.19':
- optional: true
+ '@types/node@22.10.2':
+ dependencies:
+ undici-types: 6.20.0
- '@esbuild/android-x64@0.17.19':
- optional: true
+ '@types/pg@8.11.10':
+ dependencies:
+ '@types/node': 22.10.2
+ pg-protocol: 1.7.0
+ pg-types: 4.0.2
- '@esbuild/darwin-arm64@0.17.19':
- optional: true
+ '@types/webidl-conversions@7.0.3': {}
- '@esbuild/darwin-x64@0.17.19':
- optional: true
+ '@types/whatwg-url@11.0.5':
+ dependencies:
+ '@types/webidl-conversions': 7.0.3
- '@esbuild/freebsd-arm64@0.17.19':
- optional: true
+ '@types/ws@8.5.13':
+ dependencies:
+ '@types/node': 22.10.2
- '@esbuild/freebsd-x64@0.17.19':
- optional: true
+ acorn-walk@8.3.4:
+ dependencies:
+ acorn: 8.14.0
- '@esbuild/linux-arm64@0.17.19':
- optional: true
+ acorn@8.14.0: {}
- '@esbuild/linux-arm@0.17.19':
- optional: true
+ ansi-escapes@7.0.0:
+ dependencies:
+ environment: 1.1.0
- '@esbuild/linux-ia32@0.17.19':
- optional: true
+ ansi-regex@6.1.0: {}
- '@esbuild/linux-loong64@0.17.19':
- optional: true
+ ansi-styles@6.2.1: {}
- '@esbuild/linux-mips64el@0.17.19':
- optional: true
+ as-table@1.0.55:
+ dependencies:
+ printable-characters: 1.0.42
- '@esbuild/linux-ppc64@0.17.19':
- optional: true
+ aws-ssl-profiles@1.1.2: {}
- '@esbuild/linux-riscv64@0.17.19':
- optional: true
+ big-integer@1.6.52: {}
- '@esbuild/linux-s390x@0.17.19':
- optional: true
+ blake3-wasm@2.1.5: {}
- '@esbuild/linux-x64@0.17.19':
- optional: true
+ braces@3.0.3:
+ dependencies:
+ fill-range: 7.1.1
- '@esbuild/netbsd-x64@0.17.19':
- optional: true
+ bson@6.10.1: {}
- '@esbuild/openbsd-x64@0.17.19':
- optional: true
+ capnp-ts@0.7.0:
+ dependencies:
+ debug: 4.4.0
+ tslib: 2.8.1
+ transitivePeerDependencies:
+ - supports-color
- '@esbuild/sunos-x64@0.17.19':
- optional: true
+ chalk@5.3.0: {}
- '@esbuild/win32-arm64@0.17.19':
- optional: true
+ chokidar@4.0.2:
+ dependencies:
+ readdirp: 4.0.2
- '@esbuild/win32-ia32@0.17.19':
- optional: true
+ cli-cursor@5.0.0:
+ dependencies:
+ restore-cursor: 5.1.0
- '@esbuild/win32-x64@0.17.19':
- optional: true
+ cli-truncate@4.0.0:
+ dependencies:
+ slice-ansi: 5.0.0
+ string-width: 7.2.0
- '@fastify/busboy@2.1.1': {}
+ colorette@2.0.20: {}
- '@jridgewell/resolve-uri@3.1.2': {}
+ commander@12.1.0: {}
- '@jridgewell/sourcemap-codec@1.5.0': {}
+ cookie@0.7.2: {}
- '@jridgewell/trace-mapping@0.3.9':
- dependencies:
- '@jridgewell/resolve-uri': 3.1.2
- '@jridgewell/sourcemap-codec': 1.5.0
+ cross-spawn@7.0.6:
+ dependencies:
+ path-key: 3.1.1
+ shebang-command: 2.0.0
+ which: 2.0.2
- '@libsql/client@0.14.0':
- dependencies:
- '@libsql/core': 0.14.0
- '@libsql/hrana-client': 0.7.0
- js-base64: 3.7.7
- libsql: 0.4.7
- promise-limit: 2.7.0
- transitivePeerDependencies:
- - bufferutil
- - utf-8-validate
+ data-uri-to-buffer@2.0.2: {}
- '@libsql/core@0.14.0':
- dependencies:
- js-base64: 3.7.7
+ data-uri-to-buffer@4.0.1: {}
- '@libsql/darwin-arm64@0.4.7':
- optional: true
+ date-fns@4.1.0: {}
- '@libsql/darwin-x64@0.4.7':
- optional: true
+ debug@4.4.0:
+ dependencies:
+ ms: 2.1.3
- '@libsql/hrana-client@0.7.0':
- dependencies:
- '@libsql/isomorphic-fetch': 0.3.1
- '@libsql/isomorphic-ws': 0.1.5
- js-base64: 3.7.7
- node-fetch: 3.3.2
- transitivePeerDependencies:
- - bufferutil
- - utf-8-validate
+ defu@6.1.4: {}
- '@libsql/isomorphic-fetch@0.3.1': {}
+ denque@2.1.0: {}
- '@libsql/isomorphic-ws@0.1.5':
- dependencies:
- '@types/ws': 8.5.13
- ws: 8.18.0
- transitivePeerDependencies:
- - bufferutil
- - utf-8-validate
+ detect-libc@2.0.2: {}
- '@libsql/linux-arm64-gnu@0.4.7':
- optional: true
+ emoji-regex@10.4.0: {}
- '@libsql/linux-arm64-musl@0.4.7':
- optional: true
+ environment@1.1.0: {}
- '@libsql/linux-x64-gnu@0.4.7':
- optional: true
+ esbuild@0.17.19:
+ optionalDependencies:
+ '@esbuild/android-arm': 0.17.19
+ '@esbuild/android-arm64': 0.17.19
+ '@esbuild/android-x64': 0.17.19
+ '@esbuild/darwin-arm64': 0.17.19
+ '@esbuild/darwin-x64': 0.17.19
+ '@esbuild/freebsd-arm64': 0.17.19
+ '@esbuild/freebsd-x64': 0.17.19
+ '@esbuild/linux-arm': 0.17.19
+ '@esbuild/linux-arm64': 0.17.19
+ '@esbuild/linux-ia32': 0.17.19
+ '@esbuild/linux-loong64': 0.17.19
+ '@esbuild/linux-mips64el': 0.17.19
+ '@esbuild/linux-ppc64': 0.17.19
+ '@esbuild/linux-riscv64': 0.17.19
+ '@esbuild/linux-s390x': 0.17.19
+ '@esbuild/linux-x64': 0.17.19
+ '@esbuild/netbsd-x64': 0.17.19
+ '@esbuild/openbsd-x64': 0.17.19
+ '@esbuild/sunos-x64': 0.17.19
+ '@esbuild/win32-arm64': 0.17.19
+ '@esbuild/win32-ia32': 0.17.19
+ '@esbuild/win32-x64': 0.17.19
- '@libsql/linux-x64-musl@0.4.7':
- optional: true
+ escape-string-regexp@4.0.0: {}
- '@libsql/win32-x64-msvc@0.4.7':
- optional: true
+ estree-walker@0.6.1: {}
- '@mongodb-js/saslprep@1.1.9':
- dependencies:
- sparse-bitfield: 3.0.3
+ eventemitter3@5.0.1: {}
- '@neon-rs/load@0.0.4': {}
+ execa@8.0.1:
+ dependencies:
+ cross-spawn: 7.0.6
+ get-stream: 8.0.1
+ human-signals: 5.0.0
+ is-stream: 3.0.0
+ merge-stream: 2.0.0
+ npm-run-path: 5.3.0
+ onetime: 6.0.0
+ signal-exit: 4.1.0
+ strip-final-newline: 3.0.0
- '@outerbase/sdk@2.0.0-rc.3':
- dependencies:
- handlebars: 4.7.8
+ exit-hook@2.2.1: {}
- '@types/node-forge@1.3.11':
- dependencies:
- '@types/node': 22.10.2
+ fetch-blob@3.2.0:
+ dependencies:
+ node-domexception: 1.0.0
+ web-streams-polyfill: 3.3.3
- '@types/node@22.10.2':
- dependencies:
- undici-types: 6.20.0
+ fill-range@7.1.1:
+ dependencies:
+ to-regex-range: 5.0.1
- '@types/pg@8.11.10':
- dependencies:
- '@types/node': 22.10.2
- pg-protocol: 1.7.0
- pg-types: 4.0.2
+ formdata-polyfill@4.0.10:
+ dependencies:
+ fetch-blob: 3.2.0
- '@types/webidl-conversions@7.0.3': {}
+ fsevents@2.3.3:
+ optional: true
- '@types/whatwg-url@11.0.5':
- dependencies:
- '@types/webidl-conversions': 7.0.3
+ function-bind@1.1.2: {}
- '@types/ws@8.5.13':
- dependencies:
- '@types/node': 22.10.2
+ generate-function@2.3.1:
+ dependencies:
+ is-property: 1.0.2
- acorn-walk@8.3.4:
- dependencies:
- acorn: 8.14.0
+ get-east-asian-width@1.3.0: {}
- acorn@8.14.0: {}
+ get-source@2.0.12:
+ dependencies:
+ data-uri-to-buffer: 2.0.2
+ source-map: 0.6.1
- as-table@1.0.55:
- dependencies:
- printable-characters: 1.0.42
+ get-stream@8.0.1: {}
- aws-ssl-profiles@1.1.2: {}
+ glob-to-regexp@0.4.1: {}
- big-integer@1.6.52: {}
+ handlebars@4.7.8:
+ dependencies:
+ minimist: 1.2.8
+ neo-async: 2.6.2
+ source-map: 0.6.1
+ wordwrap: 1.0.0
+ optionalDependencies:
+ uglify-js: 3.19.3
- blake3-wasm@2.1.5: {}
+ hasown@2.0.2:
+ dependencies:
+ function-bind: 1.1.2
- bson@6.10.1: {}
+ hono@4.6.14: {}
- capnp-ts@0.7.0:
- dependencies:
- debug: 4.4.0
- tslib: 2.8.1
- transitivePeerDependencies:
- - supports-color
+ human-signals@5.0.0: {}
- chokidar@4.0.2:
- dependencies:
- readdirp: 4.0.2
+ husky@9.1.7: {}
- cookie@0.7.2: {}
+ iconv-lite@0.6.3:
+ dependencies:
+ safer-buffer: 2.1.2
- data-uri-to-buffer@2.0.2: {}
+ is-core-module@2.16.0:
+ dependencies:
+ hasown: 2.0.2
- data-uri-to-buffer@4.0.1: {}
+ is-fullwidth-code-point@4.0.0: {}
- date-fns@4.1.0: {}
+ is-fullwidth-code-point@5.0.0:
+ dependencies:
+ get-east-asian-width: 1.3.0
- debug@4.4.0:
- dependencies:
- ms: 2.1.3
+ is-number@7.0.0: {}
- defu@6.1.4: {}
+ is-property@1.0.2: {}
- denque@2.1.0: {}
+ is-stream@3.0.0: {}
- detect-libc@2.0.2: {}
+ isexe@2.0.0: {}
- esbuild@0.17.19:
- optionalDependencies:
- '@esbuild/android-arm': 0.17.19
- '@esbuild/android-arm64': 0.17.19
- '@esbuild/android-x64': 0.17.19
- '@esbuild/darwin-arm64': 0.17.19
- '@esbuild/darwin-x64': 0.17.19
- '@esbuild/freebsd-arm64': 0.17.19
- '@esbuild/freebsd-x64': 0.17.19
- '@esbuild/linux-arm': 0.17.19
- '@esbuild/linux-arm64': 0.17.19
- '@esbuild/linux-ia32': 0.17.19
- '@esbuild/linux-loong64': 0.17.19
- '@esbuild/linux-mips64el': 0.17.19
- '@esbuild/linux-ppc64': 0.17.19
- '@esbuild/linux-riscv64': 0.17.19
- '@esbuild/linux-s390x': 0.17.19
- '@esbuild/linux-x64': 0.17.19
- '@esbuild/netbsd-x64': 0.17.19
- '@esbuild/openbsd-x64': 0.17.19
- '@esbuild/sunos-x64': 0.17.19
- '@esbuild/win32-arm64': 0.17.19
- '@esbuild/win32-ia32': 0.17.19
- '@esbuild/win32-x64': 0.17.19
+ itty-time@1.0.6: {}
- escape-string-regexp@4.0.0: {}
+ jose@5.9.6: {}
- estree-walker@0.6.1: {}
+ js-base64@3.7.7: {}
- exit-hook@2.2.1: {}
+ libsql@0.4.7:
+ dependencies:
+ '@neon-rs/load': 0.0.4
+ detect-libc: 2.0.2
+ optionalDependencies:
+ '@libsql/darwin-arm64': 0.4.7
+ '@libsql/darwin-x64': 0.4.7
+ '@libsql/linux-arm64-gnu': 0.4.7
+ '@libsql/linux-arm64-musl': 0.4.7
+ '@libsql/linux-x64-gnu': 0.4.7
+ '@libsql/linux-x64-musl': 0.4.7
+ '@libsql/win32-x64-msvc': 0.4.7
- fetch-blob@3.2.0:
- dependencies:
- node-domexception: 1.0.0
- web-streams-polyfill: 3.3.3
+ lilconfig@3.1.3: {}
+
+ lint-staged@15.2.11:
+ dependencies:
+ chalk: 5.3.0
+ commander: 12.1.0
+ debug: 4.4.0
+ execa: 8.0.1
+ lilconfig: 3.1.3
+ listr2: 8.2.5
+ micromatch: 4.0.8
+ pidtree: 0.6.0
+ string-argv: 0.3.2
+ yaml: 2.6.1
+ transitivePeerDependencies:
+ - supports-color
- formdata-polyfill@4.0.10:
- dependencies:
- fetch-blob: 3.2.0
+ listr2@8.2.5:
+ dependencies:
+ cli-truncate: 4.0.0
+ colorette: 2.0.20
+ eventemitter3: 5.0.1
+ log-update: 6.1.0
+ rfdc: 1.4.1
+ wrap-ansi: 9.0.0
+
+ log-update@6.1.0:
+ dependencies:
+ ansi-escapes: 7.0.0
+ cli-cursor: 5.0.0
+ slice-ansi: 7.1.0
+ strip-ansi: 7.1.0
+ wrap-ansi: 9.0.0
+
+ long@5.2.3: {}
- fsevents@2.3.3:
- optional: true
+ lru-cache@7.18.3: {}
- function-bind@1.1.2: {}
+ lru.min@1.1.1: {}
+
+ magic-string@0.25.9:
+ dependencies:
+ sourcemap-codec: 1.4.8
+
+ memory-pager@1.5.0: {}
- generate-function@2.3.1:
- dependencies:
- is-property: 1.0.2
+ merge-stream@2.0.0: {}
- get-source@2.0.12:
- dependencies:
- data-uri-to-buffer: 2.0.2
- source-map: 0.6.1
+ micromatch@4.0.8:
+ dependencies:
+ braces: 3.0.3
+ picomatch: 2.3.1
- glob-to-regexp@0.4.1: {}
+ mime@3.0.0: {}
- handlebars@4.7.8:
- dependencies:
- minimist: 1.2.8
- neo-async: 2.6.2
- source-map: 0.6.1
- wordwrap: 1.0.0
- optionalDependencies:
- uglify-js: 3.19.3
-
- hasown@2.0.2:
- dependencies:
- function-bind: 1.1.2
-
- hono@4.6.14: {}
-
- iconv-lite@0.6.3:
- dependencies:
- safer-buffer: 2.1.2
-
- is-core-module@2.16.0:
- dependencies:
- hasown: 2.0.2
-
- is-property@1.0.2: {}
-
- itty-time@1.0.6: {}
-
- jose@5.9.6: {}
-
- js-base64@3.7.7: {}
-
- libsql@0.4.7:
- dependencies:
- '@neon-rs/load': 0.0.4
- detect-libc: 2.0.2
- optionalDependencies:
- '@libsql/darwin-arm64': 0.4.7
- '@libsql/darwin-x64': 0.4.7
- '@libsql/linux-arm64-gnu': 0.4.7
- '@libsql/linux-arm64-musl': 0.4.7
- '@libsql/linux-x64-gnu': 0.4.7
- '@libsql/linux-x64-musl': 0.4.7
- '@libsql/win32-x64-msvc': 0.4.7
-
- long@5.2.3: {}
-
- lru-cache@7.18.3: {}
+ mimic-fn@4.0.0: {}
- lru.min@1.1.1: {}
+ mimic-function@5.0.1: {}
- magic-string@0.25.9:
- dependencies:
- sourcemap-codec: 1.4.8
+ miniflare@3.20241205.0:
+ dependencies:
+ '@cspotcode/source-map-support': 0.8.1
+ acorn: 8.14.0
+ acorn-walk: 8.3.4
+ capnp-ts: 0.7.0
+ exit-hook: 2.2.1
+ glob-to-regexp: 0.4.1
+ stoppable: 1.1.0
+ undici: 5.28.4
+ workerd: 1.20241205.0
+ ws: 8.18.0
+ youch: 3.3.4
+ zod: 3.24.1
+ transitivePeerDependencies:
+ - bufferutil
+ - supports-color
+ - utf-8-validate
- memory-pager@1.5.0: {}
+ minimist@1.2.8: {}
+
+ mongodb-connection-string-url@3.0.1:
+ dependencies:
+ '@types/whatwg-url': 11.0.5
+ whatwg-url: 13.0.0
+
+ mongodb@6.12.0:
+ dependencies:
+ '@mongodb-js/saslprep': 1.1.9
+ bson: 6.10.1
+ mongodb-connection-string-url: 3.0.1
+
+ ms@2.1.3: {}
+
+ mustache@4.2.0: {}
+
+ mysql2@3.11.5:
+ dependencies:
+ aws-ssl-profiles: 1.1.2
+ denque: 2.1.0
+ generate-function: 2.3.1
+ iconv-lite: 0.6.3
+ long: 5.2.3
+ lru.min: 1.1.1
+ named-placeholders: 1.1.3
+ seq-queue: 0.0.5
+ sqlstring: 2.3.3
- mime@3.0.0: {}
+ named-placeholders@1.1.3:
+ dependencies:
+ lru-cache: 7.18.3
- miniflare@3.20241205.0:
- dependencies:
- '@cspotcode/source-map-support': 0.8.1
- acorn: 8.14.0
- acorn-walk: 8.3.4
- capnp-ts: 0.7.0
- exit-hook: 2.2.1
- glob-to-regexp: 0.4.1
- stoppable: 1.1.0
- undici: 5.28.4
- workerd: 1.20241205.0
- ws: 8.18.0
- youch: 3.3.4
- zod: 3.24.1
- transitivePeerDependencies:
- - bufferutil
- - supports-color
- - utf-8-validate
+ nanoid@3.3.8: {}
- minimist@1.2.8: {}
+ neo-async@2.6.2: {}
- mongodb-connection-string-url@3.0.1:
- dependencies:
- '@types/whatwg-url': 11.0.5
- whatwg-url: 13.0.0
+ node-domexception@1.0.0: {}
- mongodb@6.12.0:
- dependencies:
- '@mongodb-js/saslprep': 1.1.9
- bson: 6.10.1
- mongodb-connection-string-url: 3.0.1
+ node-fetch@3.3.2:
+ dependencies:
+ data-uri-to-buffer: 4.0.1
+ fetch-blob: 3.2.0
+ formdata-polyfill: 4.0.10
- ms@2.1.3: {}
+ node-forge@1.3.1: {}
- mustache@4.2.0: {}
+ node-sql-parser@4.18.0:
+ dependencies:
+ big-integer: 1.6.52
- mysql2@3.11.5:
- dependencies:
- aws-ssl-profiles: 1.1.2
- denque: 2.1.0
- generate-function: 2.3.1
- iconv-lite: 0.6.3
- long: 5.2.3
- lru.min: 1.1.1
- named-placeholders: 1.1.3
- seq-queue: 0.0.5
- sqlstring: 2.3.3
+ npm-run-path@5.3.0:
+ dependencies:
+ path-key: 4.0.0
- named-placeholders@1.1.3:
- dependencies:
- lru-cache: 7.18.3
+ obuf@1.1.2: {}
- nanoid@3.3.8: {}
+ ohash@1.1.4: {}
- neo-async@2.6.2: {}
+ onetime@6.0.0:
+ dependencies:
+ mimic-fn: 4.0.0
- node-domexception@1.0.0: {}
+ onetime@7.0.0:
+ dependencies:
+ mimic-function: 5.0.1
- node-fetch@3.3.2:
- dependencies:
- data-uri-to-buffer: 4.0.1
- fetch-blob: 3.2.0
- formdata-polyfill: 4.0.10
+ path-key@3.1.1: {}
- node-forge@1.3.1: {}
+ path-key@4.0.0: {}
- node-sql-parser@4.18.0:
- dependencies:
- big-integer: 1.6.52
+ path-parse@1.0.7: {}
- obuf@1.1.2: {}
+ path-to-regexp@6.3.0: {}
- ohash@1.1.4: {}
+ pathe@1.1.2: {}
- path-parse@1.0.7: {}
+ pg-cloudflare@1.1.1:
+ optional: true
- path-to-regexp@6.3.0: {}
+ pg-connection-string@2.7.0: {}
- pathe@1.1.2: {}
+ pg-int8@1.0.1: {}
- pg-cloudflare@1.1.1:
- optional: true
+ pg-numeric@1.0.2: {}
- pg-connection-string@2.7.0: {}
+ pg-pool@3.7.0(pg@8.13.1):
+ dependencies:
+ pg: 8.13.1
- pg-int8@1.0.1: {}
+ pg-protocol@1.7.0: {}
- pg-numeric@1.0.2: {}
+ pg-types@2.2.0:
+ dependencies:
+ pg-int8: 1.0.1
+ postgres-array: 2.0.0
+ postgres-bytea: 1.0.0
+ postgres-date: 1.0.7
+ postgres-interval: 1.2.0
- pg-pool@3.7.0(pg@8.13.1):
- dependencies:
- pg: 8.13.1
+ pg-types@4.0.2:
+ dependencies:
+ pg-int8: 1.0.1
+ pg-numeric: 1.0.2
+ postgres-array: 3.0.2
+ postgres-bytea: 3.0.0
+ postgres-date: 2.1.0
+ postgres-interval: 3.0.0
+ postgres-range: 1.1.4
- pg-protocol@1.7.0: {}
+ pg@8.13.1:
+ dependencies:
+ pg-connection-string: 2.7.0
+ pg-pool: 3.7.0(pg@8.13.1)
+ pg-protocol: 1.7.0
+ pg-types: 2.2.0
+ pgpass: 1.0.5
+ optionalDependencies:
+ pg-cloudflare: 1.1.1
- pg-types@2.2.0:
- dependencies:
- pg-int8: 1.0.1
- postgres-array: 2.0.0
- postgres-bytea: 1.0.0
- postgres-date: 1.0.7
- postgres-interval: 1.2.0
+ pgpass@1.0.5:
+ dependencies:
+ split2: 4.2.0
- pg-types@4.0.2:
- dependencies:
- pg-int8: 1.0.1
- pg-numeric: 1.0.2
- postgres-array: 3.0.2
- postgres-bytea: 3.0.0
- postgres-date: 2.1.0
- postgres-interval: 3.0.0
- postgres-range: 1.1.4
+ picomatch@2.3.1: {}
- pg@8.13.1:
- dependencies:
- pg-connection-string: 2.7.0
- pg-pool: 3.7.0(pg@8.13.1)
- pg-protocol: 1.7.0
- pg-types: 2.2.0
- pgpass: 1.0.5
- optionalDependencies:
- pg-cloudflare: 1.1.1
+ pidtree@0.6.0: {}
- pgpass@1.0.5:
- dependencies:
- split2: 4.2.0
+ postgres-array@2.0.0: {}
- postgres-array@2.0.0: {}
+ postgres-array@3.0.2: {}
- postgres-array@3.0.2: {}
+ postgres-bytea@1.0.0: {}
- postgres-bytea@1.0.0: {}
+ postgres-bytea@3.0.0:
+ dependencies:
+ obuf: 1.1.2
- postgres-bytea@3.0.0:
- dependencies:
- obuf: 1.1.2
+ postgres-date@1.0.7: {}
- postgres-date@1.0.7: {}
+ postgres-date@2.1.0: {}
- postgres-date@2.1.0: {}
+ postgres-interval@1.2.0:
+ dependencies:
+ xtend: 4.0.2
- postgres-interval@1.2.0:
- dependencies:
- xtend: 4.0.2
+ postgres-interval@3.0.0: {}
- postgres-interval@3.0.0: {}
+ postgres-range@1.1.4: {}
- postgres-range@1.1.4: {}
+ prettier@3.4.2: {}
- printable-characters@1.0.42: {}
+ printable-characters@1.0.42: {}
- promise-limit@2.7.0: {}
+ promise-limit@2.7.0: {}
- punycode@2.3.1: {}
+ punycode@2.3.1: {}
- readdirp@4.0.2: {}
+ readdirp@4.0.2: {}
- resolve@1.22.9:
- dependencies:
- is-core-module: 2.16.0
- path-parse: 1.0.7
- supports-preserve-symlinks-flag: 1.0.0
+ resolve@1.22.9:
+ dependencies:
+ is-core-module: 2.16.0
+ path-parse: 1.0.7
+ supports-preserve-symlinks-flag: 1.0.0
- rollup-plugin-inject@3.0.2:
- dependencies:
- estree-walker: 0.6.1
- magic-string: 0.25.9
- rollup-pluginutils: 2.8.2
+ restore-cursor@5.1.0:
+ dependencies:
+ onetime: 7.0.0
+ signal-exit: 4.1.0
- rollup-plugin-node-polyfills@0.2.1:
- dependencies:
- rollup-plugin-inject: 3.0.2
+ rfdc@1.4.1: {}
- rollup-pluginutils@2.8.2:
- dependencies:
- estree-walker: 0.6.1
+ rollup-plugin-inject@3.0.2:
+ dependencies:
+ estree-walker: 0.6.1
+ magic-string: 0.25.9
+ rollup-pluginutils: 2.8.2
- safer-buffer@2.1.2: {}
+ rollup-plugin-node-polyfills@0.2.1:
+ dependencies:
+ rollup-plugin-inject: 3.0.2
- selfsigned@2.4.1:
- dependencies:
- '@types/node-forge': 1.3.11
- node-forge: 1.3.1
+ rollup-pluginutils@2.8.2:
+ dependencies:
+ estree-walker: 0.6.1
- seq-queue@0.0.5: {}
+ safer-buffer@2.1.2: {}
- source-map@0.6.1: {}
+ selfsigned@2.4.1:
+ dependencies:
+ '@types/node-forge': 1.3.11
+ node-forge: 1.3.1
- sourcemap-codec@1.4.8: {}
+ seq-queue@0.0.5: {}
- sparse-bitfield@3.0.3:
- dependencies:
- memory-pager: 1.5.0
+ shebang-command@2.0.0:
+ dependencies:
+ shebang-regex: 3.0.0
- split2@4.2.0: {}
+ shebang-regex@3.0.0: {}
- sqlstring@2.3.3: {}
+ signal-exit@4.1.0: {}
- stacktracey@2.1.8:
- dependencies:
- as-table: 1.0.55
- get-source: 2.0.12
+ slice-ansi@5.0.0:
+ dependencies:
+ ansi-styles: 6.2.1
+ is-fullwidth-code-point: 4.0.0
- stoppable@1.1.0: {}
+ slice-ansi@7.1.0:
+ dependencies:
+ ansi-styles: 6.2.1
+ is-fullwidth-code-point: 5.0.0
- supports-preserve-symlinks-flag@1.0.0: {}
+ source-map@0.6.1: {}
- tr46@4.1.1:
- dependencies:
- punycode: 2.3.1
+ sourcemap-codec@1.4.8: {}
- tslib@2.8.1: {}
+ sparse-bitfield@3.0.3:
+ dependencies:
+ memory-pager: 1.5.0
- typescript@5.7.2: {}
+ split2@4.2.0: {}
- ufo@1.5.4: {}
+ sqlstring@2.3.3: {}
- uglify-js@3.19.3:
- optional: true
+ stacktracey@2.1.8:
+ dependencies:
+ as-table: 1.0.55
+ get-source: 2.0.12
- undici-types@6.20.0: {}
+ stoppable@1.1.0: {}
- undici@5.28.4:
- dependencies:
- '@fastify/busboy': 2.1.1
+ string-argv@0.3.2: {}
- unenv-nightly@2.0.0-20241204-140205-a5d5190:
- dependencies:
- defu: 6.1.4
- ohash: 1.1.4
- pathe: 1.1.2
- ufo: 1.5.4
+ string-width@7.2.0:
+ dependencies:
+ emoji-regex: 10.4.0
+ get-east-asian-width: 1.3.0
+ strip-ansi: 7.1.0
- web-streams-polyfill@3.3.3: {}
+ strip-ansi@7.1.0:
+ dependencies:
+ ansi-regex: 6.1.0
- webidl-conversions@7.0.0: {}
+ strip-final-newline@3.0.0: {}
- whatwg-url@13.0.0:
- dependencies:
- tr46: 4.1.1
- webidl-conversions: 7.0.0
+ supports-preserve-symlinks-flag@1.0.0: {}
- wordwrap@1.0.0: {}
+ to-regex-range@5.0.1:
+ dependencies:
+ is-number: 7.0.0
- workerd@1.20241205.0:
- optionalDependencies:
- '@cloudflare/workerd-darwin-64': 1.20241205.0
- '@cloudflare/workerd-darwin-arm64': 1.20241205.0
- '@cloudflare/workerd-linux-64': 1.20241205.0
- '@cloudflare/workerd-linux-arm64': 1.20241205.0
- '@cloudflare/workerd-windows-64': 1.20241205.0
+ tr46@4.1.1:
+ dependencies:
+ punycode: 2.3.1
- wrangler@3.96.0(@cloudflare/workers-types@4.20241216.0):
- dependencies:
- '@cloudflare/kv-asset-handler': 0.3.4
- '@esbuild-plugins/node-globals-polyfill': 0.2.3(esbuild@0.17.19)
- '@esbuild-plugins/node-modules-polyfill': 0.2.2(esbuild@0.17.19)
- blake3-wasm: 2.1.5
- chokidar: 4.0.2
- date-fns: 4.1.0
- esbuild: 0.17.19
- itty-time: 1.0.6
- miniflare: 3.20241205.0
- nanoid: 3.3.8
- path-to-regexp: 6.3.0
- resolve: 1.22.9
- selfsigned: 2.4.1
- source-map: 0.6.1
- unenv: unenv-nightly@2.0.0-20241204-140205-a5d5190
- workerd: 1.20241205.0
- xxhash-wasm: 1.1.0
- optionalDependencies:
- '@cloudflare/workers-types': 4.20241216.0
- fsevents: 2.3.3
- transitivePeerDependencies:
- - bufferutil
- - supports-color
- - utf-8-validate
+ tslib@2.8.1: {}
- ws@8.18.0: {}
+ typescript@5.7.2: {}
- xtend@4.0.2: {}
+ ufo@1.5.4: {}
- xxhash-wasm@1.1.0: {}
+ uglify-js@3.19.3:
+ optional: true
- youch@3.3.4:
- dependencies:
- cookie: 0.7.2
- mustache: 4.2.0
- stacktracey: 2.1.8
-
- zod@3.24.1: {}
+ undici-types@6.20.0: {}
+
+ undici@5.28.4:
+ dependencies:
+ '@fastify/busboy': 2.1.1
+
+ unenv-nightly@2.0.0-20241204-140205-a5d5190:
+ dependencies:
+ defu: 6.1.4
+ ohash: 1.1.4
+ pathe: 1.1.2
+ ufo: 1.5.4
+
+ web-streams-polyfill@3.3.3: {}
+
+ webidl-conversions@7.0.0: {}
+
+ whatwg-url@13.0.0:
+ dependencies:
+ tr46: 4.1.1
+ webidl-conversions: 7.0.0
+
+ which@2.0.2:
+ dependencies:
+ isexe: 2.0.0
+
+ wordwrap@1.0.0: {}
+
+ workerd@1.20241205.0:
+ optionalDependencies:
+ '@cloudflare/workerd-darwin-64': 1.20241205.0
+ '@cloudflare/workerd-darwin-arm64': 1.20241205.0
+ '@cloudflare/workerd-linux-64': 1.20241205.0
+ '@cloudflare/workerd-linux-arm64': 1.20241205.0
+ '@cloudflare/workerd-windows-64': 1.20241205.0
+
+ wrangler@3.96.0(@cloudflare/workers-types@4.20241216.0):
+ dependencies:
+ '@cloudflare/kv-asset-handler': 0.3.4
+ '@esbuild-plugins/node-globals-polyfill': 0.2.3(esbuild@0.17.19)
+ '@esbuild-plugins/node-modules-polyfill': 0.2.2(esbuild@0.17.19)
+ blake3-wasm: 2.1.5
+ chokidar: 4.0.2
+ date-fns: 4.1.0
+ esbuild: 0.17.19
+ itty-time: 1.0.6
+ miniflare: 3.20241205.0
+ nanoid: 3.3.8
+ path-to-regexp: 6.3.0
+ resolve: 1.22.9
+ selfsigned: 2.4.1
+ source-map: 0.6.1
+ unenv: unenv-nightly@2.0.0-20241204-140205-a5d5190
+ workerd: 1.20241205.0
+ xxhash-wasm: 1.1.0
+ optionalDependencies:
+ '@cloudflare/workers-types': 4.20241216.0
+ fsevents: 2.3.3
+ transitivePeerDependencies:
+ - bufferutil
+ - supports-color
+ - utf-8-validate
+
+ wrap-ansi@9.0.0:
+ dependencies:
+ ansi-styles: 6.2.1
+ string-width: 7.2.0
+ strip-ansi: 7.1.0
+
+ ws@8.18.0: {}
+
+ xtend@4.0.2: {}
+
+ xxhash-wasm@1.1.0: {}
+
+ yaml@2.6.1: {}
+
+ youch@3.3.4:
+ dependencies:
+ cookie: 0.7.2
+ mustache: 4.2.0
+ stacktracey: 2.1.8
+
+ zod@3.24.1: {}
diff --git a/src/allowlist/index.ts b/src/allowlist/index.ts
index dcf5368..8d90a23 100644
--- a/src/allowlist/index.ts
+++ b/src/allowlist/index.ts
@@ -1,95 +1,97 @@
-import { StarbaseDBConfiguration } from "../handler";
-import { DataSource, QueryResult } from "../types";
+import { StarbaseDBConfiguration } from '../handler'
+import { DataSource, QueryResult } from '../types'
-const parser = new (require("node-sql-parser").Parser)();
+const parser = new (require('node-sql-parser').Parser)()
-let allowlist: string[] | null = null;
-let normalizedAllowlist: any[] | null = null;
+let allowlist: string[] | null = null
+let normalizedAllowlist: any[] | null = null
function normalizeSQL(sql: string) {
- // Remove trailing semicolon. This allows a user to send a SQL statement that has
- // a semicolon where the allow list might not include it but both statements can
- // equate to being the same. AST seems to have an issue with matching the difference
- // when included in one query vs another.
- return sql.trim().replace(/;\s*$/, "");
+ // Remove trailing semicolon. This allows a user to send a SQL statement that has
+ // a semicolon where the allow list might not include it but both statements can
+ // equate to being the same. AST seems to have an issue with matching the difference
+ // when included in one query vs another.
+ return sql.trim().replace(/;\s*$/, '')
}
async function loadAllowlist(dataSource: DataSource): Promise<string[]> {
- try {
- const statement = "SELECT sql_statement FROM tmp_allowlist_queries";
- const result = await dataSource.rpc.executeQuery({ sql: statement }) as QueryResult[]
- return result.map((row) => String(row.sql_statement));
- } catch (error) {
- console.error("Error loading allowlist:", error);
- return [];
- }
+ try {
+ const statement = 'SELECT sql_statement FROM tmp_allowlist_queries'
+ const result = (await dataSource.rpc.executeQuery({
+ sql: statement,
+ })) as QueryResult[]
+ return result.map((row) => String(row.sql_statement))
+ } catch (error) {
+ console.error('Error loading allowlist:', error)
+ return []
+ }
}
export async function isQueryAllowed(opts: {
- sql: string;
- isEnabled: boolean;
- dataSource: DataSource;
- config: StarbaseDBConfiguration;
+ sql: string
+ isEnabled: boolean
+ dataSource: DataSource
+ config: StarbaseDBConfiguration
}): Promise<boolean | Error> {
- const { sql, isEnabled, dataSource, config } = opts;
-
- // If the feature is not turned on then by default the query is allowed
- if (!isEnabled) return true;
-
- // If we are using the administrative AUTHORIZATION token value, this request is allowed.
- // We want database UI's to be able to have more free reign to run queries so we can load
- // tables, run queries, and more. If you want to block queries with the allowlist then we
- // advise you to do so by implementing user authentication with JWT.
- if (config.role === "admin") {
- return true;
- }
-
- allowlist = await loadAllowlist(dataSource);
- normalizedAllowlist = allowlist.map((query) =>
- parser.astify(normalizeSQL(query))
- );
-
- try {
- if (!sql) {
- return Error("No SQL provided for allowlist check");
+ const { sql, isEnabled, dataSource, config } = opts
+
+ // If the feature is not turned on then by default the query is allowed
+ if (!isEnabled) return true
+
+ // If we are using the administrative AUTHORIZATION token value, this request is allowed.
+ // We want database UI's to be able to have more free reign to run queries so we can load
+ // tables, run queries, and more. If you want to block queries with the allowlist then we
+ // advise you to do so by implementing user authentication with JWT.
+ if (config.role === 'admin') {
+ return true
}
- const normalizedQuery = parser.astify(normalizeSQL(sql));
-
- // Compare ASTs while ignoring specific values
- const isCurrentAllowed = normalizedAllowlist?.some((allowedQuery) => {
- // Create deep copies to avoid modifying original ASTs
- const allowedAst = JSON.parse(JSON.stringify(allowedQuery));
- const queryAst = JSON.parse(JSON.stringify(normalizedQuery));
-
- // Remove or normalize value fields from both ASTs
- const normalizeAst = (ast: any) => {
- if (Array.isArray(ast)) {
- ast.forEach(normalizeAst);
- } else if (ast && typeof ast === "object") {
- // Remove or normalize fields that contain specific values
- if ("value" in ast) {
- ast.value = "?";
- }
-
- Object.values(ast).forEach(normalizeAst);
+ allowlist = await loadAllowlist(dataSource)
+ normalizedAllowlist = allowlist.map((query) =>
+ parser.astify(normalizeSQL(query))
+ )
+
+ try {
+ if (!sql) {
+ return Error('No SQL provided for allowlist check')
}
- return ast;
- };
+ const normalizedQuery = parser.astify(normalizeSQL(sql))
- normalizeAst(allowedAst);
- normalizeAst(queryAst);
+ // Compare ASTs while ignoring specific values
+ const isCurrentAllowed = normalizedAllowlist?.some((allowedQuery) => {
+ // Create deep copies to avoid modifying original ASTs
+ const allowedAst = JSON.parse(JSON.stringify(allowedQuery))
+ const queryAst = JSON.parse(JSON.stringify(normalizedQuery))
- return JSON.stringify(allowedAst) === JSON.stringify(queryAst);
- });
+ // Remove or normalize value fields from both ASTs
+ const normalizeAst = (ast: any) => {
+ if (Array.isArray(ast)) {
+ ast.forEach(normalizeAst)
+ } else if (ast && typeof ast === 'object') {
+ // Remove or normalize fields that contain specific values
+ if ('value' in ast) {
+ ast.value = '?'
+ }
- if (!isCurrentAllowed) {
- throw new Error("Query not allowed");
- }
+ Object.values(ast).forEach(normalizeAst)
+ }
+
+ return ast
+ }
- return true;
- } catch (error: any) {
- throw new Error(error?.message ?? "Error");
- }
+ normalizeAst(allowedAst)
+ normalizeAst(queryAst)
+
+ return JSON.stringify(allowedAst) === JSON.stringify(queryAst)
+ })
+
+ if (!isCurrentAllowed) {
+ throw new Error('Query not allowed')
+ }
+
+ return true
+ } catch (error: any) {
+ throw new Error(error?.message ?? 'Error')
+ }
}
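
The hunk above only reflows `isQueryAllowed`, but the value-normalization trick it preserves is easy to lose in the noise. Below is a minimal standalone sketch (not part of this change) of that comparison: literal values in both ASTs are replaced with `?` before the serialized trees are compared, so a query matches an allowlist entry even when its literals differ. The sample queries are hypothetical.

```ts
import { Parser } from 'node-sql-parser'

const parser = new Parser()

// Replace every literal value in the AST with a placeholder so only the
// query shape is compared, mirroring normalizeAst() above.
const stripValues = (node: any): any => {
    if (Array.isArray(node)) {
        node.forEach(stripValues)
    } else if (node && typeof node === 'object') {
        if ('value' in node) node.value = '?'
        Object.values(node).forEach(stripValues)
    }
    return node
}

const normalize = (sql: string) =>
    JSON.stringify(stripValues(parser.astify(sql.trim().replace(/;\s*$/, ''))))

const allowed = 'SELECT * FROM users WHERE id = 1'

// Same shape, different literal and a trailing semicolon -> still matches.
console.log(normalize('SELECT * FROM users WHERE id = 42;') === normalize(allowed)) // true

// Different column list -> rejected.
console.log(normalize('SELECT email FROM users WHERE id = 1') === normalize(allowed)) // false
```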
diff --git a/src/api/index.ts b/src/api/index.ts
index baf2500..88934e7 100644
--- a/src/api/index.ts
+++ b/src/api/index.ts
@@ -3,12 +3,12 @@
// https://starbasedb.YOUR-IDENTIFIER.workers.dev/api/your/path/here
export async function handleApiRequest(request: Request): Promise<Response> {
- const url = new URL(request.url);
+ const url = new URL(request.url)
// EXAMPLE:
// if (request.method === 'GET' && url.pathname === '/api/your/path/here') {
// return new Response('Success', { status: 200 });
// }
- return new Response('Not found', { status: 404 });
-}
\ No newline at end of file
+ return new Response('Not found', { status: 404 })
+}
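
Since `handleApiRequest` is the designated extension point for custom endpoints, a hedged sketch of what a user-defined route could look like may help reviewers; the `/api/status` path and its payload are purely illustrative and not part of this diff.

```ts
export async function handleApiRequest(request: Request): Promise<Response> {
    const url = new URL(request.url)

    // Hypothetical endpoint added by a user of the template.
    if (request.method === 'GET' && url.pathname === '/api/status') {
        return new Response(JSON.stringify({ ok: true }), {
            status: 200,
            headers: { 'Content-Type': 'application/json' },
        })
    }

    return new Response('Not found', { status: 404 })
}
```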
diff --git a/src/cache/index.ts b/src/cache/index.ts
index 2cc1ffa..e35d54e 100644
--- a/src/cache/index.ts
+++ b/src/cache/index.ts
@@ -1,79 +1,81 @@
-import { StarbaseDBConfiguration } from "../handler";
-import { DataSource, QueryResult } from "../types";
-import sqlparser from "node-sql-parser";
-const parser = new sqlparser.Parser();
+import { StarbaseDBConfiguration } from '../handler'
+import { DataSource, QueryResult } from '../types'
+import sqlparser from 'node-sql-parser'
+const parser = new sqlparser.Parser()
function hasModifyingStatement(ast: any): boolean {
- // Check if current node is a modifying statement
- if (
- ast.type &&
- ["insert", "update", "delete"].includes(ast.type.toLowerCase())
- ) {
- return true;
- }
-
- // Recursively check all properties of the AST
- for (const key in ast) {
- if (typeof ast[key] === "object" && ast[key] !== null) {
- if (Array.isArray(ast[key])) {
- if (ast[key].some((item) => hasModifyingStatement(item))) {
- return true;
+ // Check if current node is a modifying statement
+ if (
+ ast.type &&
+ ['insert', 'update', 'delete'].includes(ast.type.toLowerCase())
+ ) {
+ return true
+ }
+
+ // Recursively check all properties of the AST
+ for (const key in ast) {
+ if (typeof ast[key] === 'object' && ast[key] !== null) {
+ if (Array.isArray(ast[key])) {
+ if (ast[key].some((item) => hasModifyingStatement(item))) {
+ return true
+ }
+ } else if (hasModifyingStatement(ast[key])) {
+ return true
+ }
}
- } else if (hasModifyingStatement(ast[key])) {
- return true;
- }
}
- }
- return false;
+ return false
}
export async function beforeQueryCache(opts: {
- sql: string;
- params?: unknown[];
- dataSource: DataSource;
+ sql: string
+ params?: unknown[]
+ dataSource: DataSource
}): Promise<any | null> {
- const { sql, params = [], dataSource } = opts;
+ const { sql, params = [], dataSource } = opts
- // Currently we do not support caching queries that have dynamic parameters
- if (params.length) {
- return null;
- }
+ // Currently we do not support caching queries that have dynamic parameters
+ if (params.length) {
+ return null
+ }
- // If it's an internal request, or cache is not enabled, return null.
- if (dataSource.source === "internal" || !dataSource.cache) {
- return null;
- }
+ // If it's an internal request, or cache is not enabled, return null.
+ if (dataSource.source === 'internal' || !dataSource.cache) {
+ return null
+ }
- const dialect =
- dataSource.source === "external" ? dataSource.external!.dialect : "sqlite";
+ const dialect =
+ dataSource.source === 'external'
+ ? dataSource.external!.dialect
+ : 'sqlite'
- let ast = parser.astify(sql, { database: dialect });
- if (hasModifyingStatement(ast)) return null;
+ let ast = parser.astify(sql, { database: dialect })
+ if (hasModifyingStatement(ast)) return null
- const fetchCacheStatement = `SELECT timestamp, ttl, query, results FROM tmp_cache WHERE query = ?`;
+ const fetchCacheStatement = `SELECT timestamp, ttl, query, results FROM tmp_cache WHERE query = ?`
- type QueryResult = {
- timestamp: string;
- ttl: number;
- results: string;
- };
+ type QueryResult = {
+ timestamp: string
+ ttl: number
+ results: string
+ }
- const result = await dataSource.rpc.executeQuery({
- sql: fetchCacheStatement,
- params: [sql],
- }) as any[];
+ const result = (await dataSource.rpc.executeQuery({
+ sql: fetchCacheStatement,
+ params: [sql],
+ })) as any[]
- if (result?.length) {
- const { timestamp, ttl, results } = result[0] as QueryResult;
- const expirationTime = new Date(timestamp).getTime() + ttl * 1000;
+ if (result?.length) {
+ const { timestamp, ttl, results } = result[0] as QueryResult
+ const expirationTime = new Date(timestamp).getTime() + ttl * 1000
- if (Date.now() < expirationTime) {
- return JSON.parse(results);
+ if (Date.now() < expirationTime) {
+ return JSON.parse(results)
+ }
}
- }
- return null;
+ return null
}
// Serialized RPC arguments are limited to 1MiB in size at the moment for Cloudflare
@@ -83,56 +85,56 @@ export async function beforeQueryCache(opts: {
// response in a safe way for our use case. Another option is another service for queues
// or another way to ingest it directly to the Durable Object.
export async function afterQueryCache(opts: {
- sql: string;
- params: unknown[] | undefined;
- result: unknown;
- dataSource: DataSource;
+ sql: string
+ params: unknown[] | undefined
+ result: unknown
+ dataSource: DataSource
}) {
- const { sql, params, result, dataSource } = opts;
-
- // Currently we do not support caching queries that have dynamic parameters
- if (params?.length) return;
+ const { sql, params, result, dataSource } = opts
- // If it's an internal request, or cache is not enabled, return null.
- if (dataSource.source === "internal" || !dataSource.cache) {
- return null;
- }
+ // Currently we do not support caching queries that have dynamic parameters
+ if (params?.length) return
- try {
- const dialect =
- dataSource.source === "external"
- ? dataSource.external!.dialect
- : "sqlite";
-
- let ast = parser.astify(sql, { database: dialect });
-
- // If any modifying query exists within our SQL statement then we shouldn't proceed
- if (hasModifyingStatement(ast)) return;
-
- const timestamp = Date.now();
- const results = JSON.stringify(result);
-
- const exists = await dataSource.rpc.executeQuery({
- sql: "SELECT 1 FROM tmp_cache WHERE query = ? LIMIT 1",
- params: [sql],
- }) as QueryResult[];
+ // If it's an internal request, or cache is not enabled, return null.
+ if (dataSource.source === 'internal' || !dataSource.cache) {
+ return null
+ }
- const query = exists?.length
- ? {
- sql: "UPDATE tmp_cache SET timestamp = ?, results = ? WHERE query = ?",
- params: [timestamp, results, sql],
- }
- : {
- sql: "INSERT INTO tmp_cache (timestamp, ttl, query, results) VALUES (?, ?, ?, ?)",
- params: [timestamp, dataSource.cacheTTL ?? 60, sql, results],
- };
-
- await dataSource.rpc.executeQuery({
- sql: query.sql,
- params: query.params,
- });
- } catch (error) {
- console.error("Error in cache operation:", error);
- return;
- }
+ try {
+ const dialect =
+ dataSource.source === 'external'
+ ? dataSource.external!.dialect
+ : 'sqlite'
+
+ let ast = parser.astify(sql, { database: dialect })
+
+ // If any modifying query exists within our SQL statement then we shouldn't proceed
+ if (hasModifyingStatement(ast)) return
+
+ const timestamp = Date.now()
+ const results = JSON.stringify(result)
+
+ const exists = (await dataSource.rpc.executeQuery({
+ sql: 'SELECT 1 FROM tmp_cache WHERE query = ? LIMIT 1',
+ params: [sql],
+ })) as QueryResult[]
+
+ const query = exists?.length
+ ? {
+ sql: 'UPDATE tmp_cache SET timestamp = ?, results = ? WHERE query = ?',
+ params: [timestamp, results, sql],
+ }
+ : {
+ sql: 'INSERT INTO tmp_cache (timestamp, ttl, query, results) VALUES (?, ?, ?, ?)',
+ params: [timestamp, dataSource.cacheTTL ?? 60, sql, results],
+ }
+
+ await dataSource.rpc.executeQuery({
+ sql: query.sql,
+ params: query.params,
+ })
+ } catch (error) {
+ console.error('Error in cache operation:', error)
+ return
+ }
}
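
For context on how the two cache hooks reformatted above are meant to be sequenced, here is a small sketch; the `dataSource` value is assumed to be an already-configured `DataSource` with `cache` enabled, and the relative import path is illustrative.

```ts
import { beforeQueryCache, afterQueryCache } from './cache'

async function cachedSelect(dataSource: any, sql: string) {
    // 1. Serve a cached result when a fresh tmp_cache entry exists for this SQL text.
    const cached = await beforeQueryCache({ sql, dataSource })
    if (cached !== null) return cached

    // 2. Otherwise run the query against the Durable Object...
    const result = await dataSource.rpc.executeQuery({ sql })

    // 3. ...and store the result so subsequent identical reads can hit the cache.
    await afterQueryCache({ sql, params: undefined, result, dataSource })
    return result
}
```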
diff --git a/src/cors.ts b/src/cors.ts
index 7e0f78b..660a26c 100644
--- a/src/cors.ts
+++ b/src/cors.ts
@@ -1,14 +1,14 @@
export const corsHeaders = {
- "Access-Control-Allow-Origin": "*",
- "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
- "Access-Control-Allow-Headers":
- "Authorization, Content-Type, X-Starbase-Source, X-Data-Source",
- "Access-Control-Max-Age": "86400",
-};
+ 'Access-Control-Allow-Origin': '*',
+ 'Access-Control-Allow-Methods': 'GET, POST, OPTIONS',
+ 'Access-Control-Allow-Headers':
+ 'Authorization, Content-Type, X-Starbase-Source, X-Data-Source',
+ 'Access-Control-Max-Age': '86400',
+}
export function corsPreflight(): Response {
- return new Response(null, {
- status: 204,
- headers: corsHeaders,
- });
+ return new Response(null, {
+ status: 204,
+ headers: corsHeaders,
+ })
}
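
The exported `corsHeaders` object can also be spread onto ordinary responses, not only the preflight reply; a minimal sketch:

```ts
import { corsHeaders } from './cors'

// Wrap a JSON payload with the shared CORS headers so cross-origin browser
// clients can read it.
const jsonWithCors = (body: unknown, status = 200) =>
    new Response(JSON.stringify(body), {
        status,
        headers: { 'Content-Type': 'application/json', ...corsHeaders },
    })
```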
diff --git a/src/do.ts b/src/do.ts
index 111d3ff..8be3f01 100644
--- a/src/do.ts
+++ b/src/do.ts
@@ -1,47 +1,47 @@
-import { DurableObject } from "cloudflare:workers";
+import { DurableObject } from 'cloudflare:workers'
// import { OperationQueueItem } from "./operation";
// import { createResponse } from "./utils";
export class StarbaseDBDurableObject extends DurableObject {
- // Durable storage for the SQL database
- public sql: SqlStorage;
- public storage: DurableObjectStorage;
-
- // // Queue of operations to be processed, with each operation containing a list of queries to be executed
- // private operationQueue: Array = [];
-
- // // Flag to indicate if an operation is currently being processed
- // private processingOperation: { value: boolean } = { value: false };
-
- /**
- * The constructor is invoked once upon creation of the Durable Object, i.e. the first call to
- * `DurableObjectStub::get` for a given identifier (no-op constructors can be omitted)
- *
- * @param ctx - The interface for interacting with Durable Object state
- * @param env - The interface to reference bindings declared in wrangler.toml
- */
- constructor(ctx: DurableObjectState, env: Env) {
- super(ctx, env);
- this.sql = ctx.storage.sql;
- this.storage = ctx.storage;
-
- // Install default necessary `tmp_` tables for various features here.
- const cacheStatement = `
+ // Durable storage for the SQL database
+ public sql: SqlStorage
+ public storage: DurableObjectStorage
+
+ // // Queue of operations to be processed, with each operation containing a list of queries to be executed
+ // private operationQueue: Array = [];
+
+ // // Flag to indicate if an operation is currently being processed
+ // private processingOperation: { value: boolean } = { value: false };
+
+ /**
+ * The constructor is invoked once upon creation of the Durable Object, i.e. the first call to
+ * `DurableObjectStub::get` for a given identifier (no-op constructors can be omitted)
+ *
+ * @param ctx - The interface for interacting with Durable Object state
+ * @param env - The interface to reference bindings declared in wrangler.toml
+ */
+ constructor(ctx: DurableObjectState, env: Env) {
+ super(ctx, env)
+ this.sql = ctx.storage.sql
+ this.storage = ctx.storage
+
+ // Install default necessary `tmp_` tables for various features here.
+ const cacheStatement = `
CREATE TABLE IF NOT EXISTS tmp_cache (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"timestamp" REAL NOT NULL,
"ttl" INTEGER NOT NULL,
"query" TEXT UNIQUE NOT NULL,
"results" TEXT
- );`;
+ );`
- const allowlistStatement = `
+ const allowlistStatement = `
CREATE TABLE IF NOT EXISTS tmp_allowlist_queries (
id INTEGER PRIMARY KEY AUTOINCREMENT,
sql_statement TEXT NOT NULL
- )`;
+ )`
- const rlsStatement = `
+ const rlsStatement = `
CREATE TABLE IF NOT EXISTS tmp_rls_policies (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"actions" TEXT NOT NULL CHECK(actions IN ('SELECT', 'UPDATE', 'INSERT', 'DELETE')),
@@ -51,207 +51,210 @@ export class StarbaseDBDurableObject extends DurableObject {
"value" TEXT NOT NULL,
"value_type" TEXT NOT NULL DEFAULT 'string',
"operator" TEXT DEFAULT '='
- )`;
-
- this.executeQuery({ sql: cacheStatement });
- this.executeQuery({ sql: allowlistStatement });
- this.executeQuery({ sql: rlsStatement });
- }
-
- init() {
- return {
- executeQuery: this.executeQuery.bind(this),
- };
- }
-
- // TODO: Hiding for now as it's not used in the current implementation
- // /**
- // * Execute a raw SQL query on the database, typically used for external requests
- // * from other service bindings (e.g. auth). This serves as an exposed function for
- // * other service bindings to query the database without having to have knowledge of
- // * the current operation queue or processing state.
- // *
- // * @param sql - The SQL query to execute.
- // * @param params - Optional parameters for the SQL query.
- // * @returns A response containing the query result or an error message.
- // */
- // private async executeExternalQuery(
- // sql: string,
- // params: any[] | undefined
- // ): Promise {
- // try {
- // const queries = [{ sql, params }];
- // const response = await this.enqueueOperation(
- // queries,
- // false,
- // false,
- // this.operationQueue,
- // () =>
- // this.processNextOperation(
- // this.sql,
- // this.operationQueue,
- // this.ctx,
- // this.processingOperation
- // )
- // );
-
- // return response;
- // } catch (error: any) {
- // console.error("Execute External Query Error:", error);
- // return null;
- // }
- // }
-
- private async executeRawQuery<
- T extends Record<string, SqlStorageValue> = Record<string, SqlStorageValue>
- >(opts: { sql: string; params?: unknown[] }) {
- const { sql, params } = opts;
-
- try {
- let cursor;
-
- if (params && params.length) {
- cursor = this.sql.exec(sql, ...params);
- } else {
- cursor = this.sql.exec(sql);
- }
-
- return cursor;
- } catch (error) {
- console.error("SQL Execution Error:", error);
- throw error;
+ )`
+
+ this.executeQuery({ sql: cacheStatement })
+ this.executeQuery({ sql: allowlistStatement })
+ this.executeQuery({ sql: rlsStatement })
+ }
+
+ init() {
+ return {
+ executeQuery: this.executeQuery.bind(this),
+ }
}
- }
-
- public async executeQuery(opts: {
- sql: string;
- params?: unknown[];
- isRaw?: boolean;
- }) {
- const cursor = await this.executeRawQuery(opts);
-
- if (opts.isRaw) {
- return {
- columns: cursor.columnNames,
- rows: Array.from(cursor.raw()),
- meta: {
- rows_read: cursor.rowsRead,
- rows_written: cursor.rowsWritten,
- },
- };
+
+ // TODO: Hiding for now as it's not used in the current implementation
+ // /**
+ // * Execute a raw SQL query on the database, typically used for external requests
+ // * from other service bindings (e.g. auth). This serves as an exposed function for
+ // * other service bindings to query the database without having to have knowledge of
+ // * the current operation queue or processing state.
+ // *
+ // * @param sql - The SQL query to execute.
+ // * @param params - Optional parameters for the SQL query.
+ // * @returns A response containing the query result or an error message.
+ // */
+ // private async executeExternalQuery(
+ // sql: string,
+ // params: any[] | undefined
+ // ): Promise {
+ // try {
+ // const queries = [{ sql, params }];
+ // const response = await this.enqueueOperation(
+ // queries,
+ // false,
+ // false,
+ // this.operationQueue,
+ // () =>
+ // this.processNextOperation(
+ // this.sql,
+ // this.operationQueue,
+ // this.ctx,
+ // this.processingOperation
+ // )
+ // );
+
+ // return response;
+ // } catch (error: any) {
+ // console.error("Execute External Query Error:", error);
+ // return null;
+ // }
+ // }
+
+ private async executeRawQuery<
+ T extends Record<string, SqlStorageValue> = Record<
+ string,
+ SqlStorageValue
+ >,
+ >(opts: { sql: string; params?: unknown[] }) {
+ const { sql, params } = opts
+
+ try {
+ let cursor
+
+ if (params && params.length) {
+ cursor = this.sql.exec(sql, ...params)
+ } else {
+ cursor = this.sql.exec(sql)
+ }
+
+ return cursor
+ } catch (error) {
+ console.error('SQL Execution Error:', error)
+ throw error
+ }
}
- return cursor.toArray();
- }
-
- public executeTransaction(
- queries: { sql: string; params?: unknown[] }[],
- isRaw: boolean
- ): unknown[] {
- return this.storage.transactionSync(() => {
- const results = [];
-
- try {
- for (const queryObj of queries) {
- const { sql, params } = queryObj;
- const result = this.executeQuery({ sql, params, isRaw });
- results.push(result);
+ public async executeQuery(opts: {
+ sql: string
+ params?: unknown[]
+ isRaw?: boolean
+ }) {
+ const cursor = await this.executeRawQuery(opts)
+
+ if (opts.isRaw) {
+ return {
+ columns: cursor.columnNames,
+ rows: Array.from(cursor.raw()),
+ meta: {
+ rows_read: cursor.rowsRead,
+ rows_written: cursor.rowsWritten,
+ },
+ }
}
- return results;
- } catch (error) {
- console.error("Transaction Execution Error:", error);
- throw error;
- }
- });
- }
-
- // TODO: Hiding for now as it's not used in the current implementation
- // private enqueueOperation(
- // queries: { sql: string; params?: any[] }[],
- // isTransaction: boolean,
- // isRaw: boolean,
- // operationQueue: any[],
- // processNextOperation: () => Promise
- // ): Promise<{ result?: any; error?: string | undefined; status: number }> {
- // const MAX_WAIT_TIME = 25000;
- // return new Promise((resolve, reject) => {
- // const timeout = setTimeout(() => {
- // reject(createResponse(undefined, "Operation timed out.", 503));
- // }, MAX_WAIT_TIME);
-
- // operationQueue.push({
- // queries,
- // isTransaction,
- // isRaw,
- // resolve: (value: any) => {
- // clearTimeout(timeout);
-
- // resolve({
- // result: value,
- // error: undefined,
- // status: 200,
- // });
- // },
- // reject: (reason?: any) => {
- // clearTimeout(timeout);
-
- // reject({
- // result: undefined,
- // error: reason ?? "Operation failed.",
- // status: 500,
- // });
- // },
- // });
-
- // processNextOperation().catch((err) => {
- // console.error("Error processing operation queue:", err);
- // });
- // });
- // }
-
- // TODO: Hiding for now as it's not used in the current implementation
- // private async processNextOperation(
- // sqlInstance: any,
- // operationQueue: OperationQueueItem[],
- // ctx: any,
- // processingOperation: { value: boolean }
- // ) {
- // if (processingOperation.value) {
- // // Already processing an operation
- // return;
- // }
-
- // if (operationQueue.length === 0) {
- // // No operations remaining to process
- // return;
- // }
-
- // processingOperation.value = true;
- // const { queries, isTransaction, isRaw, resolve, reject } =
- // operationQueue.shift()!;
-
- // try {
- // let result;
-
- // if (isTransaction) {
- // result = await this.executeTransaction(queries, isRaw);
- // } else {
- // const { sql, params } = queries[0];
- // result = this.executeQuery({ sql, params });
- // }
-
- // resolve(result);
- // } catch (error: any) {
- // reject(error.message || "Operation failed.");
- // } finally {
- // processingOperation.value = false;
- // await this.processNextOperation(
- // sqlInstance,
- // operationQueue,
- // ctx,
- // processingOperation
- // );
- // }
- // }
+ return cursor.toArray()
+ }
+
+ public executeTransaction(
+ queries: { sql: string; params?: unknown[] }[],
+ isRaw: boolean
+ ): unknown[] {
+ return this.storage.transactionSync(() => {
+ const results = []
+
+ try {
+ for (const queryObj of queries) {
+ const { sql, params } = queryObj
+ const result = this.executeQuery({ sql, params, isRaw })
+ results.push(result)
+ }
+
+ return results
+ } catch (error) {
+ console.error('Transaction Execution Error:', error)
+ throw error
+ }
+ })
+ }
+
+ // TODO: Hiding for now as it's not used in the current implementation
+ // private enqueueOperation(
+ // queries: { sql: string; params?: any[] }[],
+ // isTransaction: boolean,
+ // isRaw: boolean,
+ // operationQueue: any[],
+ // processNextOperation: () => Promise
+ // ): Promise<{ result?: any; error?: string | undefined; status: number }> {
+ // const MAX_WAIT_TIME = 25000;
+ // return new Promise((resolve, reject) => {
+ // const timeout = setTimeout(() => {
+ // reject(createResponse(undefined, "Operation timed out.", 503));
+ // }, MAX_WAIT_TIME);
+
+ // operationQueue.push({
+ // queries,
+ // isTransaction,
+ // isRaw,
+ // resolve: (value: any) => {
+ // clearTimeout(timeout);
+
+ // resolve({
+ // result: value,
+ // error: undefined,
+ // status: 200,
+ // });
+ // },
+ // reject: (reason?: any) => {
+ // clearTimeout(timeout);
+
+ // reject({
+ // result: undefined,
+ // error: reason ?? "Operation failed.",
+ // status: 500,
+ // });
+ // },
+ // });
+
+ // processNextOperation().catch((err) => {
+ // console.error("Error processing operation queue:", err);
+ // });
+ // });
+ // }
+
+ // TODO: Hiding for now as it's not used in the current implementation
+ // private async processNextOperation(
+ // sqlInstance: any,
+ // operationQueue: OperationQueueItem[],
+ // ctx: any,
+ // processingOperation: { value: boolean }
+ // ) {
+ // if (processingOperation.value) {
+ // // Already processing an operation
+ // return;
+ // }
+
+ // if (operationQueue.length === 0) {
+ // // No operations remaining to process
+ // return;
+ // }
+
+ // processingOperation.value = true;
+ // const { queries, isTransaction, isRaw, resolve, reject } =
+ // operationQueue.shift()!;
+
+ // try {
+ // let result;
+
+ // if (isTransaction) {
+ // result = await this.executeTransaction(queries, isRaw);
+ // } else {
+ // const { sql, params } = queries[0];
+ // result = this.executeQuery({ sql, params });
+ // }
+
+ // resolve(result);
+ // } catch (error: any) {
+ // reject(error.message || "Operation failed.");
+ // } finally {
+ // processingOperation.value = false;
+ // await this.processNextOperation(
+ // sqlInstance,
+ // operationQueue,
+ // ctx,
+ // processingOperation
+ // );
+ // }
+ // }
}
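
The Durable Object above exposes `executeQuery` both through `init()` and as a public class method. A hedged sketch of reaching it from the Worker side follows; the binding name `DURABLE_OBJECT` and the `idFromName('default')` choice are assumptions, not taken from this diff.

```ts
interface Env {
    DURABLE_OBJECT: DurableObjectNamespace
}

export default {
    async fetch(request: Request, env: Env): Promise<Response> {
        const id = env.DURABLE_OBJECT.idFromName('default')
        const stub = env.DURABLE_OBJECT.get(id)

        // With Workers RPC (the class extends DurableObject from
        // 'cloudflare:workers'), public methods can be invoked on the stub.
        const rows = await (stub as any).executeQuery({ sql: 'SELECT 1 AS ok' })

        return new Response(JSON.stringify(rows), {
            headers: { 'Content-Type': 'application/json' },
        })
    },
}
```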
diff --git a/src/export/csv.ts b/src/export/csv.ts
index dffc2dc..0f5cff9 100644
--- a/src/export/csv.ts
+++ b/src/export/csv.ts
@@ -1,7 +1,7 @@
-import { getTableData, createExportResponse } from './index';
-import { createResponse } from '../utils';
-import { DataSource } from '../types';
-import { StarbaseDBConfiguration } from '../handler';
+import { getTableData, createExportResponse } from './index'
+import { createResponse } from '../utils'
+import { DataSource } from '../types'
+import { StarbaseDBConfiguration } from '../handler'
export async function exportTableToCsvRoute(
tableName: string,
@@ -9,32 +9,46 @@ export async function exportTableToCsvRoute(
config: StarbaseDBConfiguration
): Promise<Response> {
try {
- const data = await getTableData(tableName, dataSource, config);
+ const data = await getTableData(tableName, dataSource, config)
if (data === null) {
- return createResponse(undefined, `Table '${tableName}' does not exist.`, 404);
+ return createResponse(
+ undefined,
+ `Table '${tableName}' does not exist.`,
+ 404
+ )
}
// Convert the result to CSV
- let csvContent = '';
+ let csvContent = ''
if (data.length > 0) {
// Add headers
- csvContent += Object.keys(data[0]).join(',') + '\n';
+ csvContent += Object.keys(data[0]).join(',') + '\n'
// Add data rows
data.forEach((row: any) => {
- csvContent += Object.values(row).map(value => {
- if (typeof value === 'string' && value.includes(',')) {
- return `"${value.replace(/"/g, '""')}"`;
- }
- return value;
- }).join(',') + '\n';
- });
+ csvContent +=
+ Object.values(row)
+ .map((value) => {
+ if (
+ typeof value === 'string' &&
+ value.includes(',')
+ ) {
+ return `"${value.replace(/"/g, '""')}"`
+ }
+ return value
+ })
+ .join(',') + '\n'
+ })
}
- return createExportResponse(csvContent, `${tableName}_export.csv`, 'text/csv');
+ return createExportResponse(
+ csvContent,
+ `${tableName}_export.csv`,
+ 'text/csv'
+ )
} catch (error: any) {
- console.error('CSV Export Error:', error);
- return createResponse(undefined, 'Failed to export table to CSV', 500);
+ console.error('CSV Export Error:', error)
+ return createResponse(undefined, 'Failed to export table to CSV', 500)
}
}
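
The CSV export route itself is unchanged in behavior; for reviewers who want to exercise it, an illustrative client call is shown below. The host, table name, and bearer token are placeholders.

```ts
// Fetch a table as CSV from the export route; the response carries a
// Content-Disposition attachment header set by createExportResponse().
const res = await fetch(
    'https://starbasedb.YOUR-ID-HERE.workers.dev/export/csv/users',
    { headers: { Authorization: 'Bearer YOUR-ADMIN-TOKEN' } }
)
const csv = await res.text()
```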
diff --git a/src/export/dump.ts b/src/export/dump.ts
index aa492eb..91a2e89 100644
--- a/src/export/dump.ts
+++ b/src/export/dump.ts
@@ -1,7 +1,7 @@
-import { executeOperation } from '.';
-import { StarbaseDBConfiguration } from '../handler';
-import { DataSource } from '../types';
-import { createResponse } from '../utils';
+import { executeOperation } from '.'
+import { StarbaseDBConfiguration } from '../handler'
+import { DataSource } from '../types'
+import { createResponse } from '../utils'
export async function dumpDatabaseRoute(
dataSource: DataSource,
@@ -9,45 +9,63 @@ export async function dumpDatabaseRoute(
): Promise<Response> {
try {
// Get all table names
- const tablesResult = await executeOperation([{ sql: "SELECT name FROM sqlite_master WHERE type='table';" }], dataSource, config)
-
- const tables = tablesResult.map((row: any) => row.name);
- let dumpContent = "SQLite format 3\0"; // SQLite file header
+ const tablesResult = await executeOperation(
+ [{ sql: "SELECT name FROM sqlite_master WHERE type='table';" }],
+ dataSource,
+ config
+ )
+
+ const tables = tablesResult.map((row: any) => row.name)
+ let dumpContent = 'SQLite format 3\0' // SQLite file header
// Iterate through all tables
for (const table of tables) {
// Get table schema
- const schemaResult = await executeOperation([{ sql: `SELECT sql FROM sqlite_master WHERE type='table' AND name='${table}';` }], dataSource, config)
+ const schemaResult = await executeOperation(
+ [
+ {
+ sql: `SELECT sql FROM sqlite_master WHERE type='table' AND name='${table}';`,
+ },
+ ],
+ dataSource,
+ config
+ )
if (schemaResult.length) {
- const schema = schemaResult[0].sql;
- dumpContent += `\n-- Table: ${table}\n${schema};\n\n`;
+ const schema = schemaResult[0].sql
+ dumpContent += `\n-- Table: ${table}\n${schema};\n\n`
}
// Get table data
- const dataResult = await executeOperation([{ sql: `SELECT * FROM ${table};` }], dataSource, config)
+ const dataResult = await executeOperation(
+ [{ sql: `SELECT * FROM ${table};` }],
+ dataSource,
+ config
+ )
for (const row of dataResult) {
- const values = Object.values(row).map(value =>
- typeof value === 'string' ? `'${value.replace(/'/g, "''")}'` : value
- );
- dumpContent += `INSERT INTO ${table} VALUES (${values.join(', ')});\n`;
+ const values = Object.values(row).map((value) =>
+ typeof value === 'string'
+ ? `'${value.replace(/'/g, "''")}'`
+ : value
+ )
+ dumpContent += `INSERT INTO ${table} VALUES (${values.join(', ')});\n`
}
- dumpContent += '\n';
+ dumpContent += '\n'
}
// Create a Blob from the dump content
- const blob = new Blob([dumpContent], { type: 'application/x-sqlite3' });
+ const blob = new Blob([dumpContent], { type: 'application/x-sqlite3' })
const headers = new Headers({
'Content-Type': 'application/x-sqlite3',
'Content-Disposition': 'attachment; filename="database_dump.sql"',
- });
+ })
- return new Response(blob, { headers });
+ return new Response(blob, { headers })
} catch (error: any) {
- console.error('Database Dump Error:', error);
- return createResponse(undefined, 'Failed to create database dump', 500);
+ console.error('Database Dump Error:', error)
+ return createResponse(undefined, 'Failed to create database dump', 500)
}
}
diff --git a/src/export/index.ts b/src/export/index.ts
index e45ed13..6cf9f3a 100644
--- a/src/export/index.ts
+++ b/src/export/index.ts
@@ -1,15 +1,19 @@
-import { DataSource } from "../types";
-import { executeTransaction } from "../operation";
-import { StarbaseDBConfiguration } from "../handler";
+import { DataSource } from '../types'
+import { executeTransaction } from '../operation'
+import { StarbaseDBConfiguration } from '../handler'
-export async function executeOperation(queries: { sql: string, params?: any[] }[], dataSource: DataSource, config: StarbaseDBConfiguration): Promise<any> {
+export async function executeOperation(
+ queries: { sql: string; params?: any[] }[],
+ dataSource: DataSource,
+ config: StarbaseDBConfiguration
+): Promise<any> {
const results: any[] = (await executeTransaction({
queries,
isRaw: false,
dataSource,
- config
- })) as any[];
- return results?.length > 0 ? results[0] : undefined;
+ config,
+ })) as any[]
+ return results?.length > 0 ? results[0] : undefined
}
export async function getTableData(
@@ -19,29 +23,45 @@ export async function getTableData(
): Promise<any[] | null> {
try {
// Verify if the table exists
- const tableExistsResult = await executeOperation([{ sql: `SELECT name FROM sqlite_master WHERE type='table' AND name=?;`, params: [tableName] }], dataSource, config)
+ const tableExistsResult = await executeOperation(
+ [
+ {
+ sql: `SELECT name FROM sqlite_master WHERE type='table' AND name=?;`,
+ params: [tableName],
+ },
+ ],
+ dataSource,
+ config
+ )
if (tableExistsResult.length === 0) {
- return null;
+ return null
}
// Get table data
- const dataResult = await executeOperation([{ sql: `SELECT * FROM ${tableName};` }], dataSource, config)
- return dataResult;
+ const dataResult = await executeOperation(
+ [{ sql: `SELECT * FROM ${tableName};` }],
+ dataSource,
+ config
+ )
+ return dataResult
} catch (error: any) {
- console.error('Table Data Fetch Error:', error);
- throw error;
+ console.error('Table Data Fetch Error:', error)
+ throw error
}
}
-export function createExportResponse(data: any, fileName: string, contentType: string): Response {
- const blob = new Blob([data], { type: contentType });
+export function createExportResponse(
+ data: any,
+ fileName: string,
+ contentType: string
+): Response {
+ const blob = new Blob([data], { type: contentType })
const headers = new Headers({
'Content-Type': contentType,
'Content-Disposition': `attachment; filename="${fileName}"`,
- });
+ })
- return new Response(blob, { headers });
+ return new Response(blob, { headers })
}
-
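
Because `getTableData` and `createExportResponse` are the shared building blocks for the JSON and CSV routes, adding another export format stays mechanical. A sketch of a hypothetical NDJSON route (not part of this change), written as if it lived alongside the others in `src/export/`:

```ts
import { getTableData, createExportResponse } from './index'
import { createResponse } from '../utils'
import { DataSource } from '../types'
import { StarbaseDBConfiguration } from '../handler'

export async function exportTableToNdjsonRoute(
    tableName: string,
    dataSource: DataSource,
    config: StarbaseDBConfiguration
): Promise<Response> {
    const data = await getTableData(tableName, dataSource, config)

    if (data === null) {
        return createResponse(
            undefined,
            `Table '${tableName}' does not exist.`,
            404
        )
    }

    // One JSON document per line.
    const ndjson = data.map((row: any) => JSON.stringify(row)).join('\n')
    return createExportResponse(
        ndjson,
        `${tableName}_export.ndjson`,
        'application/x-ndjson'
    )
}
```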
diff --git a/src/export/json.ts b/src/export/json.ts
index b35de8e..c0ab811 100644
--- a/src/export/json.ts
+++ b/src/export/json.ts
@@ -1,7 +1,7 @@
-import { getTableData, createExportResponse } from './index';
-import { createResponse } from '../utils';
-import { DataSource } from '../types';
-import { StarbaseDBConfiguration } from '../handler';
+import { getTableData, createExportResponse } from './index'
+import { createResponse } from '../utils'
+import { DataSource } from '../types'
+import { StarbaseDBConfiguration } from '../handler'
export async function exportTableToJsonRoute(
tableName: string,
@@ -9,18 +9,26 @@ export async function exportTableToJsonRoute(
config: StarbaseDBConfiguration
): Promise<Response> {
try {
- const data = await getTableData(tableName, dataSource, config);
+ const data = await getTableData(tableName, dataSource, config)
if (data === null) {
- return createResponse(undefined, `Table '${tableName}' does not exist.`, 404);
+ return createResponse(
+ undefined,
+ `Table '${tableName}' does not exist.`,
+ 404
+ )
}
// Convert the result to JSON
- const jsonData = JSON.stringify(data, null, 4);
+ const jsonData = JSON.stringify(data, null, 4)
- return createExportResponse(jsonData, `${tableName}_export.json`, 'application/json');
+ return createExportResponse(
+ jsonData,
+ `${tableName}_export.json`,
+ 'application/json'
+ )
} catch (error: any) {
- console.error('JSON Export Error:', error);
- return createResponse(undefined, 'Failed to export table to JSON', 500);
+ console.error('JSON Export Error:', error)
+ return createResponse(undefined, 'Failed to export table to JSON', 500)
}
}
diff --git a/src/handler.ts b/src/handler.ts
index a44a244..564c669 100644
--- a/src/handler.ts
+++ b/src/handler.ts
@@ -1,326 +1,353 @@
-import { Hono } from "hono";
-import { createMiddleware } from "hono/factory";
-import { validator } from "hono/validator";
-
-import { DataSource } from "./types";
-import { LiteREST } from "./literest";
-import { executeQuery, executeTransaction } from "./operation";
-import { createResponse, QueryRequest, QueryTransactionRequest } from "./utils";
-import { dumpDatabaseRoute } from "./export/dump";
-import { exportTableToJsonRoute } from "./export/json";
-import { exportTableToCsvRoute } from "./export/csv";
-import { importDumpRoute } from "./import/dump";
-import { importTableFromJsonRoute } from "./import/json";
-import { importTableFromCsvRoute } from "./import/csv";
-import { handleStudioRequest } from "./studio";
-import { corsPreflight } from "./cors";
-import { handleApiRequest } from "./api";
+import { Hono } from 'hono'
+import { createMiddleware } from 'hono/factory'
+import { validator } from 'hono/validator'
+
+import { DataSource } from './types'
+import { LiteREST } from './literest'
+import { executeQuery, executeTransaction } from './operation'
+import { createResponse, QueryRequest, QueryTransactionRequest } from './utils'
+import { dumpDatabaseRoute } from './export/dump'
+import { exportTableToJsonRoute } from './export/json'
+import { exportTableToCsvRoute } from './export/csv'
+import { importDumpRoute } from './import/dump'
+import { importTableFromJsonRoute } from './import/json'
+import { importTableFromCsvRoute } from './import/csv'
+import { handleStudioRequest } from './studio'
+import { corsPreflight } from './cors'
+import { handleApiRequest } from './api'
export interface StarbaseDBConfiguration {
- outerbaseApiKey?: string;
- role: "admin" | "client";
- features?: {
- allowlist?: boolean;
- rls?: boolean;
- studio?: boolean;
- rest?: boolean;
- websocket?: boolean;
- export?: boolean;
- import?: boolean;
- };
- studio?: {
- username: string;
- password: string;
- apiKey: string;
- };
+ outerbaseApiKey?: string
+ role: 'admin' | 'client'
+ features?: {
+ allowlist?: boolean
+ rls?: boolean
+ studio?: boolean
+ rest?: boolean
+ websocket?: boolean
+ export?: boolean
+ import?: boolean
+ }
+ studio?: {
+ username: string
+ password: string
+ apiKey: string
+ }
}
export class StarbaseDB {
- private dataSource: DataSource;
- private config: StarbaseDBConfiguration;
- private liteREST: LiteREST;
-
- constructor(options: {
- dataSource: DataSource;
- config: StarbaseDBConfiguration;
- }) {
- this.dataSource = options.dataSource;
- this.config = options.config;
- this.liteREST = new LiteREST(this.dataSource, this.config);
-
- if (this.dataSource.source === "external" && !this.dataSource.external) {
- throw new Error("No external data sources available.");
- }
- }
-
- /**
- * Middleware to check if the request is coming from an internal source.
- */
- private get isInternalSource() {
- return createMiddleware(async (_, next) => {
- if (this.dataSource.source !== "internal") {
- return createResponse(
- undefined,
- "Function is only available for internal data source.",
- 400
- );
- }
-
- return next();
- });
- }
-
- /**
- * Validator middleware to check if the request path has a valid :tableName parameter.
- */
- private get hasTableName() {
- return validator("param", (params) => {
- const tableName = params["tableName"].trim();
-
- if (!tableName) {
- return createResponse(undefined, "Table name is required", 400);
- }
-
- return { tableName };
- });
- }
-
- /**
- * Helper function to get a feature flag from the configuration.
- * @param key The feature key to get.
- * @param defaultValue The default value to return if the feature is not defined.
- * @returns
- */
- private getFeature(
- key: keyof NonNullable<StarbaseDBConfiguration["features"]>,
- defaultValue = true
- ): boolean {
- return this.config.features?.[key] ?? !!defaultValue;
- }
-
- /**
- * Main handler function for the StarbaseDB.
- * @param request Request instance from the fetch event.
- * @returns Promise
- */
- public async handle(
- request: Request,
- ctx: ExecutionContext
- ): Promise<Response> {
- const app = new Hono();
- const isUpgrade = request.headers.get("Upgrade") === "websocket";
-
- // Non-blocking operation to remove expired cache entries from our DO
- ctx.waitUntil(this.expireCache());
-
- // General 404 not found handler
- app.notFound(() => {
- return createResponse(undefined, "Not found", 404);
- });
-
- // Thrown error handler
- app.onError((error) => {
- return createResponse(
- undefined,
- error?.message || "An unexpected error occurred.",
- 500
- );
- });
-
- // CORS preflight handler.
- app.options("*", () => corsPreflight());
-
- if (this.getFeature("studio") && this.config.studio) {
- app.get("/studio", async (c) => {
- return handleStudioRequest(request, {
- username: this.config.studio!.username,
- password: this.config.studio!.password,
- apiKey: this.config.studio!.apiKey,
- });
- });
+ private dataSource: DataSource
+ private config: StarbaseDBConfiguration
+ private liteREST: LiteREST
+
+ constructor(options: {
+ dataSource: DataSource
+ config: StarbaseDBConfiguration
+ }) {
+ this.dataSource = options.dataSource
+ this.config = options.config
+ this.liteREST = new LiteREST(this.dataSource, this.config)
+
+ if (
+ this.dataSource.source === 'external' &&
+ !this.dataSource.external
+ ) {
+ throw new Error('No external data sources available.')
+ }
}
- if (isUpgrade && this.getFeature("websocket")) {
- app.all("/socket", () => this.clientConnected());
+ /**
+ * Middleware to check if the request is coming from an internal source.
+ */
+ private get isInternalSource() {
+ return createMiddleware(async (_, next) => {
+ if (this.dataSource.source !== 'internal') {
+ return createResponse(
+ undefined,
+ 'Function is only available for internal data source.',
+ 400
+ )
+ }
+
+ return next()
+ })
}
- app.post("/query/raw", async (c) => this.queryRoute(c.req.raw, true));
- app.post("/query", async (c) => this.queryRoute(c.req.raw, false));
+ /**
+ * Validator middleware to check if the request path has a valid :tableName parameter.
+ */
+ private get hasTableName() {
+ return validator('param', (params) => {
+ const tableName = params['tableName'].trim()
+
+ if (!tableName) {
+ return createResponse(undefined, 'Table name is required', 400)
+ }
- if (this.getFeature("rest")) {
- app.all("/rest/*", async (c) => {
- return this.liteREST.handleRequest(c.req.raw);
- });
+ return { tableName }
+ })
}
- if (this.getFeature("export")) {
- app.get("/export/dump", this.isInternalSource, async () => {
- return dumpDatabaseRoute(this.dataSource, this.config);
- });
-
- app.get(
- "/export/json/:tableName",
- this.isInternalSource,
- this.hasTableName,
- async (c) => {
- const tableName = c.req.valid("param").tableName;
- return exportTableToJsonRoute(tableName, this.dataSource, this.config);
+ /**
+ * Helper function to get a feature flag from the configuration.
+ * @param key The feature key to get.
+ * @param defaultValue The default value to return if the feature is not defined.
+ * @returns
+ */
+ private getFeature(
+ key: keyof NonNullable<StarbaseDBConfiguration['features']>,
+ defaultValue = true
+ ): boolean {
+ return this.config.features?.[key] ?? !!defaultValue
+ }
+
+ /**
+ * Main handler function for the StarbaseDB.
+ * @param request Request instance from the fetch event.
+ * @returns Promise
+ */
+ public async handle(
+ request: Request,
+ ctx: ExecutionContext
+ ): Promise<Response> {
+ const app = new Hono()
+ const isUpgrade = request.headers.get('Upgrade') === 'websocket'
+
+ // Non-blocking operation to remove expired cache entries from our DO
+ ctx.waitUntil(this.expireCache())
+
+ // General 404 not found handler
+ app.notFound(() => {
+ return createResponse(undefined, 'Not found', 404)
+ })
+
+ // Thrown error handler
+ app.onError((error) => {
+ return createResponse(
+ undefined,
+ error?.message || 'An unexpected error occurred.',
+ 500
+ )
+ })
+
+ // CORS preflight handler.
+ app.options('*', () => corsPreflight())
+
+ if (this.getFeature('studio') && this.config.studio) {
+ app.get('/studio', async (c) => {
+ return handleStudioRequest(request, {
+ username: this.config.studio!.username,
+ password: this.config.studio!.password,
+ apiKey: this.config.studio!.apiKey,
+ })
+ })
}
- );
-
- app.get(
- "/export/csv/:tableName",
- this.isInternalSource,
- this.hasTableName,
- async (c) => {
- const tableName = c.req.valid("param").tableName;
- return exportTableToCsvRoute(tableName, this.dataSource, this.config);
+
+ if (isUpgrade && this.getFeature('websocket')) {
+ app.all('/socket', () => this.clientConnected())
}
- );
- }
- if (this.getFeature("import")) {
- app.post("/import/dump", this.isInternalSource, async (c) => {
- return importDumpRoute(c.req.raw, this.dataSource, this.config);
- });
-
- app.post(
- "/import/json/:tableName",
- this.isInternalSource,
- this.hasTableName,
- async (c) => {
- const tableName = c.req.valid("param").tableName;
- return importTableFromJsonRoute(tableName, request, this.dataSource, this.config);
+ app.post('/query/raw', async (c) => this.queryRoute(c.req.raw, true))
+ app.post('/query', async (c) => this.queryRoute(c.req.raw, false))
+
+ if (this.getFeature('rest')) {
+ app.all('/rest/*', async (c) => {
+ return this.liteREST.handleRequest(c.req.raw)
+ })
}
- );
-
- app.post(
- "/import/csv/:tableName",
- this.isInternalSource,
- this.hasTableName,
- async (c) => {
- const tableName = c.req.valid("param").tableName;
- return importTableFromCsvRoute(tableName, request, this.dataSource, this.config);
+
+ if (this.getFeature('export')) {
+ app.get('/export/dump', this.isInternalSource, async () => {
+ return dumpDatabaseRoute(this.dataSource, this.config)
+ })
+
+ app.get(
+ '/export/json/:tableName',
+ this.isInternalSource,
+ this.hasTableName,
+ async (c) => {
+ const tableName = c.req.valid('param').tableName
+ return exportTableToJsonRoute(
+ tableName,
+ this.dataSource,
+ this.config
+ )
+ }
+ )
+
+ app.get(
+ '/export/csv/:tableName',
+ this.isInternalSource,
+ this.hasTableName,
+ async (c) => {
+ const tableName = c.req.valid('param').tableName
+ return exportTableToCsvRoute(
+ tableName,
+ this.dataSource,
+ this.config
+ )
+ }
+ )
+ }
+
+ if (this.getFeature('import')) {
+ app.post('/import/dump', this.isInternalSource, async (c) => {
+ return importDumpRoute(c.req.raw, this.dataSource, this.config)
+ })
+
+ app.post(
+ '/import/json/:tableName',
+ this.isInternalSource,
+ this.hasTableName,
+ async (c) => {
+ const tableName = c.req.valid('param').tableName
+ return importTableFromJsonRoute(
+ tableName,
+ request,
+ this.dataSource,
+ this.config
+ )
+ }
+ )
+
+ app.post(
+ '/import/csv/:tableName',
+ this.isInternalSource,
+ this.hasTableName,
+ async (c) => {
+ const tableName = c.req.valid('param').tableName
+ return importTableFromCsvRoute(
+ tableName,
+ request,
+ this.dataSource,
+ this.config
+ )
+ }
+ )
+ }
+
+ app.all('/api/*', async (c) => handleApiRequest(c.req.raw))
+
+ return app.fetch(request)
+ }
+
+ async queryRoute(request: Request, isRaw: boolean): Promise<Response> {
+ try {
+ const contentType = request.headers.get('Content-Type') || ''
+ if (!contentType.includes('application/json')) {
+ return createResponse(
+ undefined,
+ 'Content-Type must be application/json.',
+ 400
+ )
+ }
+
+ const { sql, params, transaction } =
+ (await request.json()) as QueryRequest & QueryTransactionRequest
+
+ if (Array.isArray(transaction) && transaction.length) {
+ const queries = transaction.map((queryObj: any) => {
+ const { sql, params } = queryObj
+
+ if (typeof sql !== 'string' || !sql.trim()) {
+ throw new Error(
+ 'Invalid or empty "sql" field in transaction.'
+ )
+ } else if (
+ params !== undefined &&
+ !Array.isArray(params) &&
+ typeof params !== 'object'
+ ) {
+ throw new Error(
+ 'Invalid "params" field in transaction. Must be an array or object.'
+ )
+ }
+
+ return { sql, params }
+ })
+
+ const response = await executeTransaction({
+ queries,
+ isRaw,
+ dataSource: this.dataSource,
+ config: this.config,
+ })
+
+ return createResponse(response, undefined, 200)
+ } else if (typeof sql !== 'string' || !sql.trim()) {
+ return createResponse(
+ undefined,
+ 'Invalid or empty "sql" field.',
+ 400
+ )
+ } else if (
+ params !== undefined &&
+ !Array.isArray(params) &&
+ typeof params !== 'object'
+ ) {
+ return createResponse(
+ undefined,
+ 'Invalid "params" field. Must be an array or object.',
+ 400
+ )
+ }
+
+ const response = await executeQuery({
+ sql,
+ params,
+ isRaw,
+ dataSource: this.dataSource,
+ config: this.config,
+ })
+ return createResponse(response, undefined, 200)
+ } catch (error: any) {
+ console.error('Query Route Error:', error)
+ return createResponse(
+ undefined,
+ error?.message || 'An unexpected error occurred.',
+ 500
+ )
}
- );
}
- app.all("/api/*", async (c) => handleApiRequest(c.req.raw));
-
- return app.fetch(request);
- }
-
- async queryRoute(request: Request, isRaw: boolean): Promise<Response> {
- try {
- const contentType = request.headers.get("Content-Type") || "";
- if (!contentType.includes("application/json")) {
- return createResponse(
- undefined,
- "Content-Type must be application/json.",
- 400
- );
- }
-
- const { sql, params, transaction } =
- (await request.json()) as QueryRequest & QueryTransactionRequest;
-
- if (Array.isArray(transaction) && transaction.length) {
- const queries = transaction.map((queryObj: any) => {
- const { sql, params } = queryObj;
-
- if (typeof sql !== "string" || !sql.trim()) {
- throw new Error('Invalid or empty "sql" field in transaction.');
- } else if (
- params !== undefined &&
- !Array.isArray(params) &&
- typeof params !== "object"
- ) {
- throw new Error(
- 'Invalid "params" field in transaction. Must be an array or object.'
- );
- }
-
- return { sql, params };
- });
-
- const response = await executeTransaction({
- queries,
- isRaw,
- dataSource: this.dataSource,
- config: this.config,
- });
-
- return createResponse(response, undefined, 200);
- } else if (typeof sql !== "string" || !sql.trim()) {
- return createResponse(undefined, 'Invalid or empty "sql" field.', 400);
- } else if (
- params !== undefined &&
- !Array.isArray(params) &&
- typeof params !== "object"
- ) {
- return createResponse(
- undefined,
- 'Invalid "params" field. Must be an array or object.',
- 400
- );
- }
-
- const response = await executeQuery({
- sql,
- params,
- isRaw,
- dataSource: this.dataSource,
- config: this.config,
- });
- return createResponse(response, undefined, 200);
- } catch (error: any) {
- console.error("Query Route Error:", error);
- return createResponse(
- undefined,
- error?.message || "An unexpected error occurred.",
- 500
- );
+ private clientConnected() {
+ const webSocketPair = new WebSocketPair()
+ const [client, server] = Object.values(webSocketPair)
+
+ server.accept()
+ server.addEventListener('message', (event) => {
+ const { sql, params, action } = JSON.parse(event.data as string)
+
+ if (action === 'query') {
+ const executeQueryWrapper = async () => {
+ const response = await executeQuery({
+ sql,
+ params,
+ isRaw: false,
+ dataSource: this.dataSource,
+ config: this.config,
+ })
+ server.send(JSON.stringify(response))
+ }
+ executeQueryWrapper()
+ }
+ })
+
+ return new Response(null, { status: 101, webSocket: client })
}
- }
-
- private clientConnected() {
- const webSocketPair = new WebSocketPair();
- const [client, server] = Object.values(webSocketPair);
-
- server.accept();
- server.addEventListener("message", (event) => {
- const { sql, params, action } = JSON.parse(event.data as string);
-
- if (action === "query") {
- const executeQueryWrapper = async () => {
- const response = await executeQuery({
- sql,
- params,
- isRaw: false,
- dataSource: this.dataSource,
- config: this.config,
- });
- server.send(JSON.stringify(response));
- };
- executeQueryWrapper();
- }
- });
-
- return new Response(null, { status: 101, webSocket: client });
- }
-
- /**
- *
- */
- private async expireCache() {
- try {
- const cleanupSQL = `DELETE FROM tmp_cache WHERE timestamp + (ttl * 1000) < ?`;
- this.dataSource.rpc.executeQuery({
- sql: cleanupSQL,
- params: [Date.now()],
- });
- } catch (err) {
- console.error("Error cleaning up expired cache entries:", err);
+
+ /**
+ *
+ */
+ private async expireCache() {
+ try {
+ const cleanupSQL = `DELETE FROM tmp_cache WHERE timestamp + (ttl * 1000) < ?`
+ this.dataSource.rpc.executeQuery({
+ sql: cleanupSQL,
+ params: [Date.now()],
+ })
+ } catch (err) {
+ console.error('Error cleaning up expired cache entries:', err)
+ }
}
- }
}
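
handler.ts is the largest reflow in this PR, so a compact sketch of how `StarbaseDB` is consumed may make the review easier. The surrounding Worker plumbing, the `dataSource` construction, and the feature flags shown here are assumptions; only the `StarbaseDB` constructor and `handle()` signature come from the file above.

```ts
import { StarbaseDB, StarbaseDBConfiguration } from './handler'
import { DataSource } from './types'

export async function serve(
    request: Request,
    dataSource: DataSource,
    ctx: ExecutionContext
): Promise<Response> {
    const config: StarbaseDBConfiguration = {
        role: 'admin',
        features: { rest: true, export: true, import: true, websocket: true },
    }

    // /query, /rest/*, /export/*, /import/*, /api/* and /socket are all
    // wired up inside handle() via Hono.
    const starbase = new StarbaseDB({ dataSource, config })
    return starbase.handle(request, ctx)
}
```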
diff --git a/src/import/csv.ts b/src/import/csv.ts
index 847a2e2..aaf9e86 100644
--- a/src/import/csv.ts
+++ b/src/import/csv.ts
@@ -1,15 +1,15 @@
-import { createResponse } from '../utils';
-import { DataSource } from '../types';
-import { executeOperation } from '../export';
-import { StarbaseDBConfiguration } from '../handler';
+import { createResponse } from '../utils'
+import { DataSource } from '../types'
+import { executeOperation } from '../export'
+import { StarbaseDBConfiguration } from '../handler'
interface ColumnMapping {
- [key: string]: string;
+ [key: string]: string
}
interface CsvData {
- data: string;
- columnMapping?: Record<string, string>;
+ data: string
+ columnMapping?: Record<string, string>
}
export async function importTableFromCsvRoute(
@@ -20,105 +20,120 @@ export async function importTableFromCsvRoute(
): Promise<Response> {
try {
if (!request.body) {
- return createResponse(undefined, 'Request body is empty', 400);
+ return createResponse(undefined, 'Request body is empty', 400)
}
- let csvData: CsvData;
- const contentType = request.headers.get('Content-Type') || '';
+ let csvData: CsvData
+ const contentType = request.headers.get('Content-Type') || ''
if (contentType.includes('application/json')) {
// Handle JSON-wrapped CSV data in POST body
- csvData = await request.json() as CsvData;
+ csvData = (await request.json()) as CsvData
} else if (contentType.includes('text/csv')) {
// Handle raw CSV data in POST body
- const csvContent = await request.text();
- csvData = { data: csvContent };
+ const csvContent = await request.text()
+ csvData = { data: csvContent }
} else if (contentType.includes('multipart/form-data')) {
// Handle file upload
- const formData = await request.formData();
- const file = formData.get('file') as File | null;
-
+ const formData = await request.formData()
+ const file = formData.get('file') as File | null
+
if (!file) {
- return createResponse(undefined, 'No file uploaded', 400);
+ return createResponse(undefined, 'No file uploaded', 400)
}
- const csvContent = await file.text();
- csvData = { data: csvContent };
+ const csvContent = await file.text()
+ csvData = { data: csvContent }
} else {
- return createResponse(undefined, 'Unsupported Content-Type', 400);
+ return createResponse(undefined, 'Unsupported Content-Type', 400)
}
- const { data: csvContent, columnMapping = {} } = csvData;
+ const { data: csvContent, columnMapping = {} } = csvData
// Parse CSV data
- const records = parseCSV(csvContent);
+ const records = parseCSV(csvContent)
if (records.length === 0) {
- return createResponse(undefined, 'Invalid CSV format or empty data', 400);
+ return createResponse(
+ undefined,
+ 'Invalid CSV format or empty data',
+ 400
+ )
}
- const failedStatements: { statement: string; error: string }[] = [];
- let successCount = 0;
+ const failedStatements: { statement: string; error: string }[] = []
+ let successCount = 0
for (const record of records) {
- const mappedRecord = mapRecord(record, columnMapping);
- const columns = Object.keys(mappedRecord);
- const values = Object.values(mappedRecord);
- const placeholders = values.map(() => '?').join(', ');
+ const mappedRecord = mapRecord(record, columnMapping)
+ const columns = Object.keys(mappedRecord)
+ const values = Object.values(mappedRecord)
+ const placeholders = values.map(() => '?').join(', ')
- const statement = `INSERT INTO ${tableName} (${columns.join(', ')}) VALUES (${placeholders})`;
+ const statement = `INSERT INTO ${tableName} (${columns.join(', ')}) VALUES (${placeholders})`
try {
- await executeOperation([{ sql: statement, params: values }], dataSource, config)
- successCount++;
+ await executeOperation(
+ [{ sql: statement, params: values }],
+ dataSource,
+ config
+ )
+ successCount++
} catch (error: any) {
failedStatements.push({
statement: statement,
- error: error.message || 'Unknown error'
- });
+ error: error.message || 'Unknown error',
+ })
}
}
- const totalRecords = records.length;
- const failedCount = failedStatements.length;
-
- const resultMessage = `Imported ${successCount} out of ${totalRecords} records successfully. ${failedCount} records failed.`;
+ const totalRecords = records.length
+ const failedCount = failedStatements.length
- return createResponse({
- message: resultMessage,
- failedStatements: failedStatements
- }, undefined, 200);
+ const resultMessage = `Imported ${successCount} out of ${totalRecords} records successfully. ${failedCount} records failed.`
+ return createResponse(
+ {
+ message: resultMessage,
+ failedStatements: failedStatements,
+ },
+ undefined,
+ 200
+ )
} catch (error: any) {
- console.error('CSV Import Error:', error);
- return createResponse(undefined, 'Failed to import CSV data: ' + error.message, 500);
+ console.error('CSV Import Error:', error)
+ return createResponse(
+ undefined,
+ 'Failed to import CSV data: ' + error.message,
+ 500
+ )
}
}
function parseCSV(csv: string): Record<string, string>[] {
- const lines = csv.split('\n');
- const headers = lines[0].split(',').map(header => header.trim());
- const records: Record<string, string>[] = [];
+ const lines = csv.split('\n')
+ const headers = lines[0].split(',').map((header) => header.trim())
+ const records: Record<string, string>[] = []
for (let i = 1; i < lines.length; i++) {
- const values = lines[i].split(',').map(value => value.trim());
+ const values = lines[i].split(',').map((value) => value.trim())
if (values.length === headers.length) {
- const record: Record<string, string> = {};
+ const record: Record<string, string> = {}
headers.forEach((header, index) => {
- record[header] = values[index];
- });
- records.push(record);
+ record[header] = values[index]
+ })
+ records.push(record)
}
}
- return records;
+ return records
}
function mapRecord(record: any, columnMapping: ColumnMapping): any {
- const mappedRecord: any = {};
+ const mappedRecord: any = {}
for (const [key, value] of Object.entries(record)) {
- const mappedKey = columnMapping[key] || key;
- mappedRecord[mappedKey] = value;
+ const mappedKey = columnMapping[key] || key
+ mappedRecord[mappedKey] = value
}
- return mappedRecord;
+ return mappedRecord
}
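
To round out the CSV import changes, an illustrative client call is included below; the table name, host, token, and `columnMapping` values are placeholders chosen to show the JSON-wrapped variant that the route accepts.

```ts
// POST CSV rows wrapped in JSON; columnMapping renames CSV headers to the
// destination column names before each INSERT is built.
await fetch('https://starbasedb.YOUR-ID-HERE.workers.dev/import/csv/users', {
    method: 'POST',
    headers: {
        'Content-Type': 'application/json',
        Authorization: 'Bearer YOUR-ADMIN-TOKEN',
    },
    body: JSON.stringify({
        data: 'full_name,email\nAda Lovelace,ada@example.com',
        columnMapping: { full_name: 'name' },
    }),
})
```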
diff --git a/src/import/dump.ts b/src/import/dump.ts
index 8d20480..259e490 100644
--- a/src/import/dump.ts
+++ b/src/import/dump.ts
@@ -1,33 +1,33 @@
-import { createResponse } from '../utils';
-import { DataSource } from '../types';
-import { executeOperation } from '../export';
-import { StarbaseDBConfiguration } from '../handler';
+import { createResponse } from '../utils'
+import { DataSource } from '../types'
+import { executeOperation } from '../export'
+import { StarbaseDBConfiguration } from '../handler'
function parseSqlStatements(sqlContent: string): string[] {
- const lines = sqlContent.split('\n');
- let currentStatement = '';
- const statements: string[] = [];
+ const lines = sqlContent.split('\n')
+ let currentStatement = ''
+ const statements: string[] = []
for (const line of lines) {
- const trimmedLine = line.trim();
+ const trimmedLine = line.trim()
if (trimmedLine.startsWith('--') || trimmedLine === '') {
- continue; // Skip comments and empty lines
+ continue // Skip comments and empty lines
}
- currentStatement += line + '\n';
+ currentStatement += line + '\n'
if (trimmedLine.endsWith(';')) {
- statements.push(currentStatement.trim());
- currentStatement = '';
+ statements.push(currentStatement.trim())
+ currentStatement = ''
}
}
// Add any remaining statement without a semicolon
if (currentStatement.trim()) {
- statements.push(currentStatement.trim());
+ statements.push(currentStatement.trim())
}
- return statements;
+ return statements
}
export async function importDumpRoute(
@@ -36,55 +36,79 @@ export async function importDumpRoute(
config: StarbaseDBConfiguration
): Promise<Response> {
if (request.method !== 'POST') {
- return createResponse(undefined, 'Method not allowed', 405);
+ return createResponse(undefined, 'Method not allowed', 405)
}
- const contentType = request.headers.get('Content-Type');
+ const contentType = request.headers.get('Content-Type')
if (!contentType || !contentType.includes('multipart/form-data')) {
- return createResponse(undefined, 'Content-Type must be multipart/form-data', 400);
+ return createResponse(
+ undefined,
+ 'Content-Type must be multipart/form-data',
+ 400
+ )
}
try {
- const formData = await request.formData();
- const sqlFile = formData.get('sqlFile');
+ const formData = await request.formData()
+ const sqlFile = formData.get('sqlFile')
if (!sqlFile || !(sqlFile instanceof File)) {
- return createResponse(undefined, 'No SQL file uploaded', 400);
+ return createResponse(undefined, 'No SQL file uploaded', 400)
}
if (!sqlFile.name.endsWith('.sql')) {
- return createResponse(undefined, 'Uploaded file must be a .sql file', 400);
+ return createResponse(
+ undefined,
+ 'Uploaded file must be a .sql file',
+ 400
+ )
}
- let sqlContent = await sqlFile.text();
-
+ let sqlContent = await sqlFile.text()
+
// Remove the SQLite format header if present
if (sqlContent.startsWith('SQLite format 3')) {
- sqlContent = sqlContent.substring(sqlContent.indexOf('\n') + 1);
+ sqlContent = sqlContent.substring(sqlContent.indexOf('\n') + 1)
}
- const sqlStatements = parseSqlStatements(sqlContent);
+ const sqlStatements = parseSqlStatements(sqlContent)
- const results = [];
+ const results = []
for (const statement of sqlStatements) {
try {
- const result = await executeOperation([{ sql: statement }], dataSource, config)
- results.push({ statement, success: true, result });
+ const result = await executeOperation(
+ [{ sql: statement }],
+ dataSource,
+ config
+ )
+ results.push({ statement, success: true, result })
} catch (error: any) {
- console.error(`Error executing statement: ${statement}`, error);
- results.push({ statement, success: false, error: error.message });
+ console.error(`Error executing statement: ${statement}`, error)
+ results.push({
+ statement,
+ success: false,
+ error: error.message,
+ })
}
}
- const successCount = results.filter(r => r.success).length;
- const failureCount = results.filter(r => !r.success).length;
-
- return createResponse({
- message: `SQL dump import completed. ${successCount} statements succeeded, ${failureCount} failed.`,
- details: results
- }, undefined, failureCount > 0 ? 207 : 200);
+ const successCount = results.filter((r) => r.success).length
+ const failureCount = results.filter((r) => !r.success).length
+
+ return createResponse(
+ {
+ message: `SQL dump import completed. ${successCount} statements succeeded, ${failureCount} failed.`,
+ details: results,
+ },
+ undefined,
+ failureCount > 0 ? 207 : 200
+ )
} catch (error: any) {
- console.error('Import Dump Error:', error);
- return createResponse(undefined, error.message || 'An error occurred while importing the SQL dump', 500);
+ console.error('Import Dump Error:', error)
+ return createResponse(
+ undefined,
+ error.message || 'An error occurred while importing the SQL dump',
+ 500
+ )
}
}
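A hypothetical client-side sketch of calling the dump import route follows. The worker URL and the `/import/dump` path are assumptions made for illustration; the multipart field name `sqlFile`, the `.sql` extension requirement, and the 207 status for partial failures come from `importDumpRoute` above.

```
// Hypothetical usage sketch for the SQL dump import route (path is assumed).
const form = new FormData()
form.append(
    'sqlFile',
    new File(['CREATE TABLE users (id INTEGER PRIMARY KEY);\n'], 'backup.sql')
)

const response = await fetch(
    'https://starbasedb.YOUR-ID-HERE.workers.dev/import/dump',
    {
        method: 'POST',
        headers: { Authorization: 'Bearer ABC123' },
        // fetch sets the multipart/form-data Content-Type and boundary itself
        body: form,
    }
)

// 200 means every statement succeeded; 207 means some statements failed and
// the per-statement results are listed under "details".
console.log(response.status, await response.json())
```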
diff --git a/src/import/json.ts b/src/import/json.ts
index 9454a6a..244d7cc 100644
--- a/src/import/json.ts
+++ b/src/import/json.ts
@@ -1,15 +1,15 @@
-import { createResponse } from '../utils';
-import { executeOperation } from '../export';
-import { DataSource } from '../types';
-import { StarbaseDBConfiguration } from '../handler';
+import { createResponse } from '../utils'
+import { executeOperation } from '../export'
+import { DataSource } from '../types'
+import { StarbaseDBConfiguration } from '../handler'
interface ColumnMapping {
- [key: string]: string;
+ [key: string]: string
}
interface JsonData {
- data: any[];
- columnMapping?: Record<string, string>;
+ data: any[]
+ columnMapping?: Record<string, string>
}
export async function importTableFromJsonRoute(
@@ -20,79 +20,90 @@ export async function importTableFromJsonRoute(
): Promise<Response> {
try {
if (!request.body) {
- return createResponse(undefined, 'Request body is empty', 400);
+ return createResponse(undefined, 'Request body is empty', 400)
}
- let jsonData: JsonData;
- const contentType = request.headers.get('Content-Type') || '';
+ let jsonData: JsonData
+ const contentType = request.headers.get('Content-Type') || ''
if (contentType.includes('application/json')) {
// Handle JSON data in POST body
- jsonData = await request.json() as JsonData;
+ jsonData = (await request.json()) as JsonData
} else if (contentType.includes('multipart/form-data')) {
// Handle file upload
- const formData = await request.formData();
- const file = formData.get('file') as File | null;
-
+ const formData = await request.formData()
+ const file = formData.get('file') as File | null
+
if (!file) {
- return createResponse(undefined, 'No file uploaded', 400);
+ return createResponse(undefined, 'No file uploaded', 400)
}
- const fileContent = await file.text();
- jsonData = JSON.parse(fileContent) as JsonData;
+ const fileContent = await file.text()
+ jsonData = JSON.parse(fileContent) as JsonData
} else {
- return createResponse(undefined, 'Unsupported Content-Type', 400);
+ return createResponse(undefined, 'Unsupported Content-Type', 400)
}
if (!Array.isArray(jsonData.data)) {
- return createResponse(undefined, 'Invalid JSON format. Expected an object with "data" array and optional "columnMapping".', 400);
+ return createResponse(
+ undefined,
+ 'Invalid JSON format. Expected an object with "data" array and optional "columnMapping".',
+ 400
+ )
}
- const { data, columnMapping = {} } = jsonData;
+ const { data, columnMapping = {} } = jsonData
- const failedStatements: { statement: string; error: string }[] = [];
- let successCount = 0;
+ const failedStatements: { statement: string; error: string }[] = []
+ let successCount = 0
for (const record of data) {
- const mappedRecord = mapRecord(record, columnMapping);
- const columns = Object.keys(mappedRecord);
- const values = Object.values(mappedRecord);
- const placeholders = values.map(() => '?').join(', ');
+ const mappedRecord = mapRecord(record, columnMapping)
+ const columns = Object.keys(mappedRecord)
+ const values = Object.values(mappedRecord)
+ const placeholders = values.map(() => '?').join(', ')
- const statement = `INSERT INTO ${tableName} (${columns.join(', ')}) VALUES (${placeholders})`;
+ const statement = `INSERT INTO ${tableName} (${columns.join(', ')}) VALUES (${placeholders})`
try {
- await executeOperation([{ sql: statement, params: values }], dataSource, config)
- successCount++;
+ await executeOperation(
+ [{ sql: statement, params: values }],
+ dataSource,
+ config
+ )
+ successCount++
} catch (error: any) {
failedStatements.push({
statement: statement,
- error: error || 'Unknown error'
- });
+ error: error || 'Unknown error',
+ })
}
}
- const totalRecords = data.length;
- const failedCount = failedStatements.length;
-
- const resultMessage = `Imported ${successCount} out of ${totalRecords} records successfully. ${failedCount} records failed.`;
+ const totalRecords = data.length
+ const failedCount = failedStatements.length
- return createResponse({
- message: resultMessage,
- failedStatements: failedStatements
- }, undefined, 200);
+ const resultMessage = `Imported ${successCount} out of ${totalRecords} records successfully. ${failedCount} records failed.`
+ return createResponse(
+ {
+ message: resultMessage,
+ failedStatements: failedStatements,
+ },
+ undefined,
+ 200
+ )
} catch (error: any) {
- console.error('JSON Import Error:', error);
- return createResponse(undefined, 'Failed to import JSON data', 500);
+ console.error('JSON Import Error:', error)
+ return createResponse(undefined, 'Failed to import JSON data', 500)
}
}
function mapRecord(record: any, columnMapping: ColumnMapping): any {
- const mappedRecord: any = {};
+ const mappedRecord: any = {}
for (const [key, value] of Object.entries(record)) {
- const mappedKey = columnMapping[key] || key;
- mappedRecord[mappedKey] = value;
+ const mappedKey = columnMapping[key] || key
+ mappedRecord[mappedKey] = value
}
- return mappedRecord;
+ return mappedRecord
}
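For completeness, a hypothetical request against the JSON import route might look like the sketch below. The URL path, including the table segment, is an assumption; the body shape, a `data` array plus an optional `columnMapping`, matches the `JsonData` interface handled by `importTableFromJsonRoute` above.

```
// Hypothetical usage sketch for the JSON import route (path is assumed).
const response = await fetch(
    'https://starbasedb.YOUR-ID-HERE.workers.dev/import/json/users',
    {
        method: 'POST',
        headers: {
            Authorization: 'Bearer ABC123',
            'Content-Type': 'application/json',
        },
        body: JSON.stringify({
            data: [{ full_name: 'Alice', email: 'alice@example.com' }],
            // Optional: rename incoming keys to the destination column names
            columnMapping: { full_name: 'name' },
        }),
    }
)

// The response reports how many records imported and lists any failed statements.
console.log(await response.json())
```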
diff --git a/src/index.ts b/src/index.ts
index 7d7920a..35a88eb 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,263 +1,271 @@
-import { createResponse } from "./utils";
-import { StarbaseDB, StarbaseDBConfiguration } from "./handler";
-import { DataSource, RegionLocationHint } from "./types";
-import { createRemoteJWKSet, jwtVerify } from "jose";
-import { handleStudioRequest } from "./studio";
-import { corsPreflight } from "./cors";
+import { createResponse } from './utils'
+import { StarbaseDB, StarbaseDBConfiguration } from './handler'
+import { DataSource, RegionLocationHint } from './types'
+import { createRemoteJWKSet, jwtVerify } from 'jose'
+import { handleStudioRequest } from './studio'
+import { corsPreflight } from './cors'
-export { StarbaseDBDurableObject } from "./do";
+export { StarbaseDBDurableObject } from './do'
-const DURABLE_OBJECT_ID = "sql-durable-object";
+const DURABLE_OBJECT_ID = 'sql-durable-object'
export interface Env {
- ADMIN_AUTHORIZATION_TOKEN: string;
- CLIENT_AUTHORIZATION_TOKEN: string;
- DATABASE_DURABLE_OBJECT: DurableObjectNamespace<
- import("./do").StarbaseDBDurableObject
- >;
- REGION: string;
-
- // Studio credentials
- STUDIO_USER?: string;
- STUDIO_PASS?: string;
-
- ENABLE_ALLOWLIST?: boolean;
- ENABLE_RLS?: boolean;
-
- // External database source details
- OUTERBASE_API_KEY?: string;
- EXTERNAL_DB_TYPE?: string;
- EXTERNAL_DB_HOST?: string;
- EXTERNAL_DB_PORT?: number;
- EXTERNAL_DB_USER?: string;
- EXTERNAL_DB_PASS?: string;
- EXTERNAL_DB_DATABASE?: string;
- EXTERNAL_DB_DEFAULT_SCHEMA?: string;
-
- EXTERNAL_DB_MONGODB_URI?: string;
- EXTERNAL_DB_TURSO_URI?: string;
- EXTERNAL_DB_TURSO_TOKEN?: string;
- EXTERNAL_DB_STARBASEDB_URI?: string;
- EXTERNAL_DB_STARBASEDB_TOKEN?: string;
- EXTERNAL_DB_CLOUDFLARE_API_KEY?: string;
- EXTERNAL_DB_CLOUDFLARE_ACCOUNT_ID?: string;
- EXTERNAL_DB_CLOUDFLARE_DATABASE_ID?: string;
-
- AUTH_ALGORITHM?: string;
- AUTH_JWKS_ENDPOINT?: string;
-
- // ## DO NOT REMOVE: TEMPLATE INTERFACE ##
+ ADMIN_AUTHORIZATION_TOKEN: string
+ CLIENT_AUTHORIZATION_TOKEN: string
+ DATABASE_DURABLE_OBJECT: DurableObjectNamespace<
+ import('./do').StarbaseDBDurableObject
+ >
+ REGION: string
+
+ // Studio credentials
+ STUDIO_USER?: string
+ STUDIO_PASS?: string
+
+ ENABLE_ALLOWLIST?: boolean
+ ENABLE_RLS?: boolean
+
+ // External database source details
+ OUTERBASE_API_KEY?: string
+ EXTERNAL_DB_TYPE?: string
+ EXTERNAL_DB_HOST?: string
+ EXTERNAL_DB_PORT?: number
+ EXTERNAL_DB_USER?: string
+ EXTERNAL_DB_PASS?: string
+ EXTERNAL_DB_DATABASE?: string
+ EXTERNAL_DB_DEFAULT_SCHEMA?: string
+
+ EXTERNAL_DB_MONGODB_URI?: string
+ EXTERNAL_DB_TURSO_URI?: string
+ EXTERNAL_DB_TURSO_TOKEN?: string
+ EXTERNAL_DB_STARBASEDB_URI?: string
+ EXTERNAL_DB_STARBASEDB_TOKEN?: string
+ EXTERNAL_DB_CLOUDFLARE_API_KEY?: string
+ EXTERNAL_DB_CLOUDFLARE_ACCOUNT_ID?: string
+ EXTERNAL_DB_CLOUDFLARE_DATABASE_ID?: string
+
+ AUTH_ALGORITHM?: string
+ AUTH_JWKS_ENDPOINT?: string
+
+ // ## DO NOT REMOVE: TEMPLATE INTERFACE ##
}
export default {
- /**
- * This is the standard fetch handler for a Cloudflare Worker
- *
- * @param request - The request submitted to the Worker from the client
- * @param env - The interface to reference bindings declared in wrangler.toml
- * @param ctx - The execution context of the Worker
- * @returns The response to be sent back to the client
- */
- async fetch(request, env, ctx): Promise<Response> {
- try {
- const url = new URL(request.url);
- const isWebSocket = request.headers.get("Upgrade") === "websocket";
-
- let role: StarbaseDBConfiguration["role"] = "client";
- let context = {};
-
- // Authorize the request with CORS rules before proceeding.
- if (request.method === "OPTIONS") {
- const preflightResponse = corsPreflight();
-
- if (preflightResponse) {
- return preflightResponse;
- }
- }
-
- // Handle Studio requests before auth checks in the worker.
- // StarbaseDB can handle this for us, but we need to handle it
- // here before auth checks.
- if (
- env.STUDIO_USER &&
- env.STUDIO_PASS &&
- request.method === "GET" &&
- url.pathname === "/studio"
- ) {
- return handleStudioRequest(request, {
- username: env.STUDIO_USER,
- password: env.STUDIO_PASS,
- apiKey: env.ADMIN_AUTHORIZATION_TOKEN,
- });
- }
-
- async function authenticate(token: string) {
- const isAdminAuthorization = token === env.ADMIN_AUTHORIZATION_TOKEN;
- const isClientAuthorization = token === env.CLIENT_AUTHORIZATION_TOKEN;
-
- // If not admin or client auth, check if JWT auth is available
- if (!isAdminAuthorization && !isClientAuthorization) {
- if (env.AUTH_JWKS_ENDPOINT) {
- const { payload } = await jwtVerify(
- token,
- createRemoteJWKSet(new URL(env.AUTH_JWKS_ENDPOINT)),
- {
- algorithms: env.AUTH_ALGORITHM
- ? [env.AUTH_ALGORITHM]
- : undefined,
- }
- );
-
- if (!payload.sub) {
- throw new Error("Invalid JWT payload, subject not found.");
+ /**
+ * This is the standard fetch handler for a Cloudflare Worker
+ *
+ * @param request - The request submitted to the Worker from the client
+ * @param env - The interface to reference bindings declared in wrangler.toml
+ * @param ctx - The execution context of the Worker
+ * @returns The response to be sent back to the client
+ */
+ async fetch(request, env, ctx): Promise<Response> {
+ try {
+ const url = new URL(request.url)
+ const isWebSocket = request.headers.get('Upgrade') === 'websocket'
+
+ let role: StarbaseDBConfiguration['role'] = 'client'
+ let context = {}
+
+ // Authorize the request with CORS rules before proceeding.
+ if (request.method === 'OPTIONS') {
+ const preflightResponse = corsPreflight()
+
+ if (preflightResponse) {
+ return preflightResponse
+ }
}
- context = payload;
- } else {
- // If no JWT secret or JWKS endpoint is provided, then the request has no authorization.
- throw new Error("Unauthorized request");
- }
- } else if (isAdminAuthorization) {
- role = "admin";
- }
- }
-
- // JWT Payload from Header or WebSocket query param.
- let authenticationToken: string | null = null;
-
- /**
- * Prior to proceeding to the Durable Object, we can perform any necessary validation or
- * authorization checks here to ensure the request signature is valid and authorized to
- * interact with the Durable Object.
- */
- if (!isWebSocket) {
- authenticationToken =
- request.headers.get("Authorization")?.replace("Bearer ", "") ?? null;
- } else if (isWebSocket) {
- authenticationToken = url.searchParams.get("token");
- }
-
- // There must be some form of authentication token provided to proceed.
- if (!authenticationToken) {
- return createResponse(undefined, "Unauthorized request", 401);
- }
-
- try {
- await authenticate(authenticationToken);
- } catch (error: any) {
- return createResponse(
- undefined,
- error?.message ?? "Unable to process request.",
- 400
- );
- }
-
- /**
- * Retrieve the Durable Object identifier from the environment bindings and instantiate a
- * Durable Object stub to interact with the Durable Object.
- */
- const region = env.REGION ?? RegionLocationHint.AUTO;
- const id: DurableObjectId =
- env.DATABASE_DURABLE_OBJECT.idFromName(DURABLE_OBJECT_ID);
- const stub =
- region !== RegionLocationHint.AUTO
- ? env.DATABASE_DURABLE_OBJECT.get(id, {
- locationHint: region as DurableObjectLocationHint,
- })
- : env.DATABASE_DURABLE_OBJECT.get(id);
-
- // Create a new RPC Session on the Durable Object.
- const rpc = await stub.init();
-
- // Get the source type from headers/query params.
- const source =
- request.headers.get("X-Starbase-Source") ||
- url.searchParams.get("source"); // TODO: Should this come from here, or per-websocket message?
-
- const dataSource: DataSource = {
- rpc,
- source: source
- ? source.toLowerCase().trim() === "external"
- ? "external"
- : "internal"
- : "internal",
- cache: request.headers.get("X-Starbase-Cache") === "true",
- context: {
- ...context,
- },
- };
-
- if (
- env.EXTERNAL_DB_TYPE === "postgresql" ||
- env.EXTERNAL_DB_TYPE === "mysql"
- ) {
- dataSource.external = {
- dialect: env.EXTERNAL_DB_TYPE,
- host: env.EXTERNAL_DB_HOST!,
- port: env.EXTERNAL_DB_PORT!,
- user: env.EXTERNAL_DB_USER!,
- password: env.EXTERNAL_DB_PASS!,
- database: env.EXTERNAL_DB_DATABASE!,
- defaultSchema: env.EXTERNAL_DB_DEFAULT_SCHEMA,
- };
- }
-
- if (env.EXTERNAL_DB_TYPE === "sqlite") {
- if (env.EXTERNAL_DB_CLOUDFLARE_API_KEY) {
- dataSource.external = {
- dialect: "sqlite",
- provider: "cloudflare-d1",
- apiKey: env.EXTERNAL_DB_CLOUDFLARE_API_KEY,
- accountId: env.EXTERNAL_DB_CLOUDFLARE_ACCOUNT_ID!,
- databaseId: env.EXTERNAL_DB_CLOUDFLARE_DATABASE_ID!,
- };
- }
+ // Handle Studio requests before auth checks in the worker.
+ // StarbaseDB can handle this for us, but we need to handle it
+ // here before auth checks.
+ if (
+ env.STUDIO_USER &&
+ env.STUDIO_PASS &&
+ request.method === 'GET' &&
+ url.pathname === '/studio'
+ ) {
+ return handleStudioRequest(request, {
+ username: env.STUDIO_USER,
+ password: env.STUDIO_PASS,
+ apiKey: env.ADMIN_AUTHORIZATION_TOKEN,
+ })
+ }
- if (env.EXTERNAL_DB_STARBASEDB_URI) {
- dataSource.external = {
- dialect: "sqlite",
- provider: "starbase",
- apiKey: env.EXTERNAL_DB_STARBASEDB_URI,
- token: env.EXTERNAL_DB_STARBASEDB_TOKEN!,
- defaultSchema: env.EXTERNAL_DB_DEFAULT_SCHEMA,
- };
- }
+ async function authenticate(token: string) {
+ const isAdminAuthorization =
+ token === env.ADMIN_AUTHORIZATION_TOKEN
+ const isClientAuthorization =
+ token === env.CLIENT_AUTHORIZATION_TOKEN
+
+ // If not admin or client auth, check if JWT auth is available
+ if (!isAdminAuthorization && !isClientAuthorization) {
+ if (env.AUTH_JWKS_ENDPOINT) {
+ const { payload } = await jwtVerify(
+ token,
+ createRemoteJWKSet(new URL(env.AUTH_JWKS_ENDPOINT)),
+ {
+ algorithms: env.AUTH_ALGORITHM
+ ? [env.AUTH_ALGORITHM]
+ : undefined,
+ }
+ )
+
+ if (!payload.sub) {
+ throw new Error(
+ 'Invalid JWT payload, subject not found.'
+ )
+ }
+
+ context = payload
+ } else {
+ // If no JWT secret or JWKS endpoint is provided, then the request has no authorization.
+ throw new Error('Unauthorized request')
+ }
+ } else if (isAdminAuthorization) {
+ role = 'admin'
+ }
+ }
+
+ // JWT Payload from Header or WebSocket query param.
+ let authenticationToken: string | null = null
+
+ /**
+ * Prior to proceeding to the Durable Object, we can perform any necessary validation or
+ * authorization checks here to ensure the request signature is valid and authorized to
+ * interact with the Durable Object.
+ */
+ if (!isWebSocket) {
+ authenticationToken =
+ request.headers
+ .get('Authorization')
+ ?.replace('Bearer ', '') ?? null
+ } else if (isWebSocket) {
+ authenticationToken = url.searchParams.get('token')
+ }
+
+ // There must be some form of authentication token provided to proceed.
+ if (!authenticationToken) {
+ return createResponse(undefined, 'Unauthorized request', 401)
+ }
+
+ try {
+ await authenticate(authenticationToken)
+ } catch (error: any) {
+ return createResponse(
+ undefined,
+ error?.message ?? 'Unable to process request.',
+ 400
+ )
+ }
+
+ /**
+ * Retrieve the Durable Object identifier from the environment bindings and instantiate a
+ * Durable Object stub to interact with the Durable Object.
+ */
+ const region = env.REGION ?? RegionLocationHint.AUTO
+ const id: DurableObjectId =
+ env.DATABASE_DURABLE_OBJECT.idFromName(DURABLE_OBJECT_ID)
+ const stub =
+ region !== RegionLocationHint.AUTO
+ ? env.DATABASE_DURABLE_OBJECT.get(id, {
+ locationHint: region as DurableObjectLocationHint,
+ })
+ : env.DATABASE_DURABLE_OBJECT.get(id)
+
+ // Create a new RPC Session on the Durable Object.
+ const rpc = await stub.init()
+
+ // Get the source type from headers/query params.
+ const source =
+ request.headers.get('X-Starbase-Source') ||
+ url.searchParams.get('source') // TODO: Should this come from here, or per-websocket message?
+
+ const dataSource: DataSource = {
+ rpc,
+ source: source
+ ? source.toLowerCase().trim() === 'external'
+ ? 'external'
+ : 'internal'
+ : 'internal',
+ cache: request.headers.get('X-Starbase-Cache') === 'true',
+ context: {
+ ...context,
+ },
+ }
+
+ if (
+ env.EXTERNAL_DB_TYPE === 'postgresql' ||
+ env.EXTERNAL_DB_TYPE === 'mysql'
+ ) {
+ dataSource.external = {
+ dialect: env.EXTERNAL_DB_TYPE,
+ host: env.EXTERNAL_DB_HOST!,
+ port: env.EXTERNAL_DB_PORT!,
+ user: env.EXTERNAL_DB_USER!,
+ password: env.EXTERNAL_DB_PASS!,
+ database: env.EXTERNAL_DB_DATABASE!,
+ defaultSchema: env.EXTERNAL_DB_DEFAULT_SCHEMA,
+ }
+ }
+
+ if (env.EXTERNAL_DB_TYPE === 'sqlite') {
+ if (env.EXTERNAL_DB_CLOUDFLARE_API_KEY) {
+ dataSource.external = {
+ dialect: 'sqlite',
+ provider: 'cloudflare-d1',
+ apiKey: env.EXTERNAL_DB_CLOUDFLARE_API_KEY,
+ accountId: env.EXTERNAL_DB_CLOUDFLARE_ACCOUNT_ID!,
+ databaseId: env.EXTERNAL_DB_CLOUDFLARE_DATABASE_ID!,
+ }
+ }
+
+ if (env.EXTERNAL_DB_STARBASEDB_URI) {
+ dataSource.external = {
+ dialect: 'sqlite',
+ provider: 'starbase',
+ apiKey: env.EXTERNAL_DB_STARBASEDB_URI,
+ token: env.EXTERNAL_DB_STARBASEDB_TOKEN!,
+ defaultSchema: env.EXTERNAL_DB_DEFAULT_SCHEMA,
+ }
+ }
+
+ if (env.EXTERNAL_DB_TURSO_URI) {
+ dataSource.external = {
+ dialect: 'sqlite',
+ provider: 'turso',
+ uri: env.EXTERNAL_DB_TURSO_URI,
+ token: env.EXTERNAL_DB_TURSO_TOKEN!,
+ defaultSchema: env.EXTERNAL_DB_DEFAULT_SCHEMA,
+ }
+ }
+ }
+
+ const config: StarbaseDBConfiguration = {
+ outerbaseApiKey: env.OUTERBASE_API_KEY,
+ role,
+ features: {
+ allowlist: env.ENABLE_ALLOWLIST,
+ rls: env.ENABLE_RLS,
+ studio: false, // This is handled above in the worker flow.
+ },
+ }
+
+ const starbase = new StarbaseDB({
+ dataSource,
+ config,
+ })
- if (env.EXTERNAL_DB_TURSO_URI) {
- dataSource.external = {
- dialect: "sqlite",
- provider: "turso",
- uri: env.EXTERNAL_DB_TURSO_URI,
- token: env.EXTERNAL_DB_TURSO_TOKEN!,
- defaultSchema: env.EXTERNAL_DB_DEFAULT_SCHEMA,
- };
+ // Return the final response to our user
+ return await starbase.handle(request, ctx)
+ } catch (error) {
+ // Return error response to client
+ return createResponse(
+ undefined,
+ error instanceof Error
+ ? error.message
+ : 'An unexpected error occurred',
+ 400
+ )
}
- }
-
- const config: StarbaseDBConfiguration = {
- outerbaseApiKey: env.OUTERBASE_API_KEY,
- role,
- features: {
- allowlist: env.ENABLE_ALLOWLIST,
- rls: env.ENABLE_RLS,
- studio: false, // This is handled above in the worker flow.
- },
- };
-
- const starbase = new StarbaseDB({
- dataSource,
- config,
- });
-
- // Return the final response to our user
- return await starbase.handle(request, ctx);
- } catch (error) {
- // Return error response to client
- return createResponse(
- undefined,
- error instanceof Error ? error.message : "An unexpected error occurred",
- 400
- );
- }
- },
-} satisfies ExportedHandler<Env>;
+ },
+} satisfies ExportedHandler<Env>
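As a usage note for the handler above: the token can arrive two ways, and the sketch below shows both. The endpoint paths are placeholders; the `Authorization` header with its `Bearer ` prefix, the `token` query parameter for WebSocket upgrades, and the `X-Starbase-Source`/`X-Starbase-Cache` headers are the names the worker actually reads.

```
// Sketch of the two token delivery paths checked by the fetch handler.
const base = 'https://starbasedb.YOUR-ID-HERE.workers.dev'

// Plain HTTP request: token comes from the Authorization header. The optional
// headers select the data source and opt into caching.
await fetch(`${base}/rest/users`, {
    headers: {
        Authorization: 'Bearer ABC123',
        'X-Starbase-Source': 'internal',
        'X-Starbase-Cache': 'true',
    },
})

// WebSocket upgrade: token is read from the 'token' query parameter instead.
// The /socket path is a placeholder for whatever route performs the upgrade.
const socket = new WebSocket(
    `${base.replace('https', 'wss')}/socket?token=ABC123`
)
```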
diff --git a/src/literest/README.md b/src/literest/README.md
index d141ff0..4312a1c 100644
--- a/src/literest/README.md
+++ b/src/literest/README.md
@@ -1,8 +1,11 @@
# GET
+
Fetch data from the database.
## Equals
+
Get any entry in the `users` table where the `name` column equals `Alice`.
+
```
curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users?name=Alice' \
--header 'Authorization: Bearer ABC123' \
@@ -11,7 +14,9 @@ curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/
```
## Not Equals
+
Get any result that does NOT equal the provided value.
+
```
curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users?name.ne=Alice' \
--header 'Authorization: Bearer ABC123' \
@@ -20,7 +25,9 @@ curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/
```
## LIKE
+
The URL has `%25` appended to it, which represents the `%` character. In SQL, `%` matches any number of characters, so it is what makes the value behave as a `LIKE` pattern.
+
```
curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users?name.like=Al%25' \
--header 'Authorization: Bearer ABC123' \
@@ -29,7 +36,9 @@ curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/
```
## IN
+
Get all results whose name matches one of the values in the IN criteria; the example below includes `Alice` and `Bob`.
+
```
curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users?name.in=Alice,Bob' \
--header 'Authorization: Bearer ABC123' \
@@ -38,6 +47,7 @@ curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/
```
## Greater Than
+
```
curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users?user_id.gt=0' \
--header 'Authorization: Bearer ABC123' \
@@ -46,6 +56,7 @@ curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/
```
## Greater Than or Equal
+
```
curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users?user_id.gte=1' \
--header 'Authorization: Bearer ABC123' \
@@ -54,6 +65,7 @@ curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/
```
## Less Than
+
```
curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users?user_id.lt=3' \
--header 'Authorization: Bearer ABC123' \
@@ -62,6 +74,7 @@ curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/
```
## Less Than or Equal
+
```
curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users?user_id.lte=3' \
--header 'Authorization: Bearer ABC123' \
@@ -70,6 +83,7 @@ curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/
```
## SORT BY & ORDER
+
```
curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users?sort_by=user_id&order=DESC' \
--header 'Authorization: Bearer ABC123' \
@@ -78,6 +92,7 @@ curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/
```
## LIMIT & OFFSET
+
```
curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users?limit=2&offset=1' \
--header 'Authorization: Bearer ABC123' \
@@ -86,6 +101,7 @@ curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/
```
## A bit of everything
+
```
curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users?name.in=Alice%2CBob&user_id.gte=0&email.like=%25example.com&sort_by=user_id&order=DESC&limit=10&offset=0' \
--header 'Authorization: Bearer ABC123' \
@@ -94,6 +110,7 @@ curl --location --request GET 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/
```
# POST
+
```
curl --location 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users' \
--header 'Authorization: Bearer ABC123' \
@@ -106,13 +123,16 @@ curl --location 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users' \
```
# DELETE
+
```
curl --location --request DELETE 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users/4' \
--header 'Authorization: Bearer ABC123'
```
# PUT
+
A PUT command does a FULL replacement of the entry in the table. For partial updates, see PATCH.
+
```
curl --location --request PUT 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users/4' \
--header 'Authorization: Bearer ABC123' \
@@ -125,7 +145,9 @@ curl --location --request PUT 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/
```
# PATCH
+
A PATCH command does a PARTIAL replacement of the entry in the table. For full replacements, see PUT.
+
```
curl --location --request PATCH 'https://starbasedb.{YOUR-IDENTIFIER}.workers.dev/rest/users/4' \
--header 'Authorization: Bearer ABC123' \
@@ -134,4 +156,4 @@ curl --location --request PATCH 'https://starbasedb.{YOUR-IDENTIFIER}.workers.de
--data-raw '{
"email": "brayden+1@outerbase.com"
}'
-```
\ No newline at end of file
+```
diff --git a/src/literest/index.ts b/src/literest/index.ts
index d7362dd..b03c263 100644
--- a/src/literest/index.ts
+++ b/src/literest/index.ts
@@ -1,40 +1,40 @@
-import { createResponse } from "../utils";
-import { DataSource } from "../types";
-import { executeQuery, executeTransaction } from "../operation";
-import { StarbaseDBConfiguration } from "../handler";
+import { createResponse } from '../utils'
+import { DataSource } from '../types'
+import { executeQuery, executeTransaction } from '../operation'
+import { StarbaseDBConfiguration } from '../handler'
export class LiteREST {
- private dataSource: DataSource;
- private config: StarbaseDBConfiguration;
-
- constructor(dataSource: DataSource, config: StarbaseDBConfiguration) {
- this.dataSource = dataSource;
- this.config = config;
- }
-
- /**
- * Sanitizes an identifier by removing all non-alphanumeric characters except underscores.
- * @param identifier - The identifier to sanitize.
- * @returns The sanitized identifier.
- */
- private sanitizeIdentifier(identifier: string): string {
- return identifier.replace(/[^a-zA-Z0-9_]/g, "");
- }
-
- /**
- * Retrieves the primary key columns for a given table.
- * @param tableName - The name of the table.
- * @returns An array of primary key column names.
- */
- private async getPrimaryKeyColumns(
- tableName: string,
- schemaName?: string
- ): Promise<string[]> {
- let query = `PRAGMA table_info(${tableName});`;
-
- if (this.dataSource.source === "external") {
- if (this.dataSource.external?.dialect === "postgresql") {
- query = `
+ private dataSource: DataSource
+ private config: StarbaseDBConfiguration
+
+ constructor(dataSource: DataSource, config: StarbaseDBConfiguration) {
+ this.dataSource = dataSource
+ this.config = config
+ }
+
+ /**
+ * Sanitizes an identifier by removing all non-alphanumeric characters except underscores.
+ * @param identifier - The identifier to sanitize.
+ * @returns The sanitized identifier.
+ */
+ private sanitizeIdentifier(identifier: string): string {
+ return identifier.replace(/[^a-zA-Z0-9_]/g, '')
+ }
+
+ /**
+ * Retrieves the primary key columns for a given table.
+ * @param tableName - The name of the table.
+ * @returns An array of primary key column names.
+ */
+ private async getPrimaryKeyColumns(
+ tableName: string,
+ schemaName?: string
+ ): Promise<string[]> {
+ let query = `PRAGMA table_info(${tableName});`
+
+ if (this.dataSource.source === 'external') {
+ if (this.dataSource.external?.dialect === 'postgresql') {
+ query = `
SELECT kcu.column_name AS name
FROM information_schema.table_constraints tc
JOIN information_schema.key_column_usage kcu
@@ -43,557 +43,575 @@ export class LiteREST {
AND tc.table_name = kcu.table_name
WHERE tc.constraint_type = 'PRIMARY KEY'
AND tc.table_name = '${tableName}'
- AND tc.table_schema = '${schemaName ?? "public"}';`;
- } else if (this.dataSource.external?.dialect === "mysql") {
- query = `
+ AND tc.table_schema = '${schemaName ?? 'public'}';`
+ } else if (this.dataSource.external?.dialect === 'mysql') {
+ query = `
SELECT COLUMN_NAME AS name FROM information_schema.key_column_usage
WHERE table_name = '${tableName}'
AND constraint_name = 'PRIMARY'
- AND table_schema = ${schemaName ?? "DATABASE()"};
- `;
- }
+ AND table_schema = ${schemaName ?? 'DATABASE()'};
+ `
+ }
+ }
+
+ const isSQLite =
+ this.dataSource.source === 'internal' ||
+ this.dataSource.external?.dialect === 'sqlite'
+
+ const schemaInfo = (await executeQuery({
+ sql: query,
+ params: [],
+ isRaw: false,
+ dataSource: this.dataSource,
+ config: this.config,
+ })) as any[]
+
+ let pkColumns = []
+
+ if (isSQLite) {
+ pkColumns = schemaInfo
+ .filter(
+ (col) =>
+ typeof col.pk === 'number' &&
+ col.pk > 0 &&
+ col.name !== null
+ )
+ .map((col) => col.name as string)
+ } else {
+ pkColumns = schemaInfo.map((col) => col.name as string)
+ }
+
+ return pkColumns
}
- const isSQLite =
- this.dataSource.source === "internal" ||
- this.dataSource.external?.dialect === "sqlite";
+ /**
+ * Checks if the provided data is valid.
+ * @param data - The data to validate.
+ * @returns True if the data is valid, false otherwise.
+ */
+ private isDataValid(data: any): boolean {
+ return data && typeof data === 'object' && !Array.isArray(data)
+ }
- const schemaInfo = (await executeQuery({
- sql: query,
- params: [],
- isRaw: false,
- dataSource: this.dataSource,
- config: this.config,
- })) as any[];
+ /**
+ * Sanitizes an operator by mapping it to a valid SQL operator.
+ * @param operator - The operator to sanitize.
+ * @returns The sanitized operator.
+ */
+ private sanitizeOperator(operator: string | undefined): string {
+ const allowedOperators: { [key: string]: string } = {
+ eq: '=',
+ ne: '!=',
+ gt: '>',
+ lt: '<',
+ gte: '>=',
+ lte: '<=',
+ like: 'LIKE',
+ in: 'IN',
+ }
+ return allowedOperators[operator || 'eq'] || '='
+ }
- let pkColumns = [];
+ /**
+ * Retrieves the primary key conditions for a given table.
+ * @param pkColumns - The primary key columns for the table.
+ * @param id - The identifier for the record.
+ * @param data - The data to use for primary key conditions.
+ * @param searchParams - The search parameters.
+ * @returns An object containing the conditions, parameters, and any error message.
+ */
+ private getPrimaryKeyConditions(
+ pkColumns: string[],
+ id: string | undefined,
+ data: any,
+ searchParams: URLSearchParams
+ ): { conditions: string[]; params: any[]; error?: string } {
+ const conditions: string[] = []
+ const params: any[] = []
+
+ if (pkColumns?.length === 1) {
+ const pk = pkColumns[0]
+ const pkValue = id || data[pk] || searchParams.get(pk)
+ if (!pkValue) {
+ return {
+ conditions,
+ params,
+ error: `Missing primary key value for '${pk}'`,
+ }
+ }
+ conditions.push(`${pk} = ?`)
+ params.push(pkValue)
+ } else {
+ // Composite primary key
+ for (const pk of pkColumns) {
+ const pkValue = data[pk] || searchParams.get(pk)
+ if (!pkValue) {
+ return {
+ conditions,
+ params,
+ error: `Missing primary key value for '${pk}'`,
+ }
+ }
+ conditions.push(`${pk} = ?`)
+ params.push(pkValue)
+ }
+ }
- if (isSQLite) {
- pkColumns = schemaInfo
- .filter(
- (col) => typeof col.pk === "number" && col.pk > 0 && col.name !== null
- )
- .map((col) => col.name as string);
- } else {
- pkColumns = schemaInfo.map((col) => col.name as string);
+ return { conditions, params }
}
- return pkColumns;
- }
-
- /**
- * Checks if the provided data is valid.
- * @param data - The data to validate.
- * @returns True if the data is valid, false otherwise.
- */
- private isDataValid(data: any): boolean {
- return data && typeof data === "object" && !Array.isArray(data);
- }
-
- /**
- * Sanitizes an operator by mapping it to a valid SQL operator.
- * @param operator - The operator to sanitize.
- * @returns The sanitized operator.
- */
- private sanitizeOperator(operator: string | undefined): string {
- const allowedOperators: { [key: string]: string } = {
- eq: "=",
- ne: "!=",
- gt: ">",
- lt: "<",
- gte: ">=",
- lte: "<=",
- like: "LIKE",
- in: "IN",
- };
- return allowedOperators[operator || "eq"] || "=";
- }
-
- /**
- * Retrieves the primary key conditions for a given table.
- * @param pkColumns - The primary key columns for the table.
- * @param id - The identifier for the record.
- * @param data - The data to use for primary key conditions.
- * @param searchParams - The search parameters.
- * @returns An object containing the conditions, parameters, and any error message.
- */
- private getPrimaryKeyConditions(
- pkColumns: string[],
- id: string | undefined,
- data: any,
- searchParams: URLSearchParams
- ): { conditions: string[]; params: any[]; error?: string } {
- const conditions: string[] = [];
- const params: any[] = [];
-
- if (pkColumns?.length === 1) {
- const pk = pkColumns[0];
- const pkValue = id || data[pk] || searchParams.get(pk);
- if (!pkValue) {
+ /**
+ * Executes a set of operations.
+ * @param queries - The operations to execute.
+ */
+ private async executeOperation(
+ queries: { sql: string; params: any[] }[]
+ ): Promise<{ result?: any; error?: string | undefined; status: number }> {
+ const results: any[] = (await executeTransaction({
+ queries,
+ isRaw: false,
+ dataSource: this.dataSource,
+ config: this.config,
+ })) as any[]
return {
- conditions,
- params,
- error: `Missing primary key value for '${pk}'`,
- };
- }
- conditions.push(`${pk} = ?`);
- params.push(pkValue);
- } else {
- // Composite primary key
- for (const pk of pkColumns) {
- const pkValue = data[pk] || searchParams.get(pk);
- if (!pkValue) {
- return {
- conditions,
- params,
- error: `Missing primary key value for '${pk}'`,
- };
+ result: results?.length > 0 ? results[0] : undefined,
+ status: 200,
}
- conditions.push(`${pk} = ?`);
- params.push(pkValue);
- }
}
- return { conditions, params };
- }
-
- /**
- * Executes a set of operations.
- * @param queries - The operations to execute.
- */
- private async executeOperation(
- queries: { sql: string; params: any[] }[]
- ): Promise<{ result?: any; error?: string | undefined; status: number }> {
- const results: any[] = (await executeTransaction({
- queries,
- isRaw: false,
- dataSource: this.dataSource,
- config: this.config
- })) as any[];
- return {
- result: results?.length > 0 ? results[0] : undefined,
- status: 200,
- };
- }
-
- /**
- * Handles the incoming request and determines the appropriate action based on the method and path.
- * @param request - The incoming request.
- * @returns The response to the request.
- */
- async handleRequest(request: Request): Promise<Response> {
- const { method, tableName, schemaName, id, searchParams, body } =
- await this.parseRequest(request);
-
- try {
- switch (method) {
- case "GET":
- return await this.handleGet(tableName, schemaName, id, searchParams);
- case "POST":
- return await this.handlePost(tableName, schemaName, body);
- case "PATCH":
- return await this.handlePatch(tableName, schemaName, id, body);
- case "PUT":
- return await this.handlePut(tableName, schemaName, id, body);
- case "DELETE":
- return await this.handleDelete(tableName, schemaName, id);
- default:
- return createResponse(undefined, "Method not allowed", 405);
- }
- } catch (error: any) {
- console.error("LiteREST Error:", error);
- return createResponse(
- undefined,
- error.message || "An unexpected error occurred",
- 500
- );
- }
- }
-
- /**
- * Parses the incoming request and extracts the method, table name, id, search parameters, and body.
- * @param request - The incoming request.
- * @returns An object containing the method, table name, id, search parameters, and body.
- */
- private async parseRequest(request: Request): Promise<{
- method: string;
- tableName: string;
- schemaName: string | undefined;
- id?: string;
- searchParams: URLSearchParams;
- body?: any;
- }> {
- const liteRequest = new Request(request.url.replace("/rest", ""), request);
- const url = new URL(liteRequest.url);
- const pathParts = url.pathname.split("/").filter(Boolean);
-
- if (pathParts.length === 0) {
- throw new Error("Expected a table name in the path");
+ /**
+ * Handles the incoming request and determines the appropriate action based on the method and path.
+ * @param request - The incoming request.
+ * @returns The response to the request.
+ */
+ async handleRequest(request: Request): Promise<Response> {
+ const { method, tableName, schemaName, id, searchParams, body } =
+ await this.parseRequest(request)
+
+ try {
+ switch (method) {
+ case 'GET':
+ return await this.handleGet(
+ tableName,
+ schemaName,
+ id,
+ searchParams
+ )
+ case 'POST':
+ return await this.handlePost(tableName, schemaName, body)
+ case 'PATCH':
+ return await this.handlePatch(
+ tableName,
+ schemaName,
+ id,
+ body
+ )
+ case 'PUT':
+ return await this.handlePut(tableName, schemaName, id, body)
+ case 'DELETE':
+ return await this.handleDelete(tableName, schemaName, id)
+ default:
+ return createResponse(undefined, 'Method not allowed', 405)
+ }
+ } catch (error: any) {
+ console.error('LiteREST Error:', error)
+ return createResponse(
+ undefined,
+ error.message || 'An unexpected error occurred',
+ 500
+ )
+ }
}
- const tableName = this.sanitizeIdentifier(
- pathParts.length === 1 ? pathParts[0] : pathParts[1]
- );
- const schemaName = this.sanitizeIdentifier(pathParts[0]);
- const id = pathParts.length === 3 ? pathParts[2] : undefined;
- const body = ["POST", "PUT", "PATCH"].includes(liteRequest.method)
- ? await liteRequest.json()
- : undefined;
-
- return {
- method: liteRequest.method,
- tableName,
- schemaName,
- id,
- searchParams: url.searchParams,
- body,
- };
- }
-
- private async buildSelectQuery(
- tableName: string,
- schemaName: string | undefined,
- id: string | undefined,
- searchParams: URLSearchParams
- ): Promise<{ query: string; params: any[] }> {
- let query = `SELECT * FROM ${
- schemaName ? `${schemaName}.` : ""
- }${tableName}`;
- const params: any[] = [];
- const conditions: string[] = [];
- const pkColumns = await this.getPrimaryKeyColumns(tableName, schemaName);
- const {
- conditions: pkConditions,
- params: pkParams,
- error,
- } = this.getPrimaryKeyConditions(pkColumns, id, {}, searchParams);
-
- if (!error) {
- conditions.push(...pkConditions);
- params.push(...pkParams);
- }
+ /**
+ * Parses the incoming request and extracts the method, table name, id, search parameters, and body.
+ * @param request - The incoming request.
+ * @returns An object containing the method, table name, id, search parameters, and body.
+ */
+ private async parseRequest(request: Request): Promise<{
+ method: string
+ tableName: string
+ schemaName: string | undefined
+ id?: string
+ searchParams: URLSearchParams
+ body?: any
+ }> {
+ const liteRequest = new Request(
+ request.url.replace('/rest', ''),
+ request
+ )
+ const url = new URL(liteRequest.url)
+ const pathParts = url.pathname.split('/').filter(Boolean)
- // Extract special parameters
- const sortBy = searchParams.get("sort_by");
- const orderParam = searchParams.get("order");
- const limitParam = searchParams.get("limit");
- const offsetParam = searchParams.get("offset");
-
- // Remove special parameters from searchParams
- ["sort_by", "order", "limit", "offset"].forEach((param) =>
- searchParams.delete(param)
- );
-
- // Handle other search parameters
- for (const [key, value] of searchParams.entries()) {
- if (pkColumns.includes(key)) continue; // Skip primary key columns
- const [column, op] = key.split(".");
- const sanitizedColumn = this.sanitizeIdentifier(column);
- const operator = this.sanitizeOperator(op);
-
- if (operator === "IN") {
- const values = value.split(",").map((val) => val.trim());
- const placeholders = values.map(() => "?").join(", ");
- conditions.push(`${sanitizedColumn} IN (${placeholders})`);
- params.push(...values);
- } else {
- conditions.push(`${sanitizedColumn} ${operator} ?`);
- params.push(value);
- }
- }
+ if (pathParts.length === 0) {
+ throw new Error('Expected a table name in the path')
+ }
- // Add WHERE clause if there are conditions
- if (conditions.length > 0) {
- query += ` WHERE ${conditions.join(" AND ")}`;
- }
+ const tableName = this.sanitizeIdentifier(
+ pathParts.length === 1 ? pathParts[0] : pathParts[1]
+ )
+ const schemaName = this.sanitizeIdentifier(pathParts[0])
+ const id = pathParts.length === 3 ? pathParts[2] : undefined
+ const body = ['POST', 'PUT', 'PATCH'].includes(liteRequest.method)
+ ? await liteRequest.json()
+ : undefined
- // Add ORDER BY clause
- if (sortBy) {
- const sanitizedSortBy = this.sanitizeIdentifier(sortBy);
- const order = orderParam?.toUpperCase() === "DESC" ? "DESC" : "ASC";
- query += ` ORDER BY ${sanitizedSortBy} ${order}`;
+ return {
+ method: liteRequest.method,
+ tableName,
+ schemaName,
+ id,
+ searchParams: url.searchParams,
+ body,
+ }
}
- // Add LIMIT and OFFSET clauses
- if (limitParam) {
- const limit = parseInt(limitParam, 10);
- if (limit > 0) {
- query += ` LIMIT ?`;
- params.push(limit);
-
- if (offsetParam) {
- const offset = parseInt(offsetParam, 10);
- if (offset > 0) {
- query += ` OFFSET ?`;
- params.push(offset);
- }
+ private async buildSelectQuery(
+ tableName: string,
+ schemaName: string | undefined,
+ id: string | undefined,
+ searchParams: URLSearchParams
+ ): Promise<{ query: string; params: any[] }> {
+ let query = `SELECT * FROM ${
+ schemaName ? `${schemaName}.` : ''
+ }${tableName}`
+ const params: any[] = []
+ const conditions: string[] = []
+ const pkColumns = await this.getPrimaryKeyColumns(tableName, schemaName)
+ const {
+ conditions: pkConditions,
+ params: pkParams,
+ error,
+ } = this.getPrimaryKeyConditions(pkColumns, id, {}, searchParams)
+
+ if (!error) {
+ conditions.push(...pkConditions)
+ params.push(...pkParams)
}
- }
- }
- return { query, params };
- }
-
- private async handleGet(
- tableName: string,
- schemaName: string | undefined,
- id: string | undefined,
- searchParams: URLSearchParams
- ): Promise<Response> {
- const { query, params } = await this.buildSelectQuery(
- tableName,
- schemaName,
- id,
- searchParams
- );
-
- try {
- const response = await this.executeOperation([{ sql: query, params }]);
- const resultArray = response.result;
- return createResponse(resultArray, undefined, 200);
- } catch (error: any) {
- console.error("GET Operation Error:", error);
- return createResponse(
- undefined,
- error.message || "Failed to retrieve data",
- 500
- );
- }
- }
-
- private async handlePost(
- tableName: string,
- schemaName: string | undefined,
- data: any
- ): Promise<Response> {
- if (!this.isDataValid(data)) {
- console.error("Invalid data format for POST:", data);
- return createResponse(undefined, "Invalid data format", 400);
- }
+ // Extract special parameters
+ const sortBy = searchParams.get('sort_by')
+ const orderParam = searchParams.get('order')
+ const limitParam = searchParams.get('limit')
+ const offsetParam = searchParams.get('offset')
- const dataKeys = Object.keys(data);
- if (dataKeys.length === 0) {
- console.error("No data provided for POST");
- return createResponse(undefined, "No data provided", 400);
- }
+ // Remove special parameters from searchParams
+ ;['sort_by', 'order', 'limit', 'offset'].forEach((param) =>
+ searchParams.delete(param)
+ )
- // Sanitize column names
- const columns = dataKeys.map((col) => this.sanitizeIdentifier(col));
- const placeholders = columns.map(() => "?").join(", ");
- const query = `INSERT INTO ${
- schemaName ? `${schemaName}.` : ""
- }${tableName} (${columns.join(", ")}) VALUES (${placeholders})`;
-
- // Map parameters using original data keys to get correct values
- const params = dataKeys.map((key) => data[key]);
- const queries = [{ sql: query, params }];
-
- try {
- await this.executeOperation(queries);
- return createResponse(
- { message: "Resource created successfully", data },
- undefined,
- 201
- );
- } catch (error: any) {
- console.error("POST Operation Error:", error);
- const errorMessage =
- error.message ||
- error.error ||
- JSON.stringify(error) ||
- "Failed to create resource";
- return createResponse(undefined, errorMessage, 500);
- }
- }
-
- private async handlePatch(
- tableName: string,
- schemaName: string | undefined,
- id: string | undefined,
- data: any
- ): Promise<Response> {
- const pkColumns = await this.getPrimaryKeyColumns(tableName, schemaName);
-
- const {
- conditions: pkConditions,
- params: pkParams,
- error,
- } = this.getPrimaryKeyConditions(
- pkColumns,
- id,
- data,
- new URLSearchParams()
- );
-
- if (error) {
- console.error("PATCH Operation Error:", error);
- return createResponse(undefined, error, 400);
- }
+ // Handle other search parameters
+ for (const [key, value] of searchParams.entries()) {
+ if (pkColumns.includes(key)) continue // Skip primary key columns
+ const [column, op] = key.split('.')
+ const sanitizedColumn = this.sanitizeIdentifier(column)
+ const operator = this.sanitizeOperator(op)
+
+ if (operator === 'IN') {
+ const values = value.split(',').map((val) => val.trim())
+ const placeholders = values.map(() => '?').join(', ')
+ conditions.push(`${sanitizedColumn} IN (${placeholders})`)
+ params.push(...values)
+ } else {
+ conditions.push(`${sanitizedColumn} ${operator} ?`)
+ params.push(value)
+ }
+ }
- if (!this.isDataValid(data)) {
- console.error("Invalid data format for PATCH:", data);
- return createResponse(undefined, "Invalid data format", 400);
- }
+ // Add WHERE clause if there are conditions
+ if (conditions.length > 0) {
+ query += ` WHERE ${conditions.join(' AND ')}`
+ }
- const dataKeys = Object.keys(data);
- if (dataKeys.length === 0) {
- console.error("No data provided for PATCH");
- return createResponse(undefined, "No data provided", 400);
- }
+ // Add ORDER BY clause
+ if (sortBy) {
+ const sanitizedSortBy = this.sanitizeIdentifier(sortBy)
+ const order = orderParam?.toUpperCase() === 'DESC' ? 'DESC' : 'ASC'
+ query += ` ORDER BY ${sanitizedSortBy} ${order}`
+ }
- // Remove primary key columns from dataKeys
- const updateKeys = dataKeys.filter((key) => !pkColumns.includes(key));
+ // Add LIMIT and OFFSET clauses
+ if (limitParam) {
+ const limit = parseInt(limitParam, 10)
+ if (limit > 0) {
+ query += ` LIMIT ?`
+ params.push(limit)
+
+ if (offsetParam) {
+ const offset = parseInt(offsetParam, 10)
+ if (offset > 0) {
+ query += ` OFFSET ?`
+ params.push(offset)
+ }
+ }
+ }
+ }
- if (updateKeys.length === 0) {
- console.error("No updatable data provided for PATCH");
- return createResponse(undefined, "No updatable data provided", 400);
+ return { query, params }
}
- // Sanitize column names
- const columns = updateKeys.map((col) => this.sanitizeIdentifier(col));
- const setClause = columns.map((col) => `${col} = ?`).join(", ");
- const query = `UPDATE ${
- schemaName ? `${schemaName}.` : ""
- }${tableName} SET ${setClause} WHERE ${pkConditions.join(" AND ")}`;
-
- // Map parameters using original data keys to get correct values
- const params = updateKeys.map((key) => data[key]);
- params.push(...pkParams);
-
- const queries = [{ sql: query, params }];
-
- try {
- await this.executeOperation(queries);
- return createResponse(
- { message: "Resource updated successfully", data },
- undefined,
- 200
- );
- } catch (error: any) {
- console.error("PATCH Operation Error:", error);
- return createResponse(
- undefined,
- error.message || "Failed to update resource",
- 500
- );
- }
- }
-
- private async handlePut(
- tableName: string,
- schemaName: string | undefined,
- id: string | undefined,
- data: any
- ): Promise<Response> {
- const pkColumns = await this.getPrimaryKeyColumns(tableName, schemaName);
-
- const {
- conditions: pkConditions,
- params: pkParams,
- error,
- } = this.getPrimaryKeyConditions(
- pkColumns,
- id,
- data,
- new URLSearchParams()
- );
-
- if (error) {
- console.error("PUT Operation Error:", error);
- return createResponse(undefined, error, 400);
- }
+ private async handleGet(
+ tableName: string,
+ schemaName: string | undefined,
+ id: string | undefined,
+ searchParams: URLSearchParams
+ ): Promise<Response> {
+ const { query, params } = await this.buildSelectQuery(
+ tableName,
+ schemaName,
+ id,
+ searchParams
+ )
- if (!this.isDataValid(data)) {
- console.error("Invalid data format for PUT:", data);
- return createResponse(undefined, "Invalid data format", 400);
+ try {
+ const response = await this.executeOperation([
+ { sql: query, params },
+ ])
+ const resultArray = response.result
+ return createResponse(resultArray, undefined, 200)
+ } catch (error: any) {
+ console.error('GET Operation Error:', error)
+ return createResponse(
+ undefined,
+ error.message || 'Failed to retrieve data',
+ 500
+ )
+ }
}
- const dataKeys = Object.keys(data);
- if (dataKeys.length === 0) {
- console.error("No data provided for PUT");
- return createResponse(undefined, "No data provided", 400);
- }
+ private async handlePost(
+ tableName: string,
+ schemaName: string | undefined,
+ data: any
+ ): Promise<Response> {
+ if (!this.isDataValid(data)) {
+ console.error('Invalid data format for POST:', data)
+ return createResponse(undefined, 'Invalid data format', 400)
+ }
+
+ const dataKeys = Object.keys(data)
+ if (dataKeys.length === 0) {
+ console.error('No data provided for POST')
+ return createResponse(undefined, 'No data provided', 400)
+ }
- // Sanitize column names
- const columns = dataKeys.map((col) => this.sanitizeIdentifier(col));
- const setClause = columns.map((col) => `${col} = ?`).join(", ");
- const query = `UPDATE ${
- schemaName ? `${schemaName}.` : ""
- }${tableName} SET ${setClause} WHERE ${pkConditions.join(" AND ")}`;
-
- // Map parameters using original data keys to get correct values
- const params = dataKeys.map((key) => data[key]);
- params.push(...pkParams);
-
- const queries = [{ sql: query, params }];
-
- try {
- await this.executeOperation(queries);
- return createResponse(
- { message: "Resource replaced successfully", data },
- undefined,
- 200
- );
- } catch (error: any) {
- console.error("PUT Operation Error:", error);
- return createResponse(
- undefined,
- error.message || "Failed to replace resource",
- 500
- );
+ // Sanitize column names
+ const columns = dataKeys.map((col) => this.sanitizeIdentifier(col))
+ const placeholders = columns.map(() => '?').join(', ')
+ const query = `INSERT INTO ${
+ schemaName ? `${schemaName}.` : ''
+ }${tableName} (${columns.join(', ')}) VALUES (${placeholders})`
+
+ // Map parameters using original data keys to get correct values
+ const params = dataKeys.map((key) => data[key])
+ const queries = [{ sql: query, params }]
+
+ try {
+ await this.executeOperation(queries)
+ return createResponse(
+ { message: 'Resource created successfully', data },
+ undefined,
+ 201
+ )
+ } catch (error: any) {
+ console.error('POST Operation Error:', error)
+ const errorMessage =
+ error.message ||
+ error.error ||
+ JSON.stringify(error) ||
+ 'Failed to create resource'
+ return createResponse(undefined, errorMessage, 500)
+ }
}
- }
- private async handleDelete(
- tableName: string,
- schemaName: string | undefined,
- id: string | undefined
- ): Promise<Response> {
- const pkColumns = await this.getPrimaryKeyColumns(tableName, schemaName);
+ private async handlePatch(
+ tableName: string,
+ schemaName: string | undefined,
+ id: string | undefined,
+ data: any
+ ): Promise<Response> {
+ const pkColumns = await this.getPrimaryKeyColumns(tableName, schemaName)
+
+ const {
+ conditions: pkConditions,
+ params: pkParams,
+ error,
+ } = this.getPrimaryKeyConditions(
+ pkColumns,
+ id,
+ data,
+ new URLSearchParams()
+ )
- let data: any = {};
+ if (error) {
+ console.error('PATCH Operation Error:', error)
+ return createResponse(undefined, error, 400)
+ }
+
+ if (!this.isDataValid(data)) {
+ console.error('Invalid data format for PATCH:', data)
+ return createResponse(undefined, 'Invalid data format', 400)
+ }
+
+ const dataKeys = Object.keys(data)
+ if (dataKeys.length === 0) {
+ console.error('No data provided for PATCH')
+ return createResponse(undefined, 'No data provided', 400)
+ }
- // Currently the DELETE only works with single primary key tables.
- if (pkColumns.length) {
- const firstPK = pkColumns[0];
- data[firstPK] = id;
+ // Remove primary key columns from dataKeys
+ const updateKeys = dataKeys.filter((key) => !pkColumns.includes(key))
+
+ if (updateKeys.length === 0) {
+ console.error('No updatable data provided for PATCH')
+ return createResponse(undefined, 'No updatable data provided', 400)
+ }
+
+ // Sanitize column names
+ const columns = updateKeys.map((col) => this.sanitizeIdentifier(col))
+ const setClause = columns.map((col) => `${col} = ?`).join(', ')
+ const query = `UPDATE ${
+ schemaName ? `${schemaName}.` : ''
+ }${tableName} SET ${setClause} WHERE ${pkConditions.join(' AND ')}`
+
+ // Map parameters using original data keys to get correct values
+ const params = updateKeys.map((key) => data[key])
+ params.push(...pkParams)
+
+ const queries = [{ sql: query, params }]
+
+ try {
+ await this.executeOperation(queries)
+ return createResponse(
+ { message: 'Resource updated successfully', data },
+ undefined,
+ 200
+ )
+ } catch (error: any) {
+ console.error('PATCH Operation Error:', error)
+ return createResponse(
+ undefined,
+ error.message || 'Failed to update resource',
+ 500
+ )
+ }
}
- const {
- conditions: pkConditions,
- params: pkParams,
- error,
- } = this.getPrimaryKeyConditions(
- pkColumns,
- id,
- data,
- new URLSearchParams()
- );
-
- if (error) {
- console.error("DELETE Operation Error:", error);
- return createResponse(undefined, error, 400);
+ private async handlePut(
+ tableName: string,
+ schemaName: string | undefined,
+ id: string | undefined,
+ data: any
+ ): Promise<Response> {
+ const pkColumns = await this.getPrimaryKeyColumns(tableName, schemaName)
+
+ const {
+ conditions: pkConditions,
+ params: pkParams,
+ error,
+ } = this.getPrimaryKeyConditions(
+ pkColumns,
+ id,
+ data,
+ new URLSearchParams()
+ )
+
+ if (error) {
+ console.error('PUT Operation Error:', error)
+ return createResponse(undefined, error, 400)
+ }
+
+ if (!this.isDataValid(data)) {
+ console.error('Invalid data format for PUT:', data)
+ return createResponse(undefined, 'Invalid data format', 400)
+ }
+
+ const dataKeys = Object.keys(data)
+ if (dataKeys.length === 0) {
+ console.error('No data provided for PUT')
+ return createResponse(undefined, 'No data provided', 400)
+ }
+
+ // Sanitize column names
+ const columns = dataKeys.map((col) => this.sanitizeIdentifier(col))
+ const setClause = columns.map((col) => `${col} = ?`).join(', ')
+ const query = `UPDATE ${
+ schemaName ? `${schemaName}.` : ''
+ }${tableName} SET ${setClause} WHERE ${pkConditions.join(' AND ')}`
+
+ // Map parameters using original data keys to get correct values
+ const params = dataKeys.map((key) => data[key])
+ params.push(...pkParams)
+
+ const queries = [{ sql: query, params }]
+
+ try {
+ await this.executeOperation(queries)
+ return createResponse(
+ { message: 'Resource replaced successfully', data },
+ undefined,
+ 200
+ )
+ } catch (error: any) {
+ console.error('PUT Operation Error:', error)
+ return createResponse(
+ undefined,
+ error.message || 'Failed to replace resource',
+ 500
+ )
+ }
}
- const query = `DELETE FROM ${
- schemaName ? `${schemaName}.` : ""
- }${tableName} WHERE ${pkConditions.join(" AND ")}`;
- const queries = [{ sql: query, params: pkParams }];
-
- try {
- await this.executeOperation(queries);
- return createResponse(
- { message: "Resource deleted successfully" },
- undefined,
- 200
- );
- } catch (error: any) {
- console.error("DELETE Operation Error:", error);
- return createResponse(
- undefined,
- error.message || "Failed to delete resource",
- 500
- );
+ private async handleDelete(
+ tableName: string,
+ schemaName: string | undefined,
+ id: string | undefined
+ ): Promise<Response> {
+ const pkColumns = await this.getPrimaryKeyColumns(tableName, schemaName)
+
+ let data: any = {}
+
+ // Currently the DELETE only works with single primary key tables.
+ if (pkColumns.length) {
+ const firstPK = pkColumns[0]
+ data[firstPK] = id
+ }
+
+ const {
+ conditions: pkConditions,
+ params: pkParams,
+ error,
+ } = this.getPrimaryKeyConditions(
+ pkColumns,
+ id,
+ data,
+ new URLSearchParams()
+ )
+
+ if (error) {
+ console.error('DELETE Operation Error:', error)
+ return createResponse(undefined, error, 400)
+ }
+
+ const query = `DELETE FROM ${
+ schemaName ? `${schemaName}.` : ''
+ }${tableName} WHERE ${pkConditions.join(' AND ')}`
+ const queries = [{ sql: query, params: pkParams }]
+
+ try {
+ await this.executeOperation(queries)
+ return createResponse(
+ { message: 'Resource deleted successfully' },
+ undefined,
+ 200
+ )
+ } catch (error: any) {
+ console.error('DELETE Operation Error:', error)
+ return createResponse(
+ undefined,
+ error.message || 'Failed to delete resource',
+ 500
+ )
+ }
}
- }
}
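
For readers scanning this hunk, a minimal sketch of the statement shapes the REST handlers above build; the `users` table, `id` primary key, and request body are hypothetical, and identifier sanitization plus primary-key parsing are elided:

const pkColumns = ['id']
const body: Record<string, unknown> = { id: 42, name: 'Ada', email: 'ada@example.com' }

// PUT keeps every body key in the SET clause
const putCols = Object.keys(body)
const putSql = `UPDATE users SET ${putCols.map((c) => `${c} = ?`).join(', ')} WHERE id = ?`
// -> UPDATE users SET id = ?, name = ?, email = ? WHERE id = ?

// PATCH drops primary key columns before building the SET clause
const patchCols = Object.keys(body).filter((k) => !pkColumns.includes(k))
const patchSql = `UPDATE users SET ${patchCols.map((c) => `${c} = ?`).join(', ')} WHERE id = ?`
// -> UPDATE users SET name = ?, email = ? WHERE id = ?

console.log(putSql, patchSql)
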
diff --git a/src/operation.ts b/src/operation.ts
index 03969dd..cbea633 100644
--- a/src/operation.ts
+++ b/src/operation.ts
@@ -1,117 +1,120 @@
// Import the native Node libraries for connecting to various databases
-import { Client as PgClient } from "pg";
-import { createConnection as createMySqlConnection } from "mysql2";
-import { createClient as createTursoConnection } from "@libsql/client/web";
+import { Client as PgClient } from 'pg'
+import { createConnection as createMySqlConnection } from 'mysql2'
+import { createClient as createTursoConnection } from '@libsql/client/web'
// Import how we interact with the databases through the Outerbase SDK
import {
- CloudflareD1Connection,
- MySQLConnection,
- PostgreSQLConnection,
- StarbaseConnection,
- TursoConnection,
-} from "@outerbase/sdk";
+ CloudflareD1Connection,
+ MySQLConnection,
+ PostgreSQLConnection,
+ StarbaseConnection,
+ TursoConnection,
+} from '@outerbase/sdk'
import {
- CloudflareD1Source,
- DataSource,
- RemoteSource,
- StarbaseDBSource,
- TursoDBSource,
-} from "./types";
-import { StarbaseDBConfiguration } from "./handler";
-import { afterQueryCache, beforeQueryCache } from "./cache";
-import { isQueryAllowed } from "./allowlist";
-import { applyRLS } from "./rls";
-import type { SqlConnection } from "@outerbase/sdk/dist/connections/sql-base";
+ CloudflareD1Source,
+ DataSource,
+ RemoteSource,
+ StarbaseDBSource,
+ TursoDBSource,
+} from './types'
+import { StarbaseDBConfiguration } from './handler'
+import { afterQueryCache, beforeQueryCache } from './cache'
+import { isQueryAllowed } from './allowlist'
+import { applyRLS } from './rls'
+import type { SqlConnection } from '@outerbase/sdk/dist/connections/sql-base'
export type OperationQueueItem = {
- queries: { sql: string; params?: any[] }[];
- isTransaction: boolean;
- isRaw: boolean;
- resolve: (value: any) => void;
- reject: (reason?: any) => void;
-};
+ queries: { sql: string; params?: any[] }[]
+ isTransaction: boolean
+ isRaw: boolean
+ resolve: (value: any) => void
+ reject: (reason?: any) => void
+}
export type RawQueryResponse = {
- columns: string[];
- rows: unknown[];
- meta: {
- rows_read: number;
- rows_written: number;
- };
-};
+ columns: string[]
+ rows: unknown[]
+ meta: {
+ rows_read: number
+ rows_written: number
+ }
+}
-export type QueryResponse = unknown[] | RawQueryResponse;
+export type QueryResponse = unknown[] | RawQueryResponse
export type ConnectionDetails = {
- database: SqlConnection;
- defaultSchema: string;
-};
+ database: SqlConnection
+ defaultSchema: string
+}
async function beforeQuery(opts: {
- sql: string;
- params?: unknown[];
- dataSource?: DataSource;
- config?: StarbaseDBConfiguration;
+ sql: string
+ params?: unknown[]
+ dataSource?: DataSource
+ config?: StarbaseDBConfiguration
}): Promise<{ sql: string; params?: unknown[] }> {
- let { sql, params, dataSource, config } = opts;
+ let { sql, params, dataSource, config } = opts
- // ## DO NOT REMOVE: PRE QUERY HOOK ##
+ // ## DO NOT REMOVE: PRE QUERY HOOK ##
- return {
- sql,
- params,
- };
+ return {
+ sql,
+ params,
+ }
}
async function afterQuery(opts: {
- sql: string;
- result: any;
- isRaw: boolean;
- dataSource?: DataSource;
- config?: StarbaseDBConfiguration;
+ sql: string
+ result: any
+ isRaw: boolean
+ dataSource?: DataSource
+ config?: StarbaseDBConfiguration
}): Promise {
- let { result, isRaw, dataSource, config } = opts;
- result = isRaw ? transformRawResults(result, "from") : result;
+ let { result, isRaw, dataSource, config } = opts
+ result = isRaw ? transformRawResults(result, 'from') : result
- // ## DO NOT REMOVE: POST QUERY HOOK ##
+ // ## DO NOT REMOVE: POST QUERY HOOK ##
- return isRaw ? transformRawResults(result, "to") : result;
+ return isRaw ? transformRawResults(result, 'to') : result
}
function transformRawResults(
- result: any,
- direction: "to" | "from"
+ result: any,
+ direction: 'to' | 'from'
): Record<string, any> {
- if (direction === "from") {
- // Convert our result from the `raw` output to a traditional object
- result = {
- ...result,
- rows: result.rows.map((row: any) =>
- result.columns.reduce((obj: any, column: string, index: number) => {
- obj[column] = row[index];
- return obj;
- }, {})
- ),
- };
-
- return result.rows;
- } else if (direction === "to") {
- // Convert our traditional object to the `raw` output format
- const columns = Object.keys(result[0] || {});
- const rows = result.map((row: any) => columns.map((col) => row[col]));
+ if (direction === 'from') {
+ // Convert our result from the `raw` output to a traditional object
+ result = {
+ ...result,
+ rows: result.rows.map((row: any) =>
+ result.columns.reduce(
+ (obj: any, column: string, index: number) => {
+ obj[column] = row[index]
+ return obj
+ },
+ {}
+ )
+ ),
+ }
+
+ return result.rows
+ } else if (direction === 'to') {
+ // Convert our traditional object to the `raw` output format
+ const columns = Object.keys(result[0] || {})
+ const rows = result.map((row: any) => columns.map((col) => row[col]))
+
+ return {
+ columns,
+ rows,
+ meta: {
+ rows_read: rows.length,
+ rows_written: 0,
+ },
+ }
+ }
- return {
- columns,
- rows,
- meta: {
- rows_read: rows.length,
- rows_written: 0,
- },
- };
- }
-
- return result;
+ return result
}
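
For context, a small standalone sketch of the shape conversion transformRawResults performs; the column names and values are invented:

// direction 'from': raw columns/rows become an array of row objects
const raw = {
    columns: ['id', 'name'],
    rows: [
        [1, 'Alice'],
        [2, 'Bob'],
    ],
    meta: { rows_read: 2, rows_written: 0 },
}
const objects = raw.rows.map((row) =>
    raw.columns.reduce<Record<string, unknown>>((obj, col, i) => {
        obj[col] = row[i]
        return obj
    }, {})
)
// -> [{ id: 1, name: 'Alice' }, { id: 2, name: 'Bob' }]

// direction 'to': row objects go back to the raw columns/rows format
const columns = Object.keys(objects[0] ?? {})
const rows = objects.map((row) => columns.map((col) => row[col]))
console.log(objects, { columns, rows })
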
// Outerbase API supports more data sources than can be supported via Cloudflare Workers. For those data
@@ -119,287 +122,292 @@ function transformRawResults(
// to be made. Otherwise, for supported data sources such as Postgres, MySQL, D1, StarbaseDB, Turso and Mongo
// we can connect to the database directly and remove the additional hop to the Outerbase API.
async function executeExternalQuery(opts: {
- sql: string;
- params: any;
- dataSource: DataSource;
- config: StarbaseDBConfiguration;
+ sql: string
+ params: any
+ dataSource: DataSource
+ config: StarbaseDBConfiguration
}): Promise {
- let { sql, params, dataSource, config } = opts;
-
- if (!dataSource.external) {
- throw new Error("External connection not found.");
- }
-
- // If not an Outerbase API request, forward to external database.
- if (!config?.outerbaseApiKey) {
- return executeSDKQuery({ sql, params, dataSource, config });
- }
-
- // Convert params from array to object if needed
- let convertedParams = params;
- if (Array.isArray(params)) {
- let paramIndex = 0;
- convertedParams = params.reduce(
- (acc, value, index) => ({
- ...acc,
- [`param${index}`]: value,
- }),
- {}
- );
- sql = sql.replace(/\?/g, () => `:param${paramIndex++}`);
- }
-
- const API_URL = "https://app.outerbase.com";
- const response = await fetch(`${API_URL}/api/v1/ezql/raw`, {
- method: "POST",
- headers: {
- "Content-Type": "application/json",
- "X-Source-Token": config.outerbaseApiKey,
- },
- body: JSON.stringify({
- query: sql.replaceAll("\n", " "),
- params: convertedParams,
- }),
- });
-
- const results: any = await response.json();
- return results.response.results?.items;
+ let { sql, params, dataSource, config } = opts
+
+ if (!dataSource.external) {
+ throw new Error('External connection not found.')
+ }
+
+ // If not an Outerbase API request, forward to external database.
+ if (!config?.outerbaseApiKey) {
+ return executeSDKQuery({ sql, params, dataSource, config })
+ }
+
+ // Convert params from array to object if needed
+ let convertedParams = params
+ if (Array.isArray(params)) {
+ let paramIndex = 0
+ convertedParams = params.reduce(
+ (acc, value, index) => ({
+ ...acc,
+ [`param${index}`]: value,
+ }),
+ {}
+ )
+ sql = sql.replace(/\?/g, () => `:param${paramIndex++}`)
+ }
+
+ const API_URL = 'https://app.outerbase.com'
+ const response = await fetch(`${API_URL}/api/v1/ezql/raw`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ 'X-Source-Token': config.outerbaseApiKey,
+ },
+ body: JSON.stringify({
+ query: sql.replaceAll('\n', ' '),
+ params: convertedParams,
+ }),
+ })
+
+ const results: any = await response.json()
+ return results.response.results?.items
}
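
The positional-to-named parameter rewrite above is easy to misread, so here is a tiny standalone sketch of the same conversion; the SQL and values are invented:

let sql = 'SELECT * FROM users WHERE id = ? AND status = ?'
const params = [42, 'active']

const convertedParams = params.reduce<Record<string, unknown>>(
    (acc, value, index) => ({ ...acc, [`param${index}`]: value }),
    {}
)
let paramIndex = 0
sql = sql.replace(/\?/g, () => `:param${paramIndex++}`)

console.log(sql) // SELECT * FROM users WHERE id = :param0 AND status = :param1
console.log(convertedParams) // { param0: 42, param1: 'active' }
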
export async function executeQuery(opts: {
- sql: string;
- params: unknown[] | undefined;
- isRaw: boolean;
- dataSource: DataSource;
- config: StarbaseDBConfiguration;
+ sql: string
+ params: unknown[] | undefined
+ isRaw: boolean
+ dataSource: DataSource
+ config: StarbaseDBConfiguration
}): Promise<QueryResponse> {
- let { sql, params, isRaw, dataSource, config } = opts;
-
- if (!dataSource) {
- console.error("Data source not found.");
- return [];
- }
-
- // If the allowlist feature is enabled, we should verify the query is allowed before proceeding.
- await isQueryAllowed({
- sql: sql,
- isEnabled: config?.features?.allowlist ?? false,
- dataSource,
- config,
- });
-
- // If the row level security feature is enabled, we should apply our policies to this SQL statement.
- sql = await applyRLS({
- sql,
- isEnabled: config?.features?.rls ?? true,
- dataSource,
- config,
- });
-
- // Run the beforeQuery hook for any third party logic to be applied before execution.
- const { sql: updatedSQL, params: updatedParams } = await beforeQuery({
- sql,
- params,
- dataSource,
- config,
- });
-
- // If the query was modified by RLS then we determine it isn't currently a valid candidate
- // for caching. In the future we will support queries impacted by RLS and caching their
- // results.
- if (!isRaw) {
- // If a cached version of this query request exists, this function will fetch the cached results.
- const cache = await beforeQueryCache({
- sql: updatedSQL,
- params: updatedParams,
- dataSource,
- });
-
- if (cache) {
- return cache as QueryResponse;
+ let { sql, params, isRaw, dataSource, config } = opts
+
+ if (!dataSource) {
+ console.error('Data source not found.')
+ return []
+ }
+
+ // If the allowlist feature is enabled, we should verify the query is allowed before proceeding.
+ await isQueryAllowed({
+ sql: sql,
+ isEnabled: config?.features?.allowlist ?? false,
+ dataSource,
+ config,
+ })
+
+ // If the row level security feature is enabled, we should apply our policies to this SQL statement.
+ sql = await applyRLS({
+ sql,
+ isEnabled: config?.features?.rls ?? true,
+ dataSource,
+ config,
+ })
+
+ // Run the beforeQuery hook for any third party logic to be applied before execution.
+ const { sql: updatedSQL, params: updatedParams } = await beforeQuery({
+ sql,
+ params,
+ dataSource,
+ config,
+ })
+
+ // If the query was modified by RLS then we determine it isn't currently a valid candidate
+ // for caching. In the future we will support queries impacted by RLS and caching their
+ // results.
+ if (!isRaw) {
+ // If a cached version of this query request exists, this function will fetch the cached results.
+ const cache = await beforeQueryCache({
+ sql: updatedSQL,
+ params: updatedParams,
+ dataSource,
+ })
+
+ if (cache) {
+ return cache as QueryResponse
+ }
}
- }
-
- let result;
-
- if (dataSource.source === "internal") {
- result = await dataSource.rpc.executeQuery({
- sql: updatedSQL,
- params: updatedParams,
- isRaw,
- });
- } else {
- result = await executeExternalQuery({
- sql: updatedSQL,
- params: updatedParams,
- dataSource,
- config,
- });
- }
-
- // If this is a cacheable query, this function will handle that logic.
- if (!isRaw) {
- await afterQueryCache({ sql, params: updatedParams, result, dataSource });
- }
-
- return await afterQuery({
- sql: updatedSQL,
- result,
- isRaw,
- dataSource,
- config,
- });
+
+ let result
+
+ if (dataSource.source === 'internal') {
+ result = await dataSource.rpc.executeQuery({
+ sql: updatedSQL,
+ params: updatedParams,
+ isRaw,
+ })
+ } else {
+ result = await executeExternalQuery({
+ sql: updatedSQL,
+ params: updatedParams,
+ dataSource,
+ config,
+ })
+ }
+
+ // If this is a cacheable query, this function will handle that logic.
+ if (!isRaw) {
+ await afterQueryCache({
+ sql,
+ params: updatedParams,
+ result,
+ dataSource,
+ })
+ }
+
+ return await afterQuery({
+ sql: updatedSQL,
+ result,
+ isRaw,
+ dataSource,
+ config,
+ })
}
export async function executeTransaction(opts: {
- queries: { sql: string; params?: any[] }[];
- isRaw: boolean;
- dataSource: DataSource;
- config: StarbaseDBConfiguration;
+ queries: { sql: string; params?: any[] }[]
+ isRaw: boolean
+ dataSource: DataSource
+ config: StarbaseDBConfiguration
}): Promise {
- const { queries, isRaw, dataSource, config } = opts;
+ const { queries, isRaw, dataSource, config } = opts
- if (!dataSource) {
- console.error("Data source not found.");
- return [];
- }
+ if (!dataSource) {
+ console.error('Data source not found.')
+ return []
+ }
- const results = [];
+ const results = []
- for (const query of queries) {
- const result = await executeQuery({
- sql: query.sql,
- params: query.params,
- isRaw,
- dataSource,
- config,
- });
+ for (const query of queries) {
+ const result = await executeQuery({
+ sql: query.sql,
+ params: query.params,
+ isRaw,
+ dataSource,
+ config,
+ })
- results.push(result);
- }
+ results.push(result)
+ }
- return results;
+ return results
}
async function createSDKPostgresConnection(
- source: RemoteSource
+ source: RemoteSource
): Promise<ConnectionDetails> {
- const client = new PostgreSQLConnection(
- new PgClient({
- host: source.host,
- port: source.port,
- user: source.user,
- password: source.password,
- database: source.database,
- })
- );
+ const client = new PostgreSQLConnection(
+ new PgClient({
+ host: source.host,
+ port: source.port,
+ user: source.user,
+ password: source.password,
+ database: source.database,
+ })
+ )
- return {
- database: client,
- defaultSchema: source.defaultSchema || "public",
- };
+ return {
+ database: client,
+ defaultSchema: source.defaultSchema || 'public',
+ }
}
async function createSDKMySQLConnection(
- source: RemoteSource
+ source: RemoteSource
): Promise<ConnectionDetails> {
- const client = new MySQLConnection(
- createMySqlConnection({
- host: source.host,
- port: source.port,
- user: source.user,
- password: source.password,
- database: source.database,
- })
- );
+ const client = new MySQLConnection(
+ createMySqlConnection({
+ host: source.host,
+ port: source.port,
+ user: source.user,
+ password: source.password,
+ database: source.database,
+ })
+ )
- return {
- database: client,
- defaultSchema: source.defaultSchema || "public",
- };
+ return {
+ database: client,
+ defaultSchema: source.defaultSchema || 'public',
+ }
}
async function createSDKTursoConnection(
- source: TursoDBSource
+ source: TursoDBSource
): Promise<ConnectionDetails> {
- const client = new TursoConnection(
- createTursoConnection({
- url: source.uri,
- authToken: source.token,
- })
- );
+ const client = new TursoConnection(
+ createTursoConnection({
+ url: source.uri,
+ authToken: source.token,
+ })
+ )
- return {
- database: client,
- defaultSchema: source.defaultSchema || "main",
- };
+ return {
+ database: client,
+ defaultSchema: source.defaultSchema || 'main',
+ }
}
async function createSDKCloudflareConnection(
- source: CloudflareD1Source
+ source: CloudflareD1Source
): Promise<ConnectionDetails> {
- const client = new CloudflareD1Connection({
- apiKey: source.apiKey,
- accountId: source.accountId,
- databaseId: source.databaseId,
- });
-
- return {
- database: client,
- defaultSchema: source.defaultSchema || "main",
- };
+ const client = new CloudflareD1Connection({
+ apiKey: source.apiKey,
+ accountId: source.accountId,
+ databaseId: source.databaseId,
+ })
+
+ return {
+ database: client,
+ defaultSchema: source.defaultSchema || 'main',
+ }
}
async function createSDKStarbaseConnection(
- source: StarbaseDBSource
+ source: StarbaseDBSource
): Promise<ConnectionDetails> {
- const client = new StarbaseConnection({
- apiKey: source.apiKey,
- url: source.token,
- });
-
- return {
- database: client,
- defaultSchema: source.defaultSchema || "main",
- };
+ const client = new StarbaseConnection({
+ apiKey: source.apiKey,
+ url: source.token,
+ })
+
+ return {
+ database: client,
+ defaultSchema: source.defaultSchema || 'main',
+ }
}
export async function executeSDKQuery(opts: {
- sql: string;
- params?: unknown[] | undefined;
- dataSource: DataSource;
- config: StarbaseDBConfiguration;
+ sql: string
+ params?: unknown[] | undefined
+ dataSource: DataSource
+ config: StarbaseDBConfiguration
}) {
- const external = opts.dataSource.external;
-
- if (!external) {
- console.warn("No external connection found");
- return [];
- }
-
- let connection: SqlConnection;
-
- if (external.dialect === "postgresql") {
- const { database } = await createSDKPostgresConnection(external);
- connection = database;
- } else if (external.dialect === "mysql") {
- const { database } = await createSDKMySQLConnection(external);
- connection = database;
- } else if (external.provider === "cloudflare-d1") {
- const { database } = await createSDKCloudflareConnection(external);
- connection = database;
- } else if (external.provider === "starbase") {
- const { database } = await createSDKStarbaseConnection(external);
- connection = database;
- } else if (external.provider === "turso") {
- const { database } = await createSDKTursoConnection(external);
- connection = database;
- } else {
- throw new Error("Unsupported external database type");
- }
-
- await connection.connect();
-
- const { data } = await connection.raw(opts.sql, opts.params);
- return data;
+ const external = opts.dataSource.external
+
+ if (!external) {
+ console.warn('No external connection found')
+ return []
+ }
+
+ let connection: SqlConnection
+
+ if (external.dialect === 'postgresql') {
+ const { database } = await createSDKPostgresConnection(external)
+ connection = database
+ } else if (external.dialect === 'mysql') {
+ const { database } = await createSDKMySQLConnection(external)
+ connection = database
+ } else if (external.provider === 'cloudflare-d1') {
+ const { database } = await createSDKCloudflareConnection(external)
+ connection = database
+ } else if (external.provider === 'starbase') {
+ const { database } = await createSDKStarbaseConnection(external)
+ connection = database
+ } else if (external.provider === 'turso') {
+ const { database } = await createSDKTursoConnection(external)
+ connection = database
+ } else {
+ throw new Error('Unsupported external database type')
+ }
+
+ await connection.connect()
+
+ const { data } = await connection.raw(opts.sql, opts.params)
+ return data
}
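
As a reading aid for the exports above, a hypothetical call into executeTransaction; the dataSource and config arguments are stand-ins for the real DataSource and StarbaseDBConfiguration objects, and the users table is invented:

import { executeTransaction } from './operation'

async function seedExample(dataSource: any, config: any) {
    return executeTransaction({
        queries: [
            { sql: 'INSERT INTO users (name) VALUES (?)', params: ['Ada'] },
            { sql: 'INSERT INTO users (name) VALUES (?)', params: ['Grace'] },
        ],
        isRaw: false,
        dataSource,
        config,
    })
}
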
diff --git a/src/rls/index.ts b/src/rls/index.ts
index 3f533a1..130982b 100644
--- a/src/rls/index.ts
+++ b/src/rls/index.ts
@@ -1,26 +1,26 @@
-import { StarbaseDBConfiguration } from "../handler";
-import { DataSource, QueryResult } from "../types";
+import { StarbaseDBConfiguration } from '../handler'
+import { DataSource, QueryResult } from '../types'
-const parser = new (require("node-sql-parser").Parser)();
+const parser = new (require('node-sql-parser').Parser)()
type Policy = {
- action: string;
- condition: {
- type: string;
- operator: string;
- left: {
- type: string;
- table: string;
- column: string;
- };
- right: {
- type: string;
- value: string;
- };
- };
-};
-
-let policies: Policy[] = [];
+ action: string
+ condition: {
+ type: string
+ operator: string
+ left: {
+ type: string
+ table: string
+ column: string
+ }
+ right: {
+ type: string
+ value: string
+ }
+ }
+}
+
+let policies: Policy[] = []
// Rules on how RLS policies should work
// 1. If a table has _any_ rules applied to it, then each action needs to be explicitly defined or it should be automatically denied.
@@ -37,338 +37,351 @@ let policies: Policy[] = [];
// -> If they really don't want any rules to exist, remove this power-up
function normalizeIdentifier(name: string): string {
- if (!name) return name;
- if (
- (name.startsWith('"') && name.endsWith('"')) ||
- (name.startsWith("`") && name.endsWith("`"))
- ) {
- return name.slice(1, -1);
- }
- return name;
+ if (!name) return name
+ if (
+ (name.startsWith('"') && name.endsWith('"')) ||
+ (name.startsWith('`') && name.endsWith('`'))
+ ) {
+ return name.slice(1, -1)
+ }
+ return name
}
async function loadPolicies(dataSource: DataSource): Promise<Policy[]> {
- try {
- const statement =
- 'SELECT "actions", "schema", "table", "column", "value", "value_type", "operator" FROM tmp_rls_policies';
- const result = await dataSource.rpc.executeQuery({
- sql: statement,
- }) as QueryResult[];
-
- if (!result || result.length === 0) {
- // Discussion point to be had here. For safety precautions I am ejecting
- // out of the entire flow if no results are responded back with for example
- // the case where the database instance is not responding, we don't want to
- // simply assume that the incoming SQL should be processed. Instead, we need
- // to know that we received all the rules for us to enforce them. When no rules
- // exist we exit with an error.
- throw new Error(
- "Error fetching RLS policies. No policies may exist or there was an error fetching."
- );
- }
+ try {
+ const statement =
+ 'SELECT "actions", "schema", "table", "column", "value", "value_type", "operator" FROM tmp_rls_policies'
+ const result = (await dataSource.rpc.executeQuery({
+ sql: statement,
+ })) as QueryResult[]
+
+ if (!result || result.length === 0) {
+ // Discussion point to be had here. For safety precautions I am ejecting
+ // out of the entire flow if no results are responded back with for example
+ // the case where the database instance is not responding, we don't want to
+ // simply assume that the incoming SQL should be processed. Instead, we need
+ // to know that we received all the rules for us to enforce them. When no rules
+ // exist we exit with an error.
+ throw new Error(
+ 'Error fetching RLS policies. No policies may exist or there was an error fetching.'
+ )
+ }
- const policies = result.map((row: any) => {
- let value = row.value;
- const valueType = row.value_type?.toLowerCase();
-
- // Currently we are supporting two `value_type` options for the time being. By
- // default values are assumed as `string` unless the type is expressed as another
- // in which we cast it to that type. We will need to handle scenarios where
- // the SQL statement itself will need the type casting.
- if (valueType === "number") {
- value = Number(value);
-
- // For example, some databases may require casting like the commented out
- // string here below. We will want to come back and help cover those
- // particular situations.
- // value = `${value}::INT`
- }
-
- let tableName = row.schema ? `${row.schema}.${row.table}` : row.table;
- tableName = normalizeIdentifier(tableName);
- const columnName = normalizeIdentifier(row.column);
-
- // If the policy value is context.id(), use a placeholder
- let rightNode;
- if (value === "context.id()") {
- rightNode = { type: "string", value: "__CONTEXT_ID__" };
- } else {
- rightNode = { type: "string", value: value };
- }
-
- // This policy will help construct clauses, such as a WHERE, for the criteria to be met.
- // For example the left side equals the qualifier table column and the right side equals
- // the value that column should be set to. So a basic example could be:
- // `WHERE (my_column = '1234')`
- return {
- action: row.actions.toUpperCase(),
- condition: {
- type: "binary_expr",
- operator: row.operator,
- left: { type: "column_ref", table: tableName, column: columnName },
- right: rightNode,
- },
- };
- });
-
- return policies;
- } catch (error) {
- console.error("Error loading RLS policies:", error);
- return [];
- }
+ const policies = result.map((row: any) => {
+ let value = row.value
+ const valueType = row.value_type?.toLowerCase()
+
+ // Currently we are supporting two `value_type` options for the time being. By
+ // default values are assumed as `string` unless the type is expressed as another
+ // in which we cast it to that type. We will need to handle scenarios where
+ // the SQL statement itself will need the type casting.
+ if (valueType === 'number') {
+ value = Number(value)
+
+ // For example, some databases may require casting like the commented out
+ // string here below. We will want to come back and help cover those
+ // particular situations.
+ // value = `${value}::INT`
+ }
+
+ let tableName = row.schema
+ ? `${row.schema}.${row.table}`
+ : row.table
+ tableName = normalizeIdentifier(tableName)
+ const columnName = normalizeIdentifier(row.column)
+
+ // If the policy value is context.id(), use a placeholder
+ let rightNode
+ if (value === 'context.id()') {
+ rightNode = { type: 'string', value: '__CONTEXT_ID__' }
+ } else {
+ rightNode = { type: 'string', value: value }
+ }
+
+ // This policy will help construct clauses, such as a WHERE, for the criteria to be met.
+ // For example the left side equals the qualifier table column and the right side equals
+ // the value that column should be set to. So a basic example could be:
+ // `WHERE (my_column = '1234')`
+ return {
+ action: row.actions.toUpperCase(),
+ condition: {
+ type: 'binary_expr',
+ operator: row.operator,
+ left: {
+ type: 'column_ref',
+ table: tableName,
+ column: columnName,
+ },
+ right: rightNode,
+ },
+ }
+ })
+
+ return policies
+ } catch (error) {
+ console.error('Error loading RLS policies:', error)
+ return []
+ }
}
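
To make the row-to-policy mapping above concrete, a hypothetical tmp_rls_policies row and the policy node loadPolicies would derive from it (all values invented):

const row = {
    actions: 'select',
    schema: 'main',
    table: 'orders',
    column: 'user_id',
    value: 'context.id()',
    value_type: 'string',
    operator: '=',
}

// Roughly the node produced for this row:
const policy = {
    action: 'SELECT',
    condition: {
        type: 'binary_expr',
        operator: '=',
        left: { type: 'column_ref', table: 'main.orders', column: 'user_id' },
        right: { type: 'string', value: '__CONTEXT_ID__' },
    },
}
console.log(row, policy)
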
export async function applyRLS(opts: {
- sql: string;
- isEnabled: boolean;
- dataSource: DataSource;
- config: StarbaseDBConfiguration;
+ sql: string
+ isEnabled: boolean
+ dataSource: DataSource
+ config: StarbaseDBConfiguration
}): Promise<string> {
- const { sql, isEnabled, dataSource, config } = opts;
-
- if (!isEnabled) return sql;
- if (!sql) {
- throw Error("No SQL query found in RLS plugin.");
- }
-
- // Do not apply RLS rules to the admin user
- if (config.role === "admin") {
- return sql;
- }
-
- policies = await loadPolicies(dataSource);
-
- const dialect =
- dataSource.source === "external" ? dataSource.external!.dialect : "sqlite";
-
- let context: Record<string, any> = dataSource?.context ?? {};
- let ast;
- let modifiedSql;
- const sqlifyOptions = {
- database: dialect,
- quote: "",
- };
-
- // We are originally provided a SQL statement to evaluate. The first task we must
- // complete is converting it from SQL to an AST object we can breakdown and
- // understand the structure. By breaking down the structure this is where we can
- // begin applying our RLS policies by injecting items into the abstract syntax
- // tree which will later be converted back to an executable SQL statement.
- try {
- ast = parser.astify(sql, { database: dialect });
- if (Array.isArray(ast)) {
- ast.forEach((singleAst) => applyRLSToAst(singleAst));
- } else {
- applyRLSToAst(ast);
+ const { sql, isEnabled, dataSource, config } = opts
+
+ if (!isEnabled) return sql
+ if (!sql) {
+ throw Error('No SQL query found in RLS plugin.')
}
- } catch (error) {
- console.error("Error parsing SQL:", error);
- throw error as Error;
- }
-
- // After the query was converted into an AST and had any RLS policy rules
- // injected into the abstract syntax tree dynamically, now we are ready to
- // convert the AST object back into a SQL statement that the database can
- // execute.
- try {
- if (Array.isArray(ast)) {
- modifiedSql = ast
- .map((singleAst) => parser.sqlify(singleAst, sqlifyOptions))
- .join("; ");
- } else {
- modifiedSql = parser.sqlify(ast, sqlifyOptions);
+
+ // Do not apply RLS rules to the admin user
+ if (config.role === 'admin') {
+ return sql
}
- } catch (error) {
- console.error("Error generating SQL from AST:", error);
- throw error as Error;
- }
- // Replace placeholder with the user's ID properly quoted
- if (context?.sub) {
- modifiedSql = modifiedSql.replace(/'__CONTEXT_ID__'/g, `'${context.sub}'`);
- }
+ policies = await loadPolicies(dataSource)
+
+ const dialect =
+ dataSource.source === 'external'
+ ? dataSource.external!.dialect
+ : 'sqlite'
+
+ let context: Record<string, any> = dataSource?.context ?? {}
+ let ast
+ let modifiedSql
+ const sqlifyOptions = {
+ database: dialect,
+ quote: '',
+ }
- return modifiedSql;
+ // We are originally provided a SQL statement to evaluate. The first task we must
+ // complete is converting it from SQL to an AST object we can breakdown and
+ // understand the structure. By breaking down the structure this is where we can
+ // begin applying our RLS policies by injecting items into the abstract syntax
+ // tree which will later be converted back to an executable SQL statement.
+ try {
+ ast = parser.astify(sql, { database: dialect })
+ if (Array.isArray(ast)) {
+ ast.forEach((singleAst) => applyRLSToAst(singleAst))
+ } else {
+ applyRLSToAst(ast)
+ }
+ } catch (error) {
+ console.error('Error parsing SQL:', error)
+ throw error as Error
+ }
+
+ // After the query was converted into an AST and had any RLS policy rules
+ // injected into the abstract syntax tree dynamically, now we are ready to
+ // convert the AST object back into a SQL statement that the database can
+ // execute.
+ try {
+ if (Array.isArray(ast)) {
+ modifiedSql = ast
+ .map((singleAst) => parser.sqlify(singleAst, sqlifyOptions))
+ .join('; ')
+ } else {
+ modifiedSql = parser.sqlify(ast, sqlifyOptions)
+ }
+ } catch (error) {
+ console.error('Error generating SQL from AST:', error)
+ throw error as Error
+ }
+
+ // Replace placeholder with the user's ID properly quoted
+ if (context?.sub) {
+ modifiedSql = modifiedSql.replace(
+ /'__CONTEXT_ID__'/g,
+ `'${context.sub}'`
+ )
+ }
+
+ return modifiedSql
}
function applyRLSToAst(ast: any): void {
- if (!ast) return;
-
- // Handle WITH (CTE) queries as arrays
- if (ast.with && Array.isArray(ast.with)) {
- for (const cte of ast.with) {
- if (cte.stmt) {
- applyRLSToAst(cte.stmt);
- }
+ if (!ast) return
+
+ // Handle WITH (CTE) queries as arrays
+ if (ast.with && Array.isArray(ast.with)) {
+ for (const cte of ast.with) {
+ if (cte.stmt) {
+ applyRLSToAst(cte.stmt)
+ }
+ }
}
- }
-
- // Set operations
- if (["union", "intersect", "except"].includes(ast.type)) {
- applyRLSToAst(ast.left);
- applyRLSToAst(ast.right);
- return;
- }
-
- // Subqueries in INSERT/UPDATE/DELETE
- if (ast.type === "insert" && ast.from) {
- applyRLSToAst(ast.from);
- }
- if (ast.type === "update" && ast.where) {
- traverseWhere(ast.where);
- }
- if (ast.type === "delete" && ast.where) {
- traverseWhere(ast.where);
- }
-
- const tablesWithRules: Record<string, string[]> = {};
- policies.forEach((policy) => {
- const tbl = normalizeIdentifier(policy.condition.left.table);
- if (!tablesWithRules[tbl]) {
- tablesWithRules[tbl] = [];
+
+ // Set operations
+ if (['union', 'intersect', 'except'].includes(ast.type)) {
+ applyRLSToAst(ast.left)
+ applyRLSToAst(ast.right)
+ return
}
- tablesWithRules[tbl].push(policy.action);
- });
-
- const statementType = ast.type?.toUpperCase();
- if (!["SELECT", "UPDATE", "DELETE", "INSERT"].includes(statementType)) {
- return;
- }
-
- let tables: string[] = [];
- if (statementType === "INSERT") {
- let tableName = normalizeIdentifier(ast.table[0].table);
- if (tableName.includes(".")) {
- tableName = tableName.split(".")[1];
+
+ // Subqueries in INSERT/UPDATE/DELETE
+ if (ast.type === 'insert' && ast.from) {
+ applyRLSToAst(ast.from)
}
- tables = [tableName];
- } else if (statementType === "UPDATE") {
- tables = ast.table.map((tableRef: any) => {
- let tableName = normalizeIdentifier(tableRef.table);
- if (tableName.includes(".")) {
- tableName = tableName.split(".")[1];
- }
- return tableName;
- });
- } else {
- // SELECT or DELETE
- tables =
- ast.from?.map((fromTable: any) => {
- let tableName = normalizeIdentifier(fromTable.table);
- if (tableName.includes(".")) {
- tableName = tableName.split(".")[1];
- }
- return tableName;
- }) || [];
- }
-
- const restrictedTables = Object.keys(tablesWithRules);
-
- for (const table of tables) {
- if (restrictedTables.includes(table)) {
- const allowedActions = tablesWithRules[table];
- if (!allowedActions.includes(statementType)) {
- throw new Error(
- `Unauthorized access: No matching rules for ${statementType} on restricted table ${table}`
- );
- }
+ if (ast.type === 'update' && ast.where) {
+ traverseWhere(ast.where)
}
- }
-
- policies
- .filter(
- (policy) => policy.action === statementType || policy.action === "*"
- )
- .forEach(({ action, condition }) => {
- const targetTable = normalizeIdentifier(condition.left.table);
- const isTargetTable = tables.includes(targetTable);
-
- if (!isTargetTable) return;
-
- if (action !== "INSERT") {
- // Add condition to WHERE with parentheses
- if (ast.where) {
- ast.where = {
- type: "binary_expr",
- operator: "AND",
- parentheses: true,
- left: {
- ...ast.where,
- parentheses: true,
- },
- right: {
- ...condition,
- parentheses: true,
- },
- };
- } else {
- ast.where = {
- ...condition,
- parentheses: true,
- };
- }
- } else {
- // For INSERT, enforce column values
- if (ast.values && ast.values.length > 0) {
- const columnIndex = ast.columns.findIndex(
- (col: any) =>
- normalizeIdentifier(col) ===
- normalizeIdentifier(condition.left.column)
- );
- if (columnIndex !== -1) {
- ast.values.forEach((valueList: any) => {
- if (
- valueList.type === "expr_list" &&
- Array.isArray(valueList.value)
- ) {
- valueList.value[columnIndex] = {
- type: condition.right.type,
- value: condition.right.value,
- };
- } else {
- valueList[columnIndex] = {
- type: condition.right.type,
- value: condition.right.value,
- };
- }
- });
- }
+ if (ast.type === 'delete' && ast.where) {
+ traverseWhere(ast.where)
+ }
+
+ const tablesWithRules: Record<string, string[]> = {}
+ policies.forEach((policy) => {
+ const tbl = normalizeIdentifier(policy.condition.left.table)
+ if (!tablesWithRules[tbl]) {
+ tablesWithRules[tbl] = []
}
- }
- });
+ tablesWithRules[tbl].push(policy.action)
+ })
- ast.from?.forEach((fromItem: any) => {
- if (fromItem.expr && fromItem.expr.type === "select") {
- applyRLSToAst(fromItem.expr);
+ const statementType = ast.type?.toUpperCase()
+ if (!['SELECT', 'UPDATE', 'DELETE', 'INSERT'].includes(statementType)) {
+ return
}
- // Handle both single join and array of joins
- if (fromItem.join) {
- const joins = Array.isArray(fromItem.join) ? fromItem.join : [fromItem];
- joins.forEach((joinItem: any) => {
- if (joinItem.expr && joinItem.expr.type === "select") {
- applyRLSToAst(joinItem.expr);
+ let tables: string[] = []
+ if (statementType === 'INSERT') {
+ let tableName = normalizeIdentifier(ast.table[0].table)
+ if (tableName.includes('.')) {
+ tableName = tableName.split('.')[1]
}
- });
+ tables = [tableName]
+ } else if (statementType === 'UPDATE') {
+ tables = ast.table.map((tableRef: any) => {
+ let tableName = normalizeIdentifier(tableRef.table)
+ if (tableName.includes('.')) {
+ tableName = tableName.split('.')[1]
+ }
+ return tableName
+ })
+ } else {
+ // SELECT or DELETE
+ tables =
+ ast.from?.map((fromTable: any) => {
+ let tableName = normalizeIdentifier(fromTable.table)
+ if (tableName.includes('.')) {
+ tableName = tableName.split('.')[1]
+ }
+ return tableName
+ }) || []
}
- });
- if (ast.where) {
- traverseWhere(ast.where);
- }
+ const restrictedTables = Object.keys(tablesWithRules)
- ast.columns?.forEach((column: any) => {
- if (column.expr && column.expr.type === "select") {
- applyRLSToAst(column.expr);
+ for (const table of tables) {
+ if (restrictedTables.includes(table)) {
+ const allowedActions = tablesWithRules[table]
+ if (!allowedActions.includes(statementType)) {
+ throw new Error(
+ `Unauthorized access: No matching rules for ${statementType} on restricted table ${table}`
+ )
+ }
+ }
}
- });
+
+ policies
+ .filter(
+ (policy) => policy.action === statementType || policy.action === '*'
+ )
+ .forEach(({ action, condition }) => {
+ const targetTable = normalizeIdentifier(condition.left.table)
+ const isTargetTable = tables.includes(targetTable)
+
+ if (!isTargetTable) return
+
+ if (action !== 'INSERT') {
+ // Add condition to WHERE with parentheses
+ if (ast.where) {
+ ast.where = {
+ type: 'binary_expr',
+ operator: 'AND',
+ parentheses: true,
+ left: {
+ ...ast.where,
+ parentheses: true,
+ },
+ right: {
+ ...condition,
+ parentheses: true,
+ },
+ }
+ } else {
+ ast.where = {
+ ...condition,
+ parentheses: true,
+ }
+ }
+ } else {
+ // For INSERT, enforce column values
+ if (ast.values && ast.values.length > 0) {
+ const columnIndex = ast.columns.findIndex(
+ (col: any) =>
+ normalizeIdentifier(col) ===
+ normalizeIdentifier(condition.left.column)
+ )
+ if (columnIndex !== -1) {
+ ast.values.forEach((valueList: any) => {
+ if (
+ valueList.type === 'expr_list' &&
+ Array.isArray(valueList.value)
+ ) {
+ valueList.value[columnIndex] = {
+ type: condition.right.type,
+ value: condition.right.value,
+ }
+ } else {
+ valueList[columnIndex] = {
+ type: condition.right.type,
+ value: condition.right.value,
+ }
+ }
+ })
+ }
+ }
+ }
+ })
+
+ ast.from?.forEach((fromItem: any) => {
+ if (fromItem.expr && fromItem.expr.type === 'select') {
+ applyRLSToAst(fromItem.expr)
+ }
+
+ // Handle both single join and array of joins
+ if (fromItem.join) {
+ const joins = Array.isArray(fromItem.join)
+ ? fromItem.join
+ : [fromItem]
+ joins.forEach((joinItem: any) => {
+ if (joinItem.expr && joinItem.expr.type === 'select') {
+ applyRLSToAst(joinItem.expr)
+ }
+ })
+ }
+ })
+
+ if (ast.where) {
+ traverseWhere(ast.where)
+ }
+
+ ast.columns?.forEach((column: any) => {
+ if (column.expr && column.expr.type === 'select') {
+ applyRLSToAst(column.expr)
+ }
+ })
}
function traverseWhere(node: any): void {
- if (!node) return;
- if (node.type === "select") {
- applyRLSToAst(node);
- }
- if (node.left) traverseWhere(node.left);
- if (node.right) traverseWhere(node.right);
+ if (!node) return
+ if (node.type === 'select') {
+ applyRLSToAst(node)
+ }
+ if (node.left) traverseWhere(node.left)
+ if (node.right) traverseWhere(node.right)
}
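
A before/after sketch of the rewrite applyRLS performs, assuming the hypothetical SELECT policy on orders.user_id from the sketch above and a token whose sub claim is 'user_123':

const original = 'SELECT * FROM orders WHERE total > 100'
const rewritten =
    "SELECT * FROM orders WHERE (total > 100) AND (orders.user_id = 'user_123')"

// An INSERT into the same table instead has the protected column value
// overwritten by the policy, and any statement type without a matching
// policy on a restricted table is rejected before it reaches the database.
console.log(original, rewritten)
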
diff --git a/src/studio/index.ts b/src/studio/index.ts
index 025703f..b73b32d 100644
--- a/src/studio/index.ts
+++ b/src/studio/index.ts
@@ -1,7 +1,7 @@
interface HandleStudioRequestOption {
- username: string,
- password: string,
- apiKey: string;
+ username: string
+ password: string
+ apiKey: string
}
function createStudioHTML(apiKey: string): string {
@@ -115,35 +115,38 @@ function createStudioHTML(apiKey: string): string {