diff --git a/.circleci/config.yml b/.circleci/config.yml index 41817bafb5..c84370bbaf 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -52,7 +52,7 @@ build-distro-bin: &build-distro-bin key: v1-{{ .Environment.CIRCLE_JOB }}-dependencies-{{ checksum "postgrest.cabal" }}-{{ checksum "stack.yaml" }} jobs: - build-test: + build-test-9.4: docker: - image: circleci/buildpack-deps:trusty environment: @@ -138,6 +138,38 @@ jobs: name: run tests command: POSTGREST_TEST_CONNECTION=$(test/create_test_db "postgres://circleci@localhost" postgrest_test) stack test + build-test-10: + docker: + - image: circleci/buildpack-deps:trusty + environment: + - PGHOST=localhost + - image: circleci/postgres:10.5 + environment: + - POSTGRES_USER=circleci + - POSTGRES_DB=circleci + steps: + - checkout + - restore_cache: + keys: + - v1-stack-dependencies-{{ checksum "postgrest.cabal" }}-{{ checksum "stack.yaml" }} + - run: + name: install stack & dependencies + command: | + curl -L https://github.com/commercialhaskell/stack/releases/download/v1.1.2/stack-1.1.2-linux-x86_64.tar.gz | tar zx -C /tmp + sudo mv /tmp/stack-1.1.2-linux-x86_64/stack /usr/bin + sudo apt-get update + sudo apt-get install -y libgmp-dev + sudo apt-get install -y postgresql-client + stack setup + - run: + name: build src and tests + command: | + stack build --fast -j1 + stack build --fast --test --no-run-tests + - run: + name: run tests + command: POSTGREST_TEST_CONNECTION=$(test/create_test_db "postgres://circleci@localhost" postgrest_test) stack test + build-prof-test: docker: - image: circleci/buildpack-deps:trusty @@ -224,7 +256,7 @@ workflows: version: 2 build-test-release: jobs: - - build-test: + - build-test-9.4: filters: tags: only: /v[0-9]+(\.[0-9]+)*/ @@ -232,14 +264,19 @@ workflows: filters: tags: only: /v[0-9]+(\.[0-9]+)*/ + - build-test-10: + filters: + tags: + only: /v[0-9]+(\.[0-9]+)*/ - build-prof-test: filters: tags: only: /v[0-9]+(\.[0-9]+)*/ - centos6: requires: - - build-test + - 
build-test-9.4 - build-test-9.6 + - build-test-10 - build-prof-test filters: tags: @@ -248,8 +285,9 @@ workflows: ignore: /.*/ - centos7: requires: - - build-test + - build-test-9.4 - build-test-9.6 + - build-test-10 - build-prof-test filters: tags: @@ -258,8 +296,9 @@ workflows: ignore: /.*/ - ubuntu: requires: - - build-test + - build-test-9.4 - build-test-9.6 + - build-test-10 - build-prof-test filters: tags: @@ -268,8 +307,9 @@ workflows: ignore: /.*/ - ubuntui386: requires: - - build-test + - build-test-9.4 - build-test-9.6 + - build-test-10 - build-prof-test filters: tags: diff --git a/BACKERS.md b/BACKERS.md index 554645a4bf..08327e2822 100644 --- a/BACKERS.md +++ b/BACKERS.md @@ -6,9 +6,9 @@ PostgREST ongoing development is only possible thanks to our Sponsors and Backer - [Christiaan Westerbeek](https://devotis.nl) - [Daniel Babiak](https://github.com/d-babiak) +- [Michel Pelletier](https://github.com/michelp/) ## Backers - Tsingson Qin -- Michel Pelletier - Jay Hannah diff --git a/CHANGELOG.md b/CHANGELOG.md index 1e692933d4..5cbcc0ea27 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,16 +7,42 @@ This project adheres to [Semantic Versioning](http://semver.org/). 
### Added +### Fixed + +- #1182, Fix embedding on views with composite pks - @steve-chavez +- #1180, Fix embedding on views with subselects in pg10 - @steve-chavez +- #1197, Allow CORS for PUT - @bkylerussell + +## [5.1.0] - 2018-08-31 + +### Added + - #1099, Add support for getting json/jsonb by array index - @steve-chavez +- #1145, Add materialized view columns to OpenAPI output - @steve-chavez +- #709, Allow embedding on views with subselects/CTE - @steve-chavez +- #1148, OpenAPI: add `required` section for the non-nullable columns - @laughedelic +- #1158, Add summary to OpenAPI doc for RPC functions - @mdr1384 ### Fixed - #1113, Fix UPSERT failing when having a camel case PK column - @steve-chavez +- #945, Fix slow start-up time on big schemas - @steve-chavez +- #1129, Fix view embedding when table is capitalized - @steve-chavez +- #1149, OpenAPI: Change `GET` response type to array - @laughedelic +- #1152, Fix RPC failing when having arguments with reserved or uppercase keywords - @mdr1384 +- #905, Fix intermittent empty replies - @steve-chavez +- #1139, Fix JWTIssuedAtFuture failure for valid iat claim - @steve-chavez +- #1141, Fix app.settings resetting on pool timeout - @steve-chavez ### Changed - #1099, Numbers in json path `?select=data->1->>key` now get treated as json array indexes instead of keys - @steve-chavez - #1128, Allow finishing a json path with a single arrow `->`. 
Now a json can be obtained without resorting to casting, Previously: `/json_arr?select=data->>2::json`, now: `/json_arr?select=data->2` - @steve-chavez +- #724, Change server-host default of *4 to 127.0.0.1 + +### Deprecated + +- #724, SIGHUP deprecated, SIGUSR1 should be used instead ## [0.5.0.0] - 2018-05-14 diff --git a/README.md b/README.md index 05181c6543..ef06464108 100644 --- a/README.md +++ b/README.md @@ -1,19 +1,21 @@ ![Logo](static/logo.png "Logo") -[![Build Status](https://circleci.com/gh/PostgREST/postgrest/tree/master.svg?style=shield)](https://circleci.com/gh/PostgREST/postgrest/tree/master) +[![Donate](https://img.shields.io/badge/Donate-Patreon-orange.svg?colorB=F96854)](https://www.patreon.com/postgrest) +[![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.me/postgrest) Deploy [![Join the chat at https://gitter.im/begriffs/postgrest](https://img.shields.io/badge/gitter-join%20chat%20%E2%86%92-brightgreen.svg)](https://gitter.im/begriffs/postgrest) [![Docs](https://img.shields.io/badge/docs-latest-brightgreen.svg?style=flat)](http://postgrest.org) [![Docker Stars](https://img.shields.io/docker/pulls/postgrest/postgrest.svg)](https://hub.docker.com/r/postgrest/postgrest/) -[![Donate](https://img.shields.io/badge/patreon-donate-orange.svg?colorB=F96854)](https://www.patreon.com/postgrest) +[![Build Status](https://circleci.com/gh/PostgREST/postgrest/tree/master.svg?style=shield)](https://circleci.com/gh/PostgREST/postgrest/tree/master) +[![Hackage docs](https://img.shields.io/hackage/v/postgrest.svg?label=hackage)](http://hackage.haskell.org/package/postgrest) PostgREST serves a fully RESTful API from any existing PostgreSQL database. It provides a cleaner, more standards-compliant, faster API than you are likely to write from scratch. -### Usage +## Usage 1. Download the binary ([latest release](https://github.com/PostgREST/postgrest/releases/latest)) for your platform. 
@@ -23,7 +25,7 @@ API than you are likely to write from scratch. postgrest --help ``` -### Performance +## Performance TLDR; subsecond response times for up to 2000 requests/sec on Heroku free tier. If you're used to servers written in interpreted languages @@ -51,7 +53,7 @@ by * Using the PostgreSQL binary protocol * Being stateless to allow horizontal scaling -### Security +## Security PostgREST [handles authentication](http://postgrest.org/en/stable/auth.html) (via JSON Web @@ -72,7 +74,7 @@ are limited to certain templates using functions, the trigger workaround does not compromise row-level security. -### Versioning +## Versioning A robust long-lived API needs the freedom to exist in multiple versions. PostgREST does versioning through database schemas. This @@ -80,7 +82,7 @@ allows you to expose tables and views without making the app brittle. Underlying tables can be superseded and hidden behind public facing views. -### Self-documentation +## Self-documentation PostgREST uses the [OpenAPI](https://openapis.org/) standard to generate up-to-date documentation for APIs. You can use a tool like @@ -92,7 +94,7 @@ instance the number of rows returned by an endpoint is reported by - and limited with - range headers. More about [that](http://begriffs.com/posts/2014-03-06-beyond-http-header-links.html). -### Data Integrity +## Data Integrity Rather than relying on an Object Relational Mapper and custom imperative coding, this system requires you put declarative constraints @@ -106,7 +108,17 @@ See examples of [PostgreSQL constraints](http://www.tutorialspoint.com/postgresql/postgresql_constraints.htm) and the [API guide](http://postgrest.org/en/stable/api.html). 
-### Thanks +## Supporting development + +You can help PostgREST ongoing maintenance and development by: + +- Making a regular donation through Patreon https://www.patreon.com/postgrest + +- Alternatively, you can make a one-time donation via Paypal https://www.paypal.me/postgrest + +Every donation will be spent on making PostgREST better for the whole community. + +## Thanks The PostgREST organization is grateful to: diff --git a/app.json b/app.json index 4b10f61ff2..2003b545af 100644 --- a/app.json +++ b/app.json @@ -10,7 +10,7 @@ }, "POSTGREST_VER": { "description": "Version of PostgREST to deploy", - "value": "0.5.0.0" + "value": "5.1.0" }, "DB_URI": { "description": "Database connection string, e.g. postgres://user:pass@xxxxxxx.rds.amazonaws.com/mydb", diff --git a/main/Main.hs b/main/Main.hs index ed79cd355e..93de5a8ec3 100644 --- a/main/Main.hs +++ b/main/Main.hs @@ -5,13 +5,11 @@ module Main where import PostgREST.App (postgrest) import PostgREST.Config (AppConfig (..), - minimumPgVersion, prettyVersion, readOptions) -import PostgREST.DbStructure (getDbStructure, getPgVersion, - fillSessionWithSettings) +import PostgREST.DbStructure (getDbStructure, getPgVersion) import PostgREST.Error (encodeError) import PostgREST.OpenAPI (isMalformedProxyUri) -import PostgREST.Types (DbStructure, Schema, PgVersion(..)) +import PostgREST.Types (DbStructure, Schema, PgVersion(..), minimumPgVersion) import Protolude hiding (hPutStrLn, replace) @@ -33,8 +31,7 @@ import qualified Hasql.Pool as P import qualified Hasql.Session as H import Network.Wai.Handler.Warp (defaultSettings, runSettings, setHost, - setPort, setServerName, - setTimeout) + setPort, setServerName) import System.IO (BufferMode (..), hSetBuffering) @@ -64,11 +61,10 @@ connectionWorker :: ThreadId -- ^ This thread is killed if pg version is unsupported -> P.Pool -- ^ The PostgreSQL connection pool -> Schema -- ^ Schema PostgREST is serving up - -> [(Text, Text)] -- ^ Settings or Environment passed in through 
the config -> IORef (Maybe DbStructure) -- ^ mutable reference to 'DbStructure' -> IORef Bool -- ^ Used as a binary Semaphore -> IO () -connectionWorker mainTid pool schema settings refDbStructure refIsWorkerOn = do +connectionWorker mainTid pool schema refDbStructure refIsWorkerOn = do isWorkerOn <- readIORef refIsWorkerOn unless isWorkerOn $ do atomicWriteIORef refIsWorkerOn True @@ -86,7 +82,6 @@ connectionWorker mainTid pool schema settings refDbStructure refIsWorkerOn = do ("Cannot run in this PostgreSQL version, PostgREST needs at least " <> pgvName minimumPgVersion) killThread mainTid - fillSessionWithSettings settings dbStructure <- getDbStructure schema actualPgVersion liftIO $ atomicWriteIORef refDbStructure $ Just dbStructure case result of @@ -152,8 +147,7 @@ main = do appSettings = setHost ((fromString . toS) host) -- Warp settings . setPort port - . setServerName (toS $ "postgrest/" <> prettyVersion) - . setTimeout 3600 $ + . setServerName (toS $ "postgrest/" <> prettyVersion) $ defaultSettings -- Checks that the provided proxy uri is formated correctly @@ -186,7 +180,6 @@ main = do mainTid pool (configSchema conf) - (configSettings conf) refDbStructure refIsWorkerOn -- @@ -204,15 +197,15 @@ main = do throwTo mainTid UserInterrupt ) Nothing - void $ installHandler sigHUP ( - Catch $ connectionWorker - mainTid - pool - (configSchema conf) - (configSettings conf) - refDbStructure - refIsWorkerOn - ) Nothing + forM_ [sigHUP, sigUSR1] $ \sig -> + void $ installHandler sig ( + Catch $ connectionWorker + mainTid + pool + (configSchema conf) + refDbStructure + refIsWorkerOn + ) Nothing #endif @@ -230,7 +223,6 @@ main = do mainTid pool (configSchema conf) - (configSettings conf) refDbStructure refIsWorkerOn) diff --git a/postgrest.cabal b/postgrest.cabal index 781cfd8114..2295891805 100644 --- a/postgrest.cabal +++ b/postgrest.cabal @@ -2,14 +2,16 @@ name: postgrest description: Reads the schema of a PostgreSQL database and creates RESTful routes for the 
tables and views, supporting all HTTP verbs that security permits. -version: 0.5.0.0 +version: 5.1.0 synopsis: REST API for any Postgres database license: MIT license-file: LICENSE author: Joe Nelson, Adam Baker -homepage: https://github.com/PostgREST/postgrest -maintainer: cred+github@begriffs.com -category: Web +homepage: https://postgrest.org +maintainer: Steve Chávez +bug-reports: https://github.com/PostgREST/postgrest/issues +category: Executable, PostgreSQL, Network APIs +extra-source-files: CHANGELOG.md build-type: Simple cabal-version: >=1.10 source-repository head @@ -30,11 +32,11 @@ executable postgrest "-with-rtsopts=-N -I2" default-language: Haskell2010 build-depends: auto-update - , base - , hasql - , hasql-pool + , base >= 4.8 && < 4.10 + , hasql >= 1.3 && < 1.4 + , hasql-pool >= 0.5 && < 0.6 , postgrest - , protolude + , protolude == 0.2.2 , text , time , warp @@ -51,7 +53,7 @@ library default-extensions: OverloadedStrings, QuasiQuotes, NoImplicitPrelude build-depends: aeson , ansi-wl-pprint - , base >= 4.8 && < 6 + , base >= 4.8 && < 4.10 , base64-bytestring , bytestring , case-insensitive @@ -62,21 +64,21 @@ library , contravariant-extras , either , gitrev - , hasql - , hasql-pool - , hasql-transaction < 0.6 + , hasql >= 1.3 && < 1.4 + , hasql-pool >= 0.5 && < 0.6 + , hasql-transaction >= 0.7 && < 0.8 , heredoc , HTTP , http-types , insert-ordered-containers , interpolatedstring-perl6 - , jose + , jose == 0.7.0.0 , lens , lens-aeson , network-uri , optparse-applicative >= 0.13 && < 0.15 , parsec - , protolude >= 0.2.2 + , protolude == 0.2.2 , Ranged-sets == 0.3.0 , regex-tdfa , scientific @@ -143,15 +145,15 @@ Test-Suite spec , aeson-qq , async , auto-update - , base + , base >= 4.8 && < 4.10 , bytestring , base64-bytestring , case-insensitive , cassava , containers , contravariant - , hasql - , hasql-pool + , hasql >= 1.3 && < 1.4 + , hasql-pool >= 0.5 && < 0.6 , heredoc , hjsonschema == 1.5.0.1 , hspec @@ -163,7 +165,7 @@ 
Test-Suite spec , monad-control , postgrest , process - , protolude + , protolude == 0.2.2 , regex-tdfa , time , transformers-base diff --git a/src/PostgREST/App.hs b/src/PostgREST/App.hs index 022d95acc6..ff12b6ddfb 100644 --- a/src/PostgREST/App.hs +++ b/src/PostgREST/App.hs @@ -102,7 +102,7 @@ findProc qi payloadKeys paramsAsSingleObject allProcs = else payloadKeys `S.isSubsetOf` S.fromList (pgaName <$> pdArgs x)) ) <$> procs -transactionMode :: Maybe ProcDescription -> Action -> H.Mode +transactionMode :: Maybe ProcDescription -> Action -> HT.Mode transactionMode proc action = case action of ActionRead -> HT.Read @@ -131,7 +131,7 @@ app dbStructure proc conf apiRequest = Right ((q, cq), bField) -> do let stm = createReadStatement q cq (contentType == CTSingularJSON) shouldCount (contentType == CTTextCSV) bField - row <- H.query () stm + row <- H.statement () stm let (tableTotal, queryTotal, _ , body) = row (status, contentRange) = rangeHeader queryTotal tableTotal canonical = iCanonicalQS apiRequest @@ -162,7 +162,7 @@ app dbStructure proc conf apiRequest = stm = createWriteStatement sq mq (contentType == CTSingularJSON) isSingle (contentType == CTTextCSV) (iPreferRepresentation apiRequest) pkCols - row <- H.query (toS pjRaw) stm + row <- H.statement (toS pjRaw) stm let (_, _, fs, body) = extractQueryResult row headers = catMaybes [ if null fs @@ -191,7 +191,7 @@ app dbStructure proc conf apiRequest = let stm = createWriteStatement sq mq (contentType == CTSingularJSON) False (contentType == CTTextCSV) (iPreferRepresentation apiRequest) [] - row <- H.query (toS pjRaw) stm + row <- H.statement (toS pjRaw) stm let (_, queryTotal, _, body) = extractQueryResult row if contentType == CTSingularJSON && queryTotal /= 1 @@ -224,7 +224,7 @@ app dbStructure proc conf apiRequest = else if S.fromList colNames /= pjKeys then return $ simpleError status400 [] "You must specify all columns in the payload when using PUT" else do - row <- H.query (toS pjRaw) $ + row <- 
H.statement (toS pjRaw) $ createWriteStatement sq mq (contentType == CTSingularJSON) False (contentType == CTTextCSV) (iPreferRepresentation apiRequest) [] let (_, queryTotal, _, body) = extractQueryResult row @@ -248,7 +248,7 @@ app dbStructure proc conf apiRequest = (contentType == CTSingularJSON) False (contentType == CTTextCSV) (iPreferRepresentation apiRequest) [] - row <- H.query mempty stm + row <- H.statement mempty stm let (_, queryTotal, _, body) = extractQueryResult row r = contentRangeH 1 0 $ toInteger <$> if shouldCount then Just queryTotal else Nothing @@ -287,7 +287,7 @@ app dbStructure proc conf apiRequest = PJArray _ -> False singular = contentType == CTSingularJSON specifiedPgArgs = filter ((`S.member` pjKeys) . pgaName) $ fromMaybe [] (pdArgs <$> proc) - row <- H.query (toS pjRaw) $ + row <- H.statement (toS pjRaw) $ callProc qi specifiedPgArgs returnsScalar q cq shouldCount singular (iPreferSingleObjectParameter apiRequest) (contentType == CTTextCSV) @@ -316,7 +316,7 @@ app dbStructure proc conf apiRequest = toTableInfo :: [Table] -> [(Table, [Column], [Text])] toTableInfo = map (\t -> let (s, tn) = (tableSchema t, tableName t) in (t, tableCols dbStructure s tn, tablePKCols dbStructure s tn)) encodeApi ti sd procs = encodeOpenAPI (concat $ M.elems procs) (toTableInfo ti) uri' sd $ dbPrimaryKeys dbStructure - body <- encodeApi <$> H.query schema accessibleTables <*> H.query schema schemaDescription <*> H.query schema accessibleProcs + body <- encodeApi <$> H.statement schema accessibleTables <*> H.statement schema schemaDescription <*> H.statement schema accessibleProcs return $ responseLBS status200 [toHeader CTOpenAPI] $ toS body _ -> return notFound diff --git a/src/PostgREST/Auth.hs b/src/PostgREST/Auth.hs index f18cf317b5..43f2494350 100644 --- a/src/PostgREST/Auth.hs +++ b/src/PostgREST/Auth.hs @@ -20,6 +20,7 @@ module PostgREST.Auth ( ) where import Control.Lens.Operators +import Control.Lens (set) import qualified Data.Aeson as JSON 
import qualified Data.HashMap.Strict as M import Data.Time.Clock (UTCTime) @@ -48,7 +49,7 @@ jwtClaims secret audience payload time jspath = case secret of Nothing -> return JWTMissingSecret Just s -> do - let validation = defaultJWTValidationSettings (maybe (const True) (==) audience) + let validation = set allowedSkew 1 $ defaultJWTValidationSettings (maybe (const True) (==) audience) eJwt <- runExceptT $ do jwt <- decodeCompact payload verifyClaimsAt validation s time jwt diff --git a/src/PostgREST/Config.hs b/src/PostgREST/Config.hs index 8af03deafd..621a4fe6ea 100644 --- a/src/PostgREST/Config.hs +++ b/src/PostgREST/Config.hs @@ -18,9 +18,6 @@ module PostgREST.Config ( prettyVersion , docsVersion , readOptions , corsPolicy - , minimumPgVersion - , pgVersion95 - , pgVersion96 , AppConfig (..) ) where @@ -52,7 +49,7 @@ import Network.Wai.Middleware.Cors (CorsResourcePolicy (..)) import Options.Applicative hiding (str) import Paths_postgrest (version) import PostgREST.Parsers (pRoleClaimKey) -import PostgREST.Types (PgVersion(..), ApiRequestError(..), +import PostgREST.Types (ApiRequestError(..), JSPath, JSPathExp(..)) import Protolude hiding (hPutStrLn, take, intercalate, (<>)) @@ -85,7 +82,7 @@ data AppConfig = AppConfig { defaultCorsPolicy :: CorsResourcePolicy defaultCorsPolicy = CorsResourcePolicy Nothing - ["GET", "POST", "PATCH", "DELETE", "OPTIONS"] ["Authorization"] Nothing + ["GET", "POST", "PATCH", "PUT", "DELETE", "OPTIONS"] ["Authorization"] Nothing (Just $ 60*60*24) False False True -- | CORS policy to be used in by Wai Cors middleware @@ -132,7 +129,7 @@ readOptions = do <*> C.key "db-anon-role" <*> (mfilter (/= "") <$> C.key "server-proxy-uri") <*> C.key "db-schema" - <*> (fromMaybe "*4" . mfilter (/= "") <$> C.key "server-host") + <*> (fromMaybe "127.0.0.1" . mfilter (/= "") <$> C.key "server-host") <*> (fromMaybe 3000 . join . fmap coerceInt <$> C.key "server-port") <*> (fmap encodeUtf8 . 
mfilter (/= "") <$> C.key "jwt-secret") <*> (fromMaybe False . join . fmap coerceBool <$> C.key "secret-is-base64") @@ -141,7 +138,7 @@ readOptions = do <*> (join . fmap coerceInt <$> C.key "max-rows") <*> (mfilter (/= "") <$> C.key "pre-request") <*> pure False - <*> (fmap parsedPairToTextPair <$> C.subassocs "app.settings") + <*> (fmap (fmap coerceText) <$> C.subassocs "app.settings") <*> (maybe (Right [JSPKey "role"]) parseRoleClaimKey <$> C.key "role-claim-key") case mAppConf of @@ -152,13 +149,6 @@ readOptions = do return appConf where - parsedPairToTextPair :: (Name, Value) -> (Text, Text) - parsedPairToTextPair (k, v) = (k, newValue) - where - newValue = case v of - String textVal -> textVal - _ -> show v - parseJwtAudience :: Name -> C.ConfigParserM (Maybe StringOrURI) parseJwtAudience k = C.key k >>= \case @@ -168,6 +158,10 @@ readOptions = do (Just "") -> pure Nothing aud' -> pure aud' + coerceText :: Value -> Text + coerceText (String s) = s + coerceText v = show v + coerceInt :: (Read i, Integral i) => Value -> Maybe i coerceInt (Number x) = rightToMaybe $ floatingOrInteger x coerceInt (String x) = readMaybe $ toS x @@ -209,7 +203,7 @@ readOptions = do |db-anon-role = "postgres" |db-pool = 10 | - |server-host = "*4" + |server-host = "127.0.0.1" |server-port = 3000 | |## base url for swagger output @@ -236,13 +230,3 @@ pathParser = strArgument $ metavar "FILENAME" <> help "Path to configuration file" - --- | Tells the minimum PostgreSQL version required by this version of PostgREST -minimumPgVersion :: PgVersion -minimumPgVersion = PgVersion 90400 "9.4" - -pgVersion96 :: PgVersion -pgVersion96 = PgVersion 90600 "9.6" - -pgVersion95 :: PgVersion -pgVersion95 = PgVersion 90500 "9.5" diff --git a/src/PostgREST/DbRequestBuilder.hs b/src/PostgREST/DbRequestBuilder.hs index 597b65b3b1..b48c23fcf4 100644 --- a/src/PostgREST/DbRequestBuilder.hs +++ b/src/PostgREST/DbRequestBuilder.hs @@ -90,7 +90,7 @@ readRequest maxRows allRels proc apiRequest = -- in a 
relation where one of the tables matches "TableName" -- replace the name to that table with pg_source -- this "fake" relations is needed so that in a mutate query --- we can look a the "returning *" part which is wrapped with a "with" +-- we can look at the "returning *" part which is wrapped with a "with" -- as just another table that has relations with other tables toSourceRelation :: TableName -> Relation -> Maybe Relation toSourceRelation mt r@(Relation t _ ft _ _ rt _ _) @@ -225,12 +225,12 @@ addJoinConditions schema (Node node@(query, nodeProps@(_, relation, _, _, _)) fo addJoinCond jc rq@Select{joinConditions=jcs} = rq{joinConditions=jc:jcs} getJoinConditions :: Relation -> [JoinCondition] -getJoinConditions (Relation Table{tableSchema=tSchema, tableName=tN} cols Table{tableName=ftN} fcs typ lt lc1 lc2) = +getJoinConditions (Relation Table{tableSchema=tSchema, tableName=tN} cols Table{tableName=ftN} fCols typ lt lc1 lc2) = if | typ == Child || typ == Parent -> - zipWith (toJoinCondition tN ftN) cols fcs + zipWith (toJoinCondition tN ftN) cols fCols | typ == Many -> let ltN = fromMaybe "" (tableName <$> lt) in - zipWith (toJoinCondition tN ltN) cols (fromMaybe [] lc1) ++ zipWith (toJoinCondition ftN ltN) fcs (fromMaybe [] lc2) + zipWith (toJoinCondition tN ltN) cols (fromMaybe [] lc1) ++ zipWith (toJoinCondition ftN ltN) fCols (fromMaybe [] lc2) | typ == Root -> witness where toJoinCondition :: Text -> Text -> Column -> Column -> JoinCondition diff --git a/src/PostgREST/DbStructure.hs b/src/PostgREST/DbStructure.hs index dbff35b61e..614ff5ff5f 100644 --- a/src/PostgREST/DbStructure.hs +++ b/src/PostgREST/DbStructure.hs @@ -10,12 +10,11 @@ module PostgREST.DbStructure ( , accessibleProcs , schemaDescription , getPgVersion -, fillSessionWithSettings ) where import qualified Hasql.Decoders as HD import qualified Hasql.Encoders as HE -import qualified Hasql.Query as H +import qualified Hasql.Statement as H import Control.Applicative import qualified 
Data.HashMap.Strict as M @@ -27,25 +26,22 @@ import Data.Text (split, strip, import qualified Data.Text as T import qualified Hasql.Session as H import PostgREST.Types -import Text.InterpolatedString.Perl6 (q) +import Text.InterpolatedString.Perl6 (q, qc) import GHC.Exts (groupWith) import Protolude import Unsafe (unsafeHead) -import Data.Functor.Contravariant (contramap) -import Contravariant.Extras (contrazip2) - getDbStructure :: Schema -> PgVersion -> H.Session DbStructure getDbStructure schema pgVer = do - tabs <- H.query () allTables - cols <- H.query schema $ allColumns tabs - syns <- H.query () $ allSynonyms cols - childRels <- H.query () $ allChildRelations tabs cols - keys <- H.query () $ allPrimaryKeys tabs - procs <- H.query schema allProcs - - let rels = addManyToManyRelations . addParentRelations $ addViewRelations syns childRels + tabs <- H.statement () allTables + cols <- H.statement schema $ allColumns tabs + syns <- H.statement schema $ allSynonyms cols pgVer + childRels <- H.statement () $ allChildRelations tabs cols + keys <- H.statement () $ allPrimaryKeys tabs + procs <- H.statement schema allProcs + + let rels = addManyToManyRelations . 
addParentRelations $ addViewChildRelations syns childRels cols' = addForeignKeys rels cols keys' = addViewPrimaryKeys syns keys @@ -60,70 +56,70 @@ getDbStructure schema pgVer = do decodeTables :: HD.Result [Table] decodeTables = - HD.rowsList tblRow + HD.rowList tblRow where - tblRow = Table <$> HD.value HD.text - <*> HD.value HD.text - <*> HD.nullableValue HD.text - <*> HD.value HD.bool + tblRow = Table <$> HD.column HD.text + <*> HD.column HD.text + <*> HD.nullableColumn HD.text + <*> HD.column HD.bool decodeColumns :: [Table] -> HD.Result [Column] decodeColumns tables = - mapMaybe (columnFromRow tables) <$> HD.rowsList colRow + mapMaybe (columnFromRow tables) <$> HD.rowList colRow where colRow = (,,,,,,,,,,,) - <$> HD.value HD.text <*> HD.value HD.text - <*> HD.value HD.text <*> HD.nullableValue HD.text - <*> HD.value HD.int4 <*> HD.value HD.bool - <*> HD.value HD.text <*> HD.value HD.bool - <*> HD.nullableValue HD.int4 - <*> HD.nullableValue HD.int4 - <*> HD.nullableValue HD.text - <*> HD.nullableValue HD.text + <$> HD.column HD.text <*> HD.column HD.text + <*> HD.column HD.text <*> HD.nullableColumn HD.text + <*> HD.column HD.int4 <*> HD.column HD.bool + <*> HD.column HD.text <*> HD.column HD.bool + <*> HD.nullableColumn HD.int4 + <*> HD.nullableColumn HD.int4 + <*> HD.nullableColumn HD.text + <*> HD.nullableColumn HD.text decodeRelations :: [Table] -> [Column] -> HD.Result [Relation] decodeRelations tables cols = - mapMaybe (relationFromRow tables cols) <$> HD.rowsList relRow + mapMaybe (relationFromRow tables cols) <$> HD.rowList relRow where relRow = (,,,,,) - <$> HD.value HD.text - <*> HD.value HD.text - <*> HD.value (HD.array (HD.arrayDimension replicateM (HD.arrayValue HD.text))) - <*> HD.value HD.text - <*> HD.value HD.text - <*> HD.value (HD.array (HD.arrayDimension replicateM (HD.arrayValue HD.text))) + <$> HD.column HD.text + <*> HD.column HD.text + <*> HD.column (HD.array (HD.dimension replicateM (HD.element HD.text))) + <*> HD.column HD.text + <*> 
HD.column HD.text + <*> HD.column (HD.array (HD.dimension replicateM (HD.element HD.text))) decodePks :: [Table] -> HD.Result [PrimaryKey] decodePks tables = - mapMaybe (pkFromRow tables) <$> HD.rowsList pkRow + mapMaybe (pkFromRow tables) <$> HD.rowList pkRow where - pkRow = (,,) <$> HD.value HD.text <*> HD.value HD.text <*> HD.value HD.text + pkRow = (,,) <$> HD.column HD.text <*> HD.column HD.text <*> HD.column HD.text decodeSynonyms :: [Column] -> HD.Result [Synonym] decodeSynonyms cols = - mapMaybe (synonymFromRow cols) <$> HD.rowsList synRow + mapMaybe (synonymFromRow cols) <$> HD.rowList synRow where synRow = (,,,,,) - <$> HD.value HD.text <*> HD.value HD.text - <*> HD.value HD.text <*> HD.value HD.text - <*> HD.value HD.text <*> HD.value HD.text + <$> HD.column HD.text <*> HD.column HD.text + <*> HD.column HD.text <*> HD.column HD.text + <*> HD.column HD.text <*> HD.column HD.text decodeProcs :: HD.Result (M.HashMap Text [ProcDescription]) decodeProcs = -- Duplicate rows for a function means they're overloaded, order these by least args according to ProcDescription Ord instance - map sort . M.fromListWith (++) . map ((\(x,y) -> (x, [y])) . addName) <$> HD.rowsList tblRow + map sort . M.fromListWith (++) . map ((\(x,y) -> (x, [y])) . 
addName) <$> HD.rowList tblRow where tblRow = ProcDescription - <$> HD.value HD.text - <*> HD.nullableValue HD.text - <*> (parseArgs <$> HD.value HD.text) + <$> HD.column HD.text + <*> HD.nullableColumn HD.text + <*> (parseArgs <$> HD.column HD.text) <*> (parseRetType - <$> HD.value HD.text - <*> HD.value HD.text - <*> HD.value HD.bool - <*> HD.value HD.char) - <*> (parseVolatility <$> HD.value HD.char) + <$> HD.column HD.text + <*> HD.column HD.text + <*> HD.column HD.bool + <*> HD.column HD.char) + <*> (parseVolatility <$> HD.column HD.char) addName :: ProcDescription -> (Text, ProcDescription) addName pd = (pdName pd, pd) @@ -159,11 +155,11 @@ decodeProcs = | v == 's' = Stable | otherwise = Volatile -- only 'v' can happen here -allProcs :: H.Query Schema (M.HashMap Text [ProcDescription]) -allProcs = H.statement (toS procsSqlQuery) (HE.value HE.text) decodeProcs True +allProcs :: H.Statement Schema (M.HashMap Text [ProcDescription]) +allProcs = H.Statement (toS procsSqlQuery) (HE.param HE.text) decodeProcs True -accessibleProcs :: H.Query Schema (M.HashMap Text [ProcDescription]) -accessibleProcs = H.statement (toS sql) (HE.value HE.text) decodeProcs True +accessibleProcs :: H.Statement Schema (M.HashMap Text [ProcDescription]) +accessibleProcs = H.Statement (toS sql) (HE.param HE.text) decodeProcs True where sql = procsSqlQuery <> " AND has_function_privilege(p.oid, 'execute')" @@ -186,9 +182,9 @@ procsSqlQuery = [q| WHERE pn.nspname = $1 |] -schemaDescription :: H.Query Schema (Maybe Text) +schemaDescription :: H.Statement Schema (Maybe Text) schemaDescription = - H.statement sql (HE.value HE.text) (join <$> HD.maybeRow (HD.nullableValue HD.text)) True + H.Statement sql (HE.param HE.text) (join <$> HD.rowMaybe (HD.nullableColumn HD.text)) True where sql = [q| select @@ -199,9 +195,9 @@ schemaDescription = where n.nspname = $1 |] -accessibleTables :: H.Query Schema [Table] +accessibleTables :: H.Statement Schema [Table] accessibleTables = - H.statement sql 
(HE.value HE.text) decodeTables True + H.Statement sql (HE.param HE.text) decodeTables True where sql = [q| select @@ -247,32 +243,32 @@ Having a Relation{relTable=t1, relColumns=[c1], relFTable=t2, relFColumns=[c2], t1.c1------t2.c2 -When only having a t1_view.c1 synonym, we need to add a View to Table Relation +When only having a t1_view.c1 synonym, we need to add a View to Table Child Relation t1.c1----t2.c2 t1.c1----------t2.c2 - -> --------/ + -> ________/ / t1_view.c1 t1_view.c1 -When only having a t2_view.c2 synonym, we need to add a Table to View Relation +When only having a t2_view.c2 synonym, we need to add a Table to View Child Relation t1.c1----t2.c2 t1.c1----------t2.c2 - -> \-------- + -> \________ \ t2_view.c2 t2_view.c1 -When having t1_view.c1 and a t2_view.c2 synonyms, we need to add a View to View Relation in addition to the prior +When having t1_view.c1 and a t2_view.c2 synonyms, we need to add a View to View Child Relation in addition to the prior t1.c1----t2.c2 t1.c1----------t2.c2 - -> \--------/ + -> \________/ / \ t1_view.c1 t2_view.c2 t1_view.c1-------t2_view.c1 The logic for composite pks is similar just need to make sure all the Relation columns have synonyms. -} -addViewRelations :: [Synonym] -> [Relation] -> [Relation] -addViewRelations allSyns = concatMap (\rel -> +addViewChildRelations :: [Synonym] -> [Relation] -> [Relation] +addViewChildRelations allSyns = concatMap (\rel -> rel : case rel of Relation{relType=Child, relTable, relColumns, relFTable, relFColumns} -> @@ -283,18 +279,22 @@ addViewRelations allSyns = concatMap (\rel -> fColsSyns = colSynsGroupedByView relFColumns getView :: [Synonym] -> Table getView = colTable . snd . 
unsafeHead - syns `allSynsOf` cols = S.fromList (fst <$> syns) == S.fromList cols in - - -- View Table Relations - [Relation (getView syns) (snd <$> syns) relFTable relFColumns Child Nothing Nothing Nothing + syns `allSynsOf` cols = S.fromList (fst <$> syns) == S.fromList cols + -- Relation is dependent on the order of relColumns and relFColumns to get the join conditions right in the generated query. + -- So we need to change the order of the synonyms to match the relColumns + -- This could be avoided if the Relation type is improved with a structure that maintains the association of relColumns and relFColumns + syns `sortAccordingTo` columns = sortOn (\(k, _) -> L.lookup k $ zip columns [0::Int ..]) syns in + + -- View Table Child Relations + [Relation (getView syns) (snd <$> syns `sortAccordingTo` relColumns) relFTable relFColumns Child Nothing Nothing Nothing | syns <- colsSyns, syns `allSynsOf` relColumns] ++ - -- Table View Relations - [Relation relTable relColumns (getView fSyns) (snd <$> fSyns) Child Nothing Nothing Nothing + -- Table View Child Relations + [Relation relTable relColumns (getView fSyns) (snd <$> fSyns `sortAccordingTo` relFColumns) Child Nothing Nothing Nothing | fSyns <- fColsSyns, fSyns `allSynsOf` relFColumns] ++ - -- View View Relations - [Relation (getView syns) (snd <$> syns) (getView fSyns) (snd <$> fSyns) Child Nothing Nothing Nothing + -- View View Child Relations + [Relation (getView syns) (snd <$> syns `sortAccordingTo` relColumns) (getView fSyns) (snd <$> fSyns `sortAccordingTo` relFColumns) Child Nothing Nothing Nothing | syns <- colsSyns, fSyns <- fColsSyns, syns `allSynsOf` relColumns, fSyns `allSynsOf` relFColumns] _ -> []) @@ -328,9 +328,9 @@ addViewPrimaryKeys syns = concatMap (\pk -> filter (\(col, _) -> colTable col == pkTable pk && colName col == pkName pk) syns in pk : viewPks) -allTables :: H.Query () [Table] +allTables :: H.Statement () [Table] allTables = - H.statement sql HE.unit decodeTables True + H.Statement sql 
HE.unit decodeTables True where sql = [q| SELECT @@ -351,9 +351,9 @@ allTables = GROUP BY table_schema, table_name, insertable ORDER BY table_schema, table_name |] -allColumns :: [Table] -> H.Query Schema [Column] +allColumns :: [Table] -> H.Statement Schema [Column] allColumns tabs = - H.statement sql (HE.value HE.text) (decodeColumns tabs) True + H.Statement sql (HE.param HE.text) (decodeColumns tabs) True where sql = [q| SELECT DISTINCT @@ -388,7 +388,7 @@ allColumns tabs = pg_catalog.pg_namespace n WHERE r.contype IN ('f', 'p') - AND c.relkind IN ('r', 'v', 'f', 'mv') + AND c.relkind IN ('r', 'v', 'f', 'm') AND r.conrelid = c.oid AND c.relnamespace = n.oid AND n.nspname NOT IN ('pg_catalog', 'information_schema', $1) @@ -492,7 +492,7 @@ allColumns tabs = NOT pg_is_other_temp_schema(nc.oid) AND a.attnum > 0 AND NOT a.attisdropped - AND (c.relkind = ANY (ARRAY['r'::"char", 'v'::"char", 'f'::"char"])) + AND (c.relkind = ANY (ARRAY['r'::"char", 'v'::"char", 'f'::"char", 'm'::"char"])) AND (nc.nspname = $1 OR kc.r_oid IS NOT NULL) /*--filter only columns that are FK/PK or in the api schema */ /*--AND (pg_has_role(c.relowner, 'USAGE'::text) OR has_column_privilege(c.oid, a.attnum, 'SELECT, INSERT, UPDATE, REFERENCES'::text))*/ ) @@ -538,9 +538,9 @@ columnFromRow tabs (s, t, n, desc, pos, nul, typ, u, l, p, d, e) = buildColumn < parseEnum :: Maybe Text -> [Text] parseEnum str = fromMaybe [] $ split (==',') <$> str -allChildRelations :: [Table] -> [Column] -> H.Query () [Relation] +allChildRelations :: [Table] -> [Column] -> H.Statement () [Relation] allChildRelations tabs cols = - H.statement sql HE.unit (decodeRelations tabs cols) True + H.Statement sql HE.unit (decodeRelations tabs cols) True where sql = [q| SELECT ns1.nspname AS table_schema, @@ -579,9 +579,9 @@ relationFromRow allTabs allCols (rs, rt, rcs, frs, frt, frcs) = cols = mapM (findCol rs rt) rcs colsF = mapM (findCol frs frt) frcs -allPrimaryKeys :: [Table] -> H.Query () [PrimaryKey] +allPrimaryKeys :: 
[Table] -> H.Statement () [PrimaryKey] allPrimaryKeys tabs = - H.statement sql HE.unit (decodePks tabs) True + H.Statement sql HE.unit (decodePks tabs) True where sql = [q| /* @@ -689,80 +689,71 @@ pkFromRow :: [Table] -> (Schema, Text, Text) -> Maybe PrimaryKey pkFromRow tabs (s, t, n) = PrimaryKey <$> table <*> pure n where table = find (\tbl -> tableSchema tbl == s && tableName tbl == t) tabs -allSynonyms :: [Column] -> H.Query () [Synonym] -allSynonyms cols = - H.statement sql HE.unit (decodeSynonyms cols) True - where - -- query explanation at https://gist.github.com/ruslantalpa/2eab8c930a65e8043d8f - sql = [q| - with view_columns as ( +allSynonyms :: [Column] -> PgVersion -> H.Statement Schema [Synonym] +allSynonyms cols pgVer = + H.Statement sql (HE.param HE.text) (decodeSynonyms cols) True + -- query explanation at https://gist.github.com/steve-chavez/7ee0e6590cddafb532e5f00c46275569 + where + subselectRegex :: Text + subselectRegex | pgVer < pgVersion100 = ":subselect {.*?:constraintDeps <>} :location" + | otherwise = ":subselect {.*?:stmt_len 0} :location" + sql = [qc| + with + views as ( select - c.oid as view_oid, - a.attname::information_schema.sql_identifier as column_name - from pg_attribute a - join pg_class c on a.attrelid = c.oid - join pg_namespace nc on c.relnamespace = nc.oid - where - not pg_is_other_temp_schema(nc.oid) - and a.attnum > 0 - and not a.attisdropped - and (c.relkind = 'v'::"char") - and nc.nspname not in ('information_schema', 'pg_catalog') - ), - view_column_usage as ( - select distinct - v.oid as view_oid, - nv.nspname::information_schema.sql_identifier as view_schema, - v.relname::information_schema.sql_identifier as view_name, - nt.nspname::information_schema.sql_identifier as table_schema, - t.relname::information_schema.sql_identifier as table_name, - a.attname::information_schema.sql_identifier as column_name, - pg_get_viewdef(v.oid)::information_schema.character_data as view_definition - from pg_namespace nv - join 
pg_class v on nv.oid = v.relnamespace - join pg_depend dv on v.oid = dv.refobjid - join pg_depend dt on dv.objid = dt.objid - join pg_class t on dt.refobjid = t.oid - join pg_namespace nt on t.relnamespace = nt.oid - join pg_attribute a on t.oid = a.attrelid and dt.refobjsubid = a.attnum - - where - nv.nspname not in ('information_schema', 'pg_catalog') - and v.relkind = 'v'::"char" - and dv.refclassid = 'pg_class'::regclass::oid - and dv.classid = 'pg_rewrite'::regclass::oid - and dv.deptype = 'i'::"char" - and dv.refobjid <> dt.refobjid - and dt.classid = 'pg_rewrite'::regclass::oid - and dt.refclassid = 'pg_class'::regclass::oid - and (t.relkind = any (array['r'::"char", 'v'::"char", 'f'::"char"])) - ), - candidates as ( + n.nspname as view_schema, + c.relname as view_name, + r.ev_action as view_definition + from pg_class c + join pg_namespace n on n.oid = c.relnamespace + join pg_rewrite r on r.ev_class = c.oid + where (c.relkind = 'v'::char) and n.nspname = $1 + ), + removed_subselects as( select - vcu.*, - ( - select case when match is not null then coalesce(match[8], match[7], match[4]) end - from regexp_matches( - CONCAT('SELECT ', SPLIT_PART(vcu.view_definition, 'SELECT', 2)), - CONCAT('SELECT.*?((',vcu.table_name,')|(\w+))\.(', vcu.column_name, ')(\s+AS\s+("([^"]+)"|([^, \n\t]+)))?.*?FROM.*?(',vcu.table_schema,'\.|)(\2|',vcu.table_name,'\s+(as\s)?\3)'), - 'nsi' - ) match - ) as view_column_name - from view_column_usage as vcu - ) - select - c.table_schema, - c.table_name, - c.column_name as table_column_name, - c.view_schema, - c.view_name, - c.view_column_name - from view_columns as vc, candidates as c - where - vc.view_oid = c.view_oid - and vc.column_name = c.view_column_name - order by c.view_schema, c.view_name, c.table_name, c.view_column_name - |] + view_schema, view_name, + regexp_replace(view_definition, '{subselectRegex}', '', 'g') as x + from views + ), + target_lists as( + select + view_schema, view_name, + regexp_split_to_array(x, 
'targetList') as x + from removed_subselects + ), + last_target_list_wo_tail as( + select + view_schema, view_name, + (regexp_split_to_array(x[array_upper(x, 1)], ':onConflict'))[1] as x + from target_lists + ), + target_entries as( + select + view_schema, view_name, + unnest(regexp_split_to_array(x, 'TARGETENTRY')) as entry + from last_target_list_wo_tail + ), + results as( + select + view_schema, view_name, + substring(entry from ':resname (.*?) :') as view_colum_name, + substring(entry from ':resorigtbl (.*?) :') as resorigtbl, + substring(entry from ':resorigcol (.*?) :') as resorigcol + from target_entries + ) + select + sch.nspname as table_schema, + tbl.relname as table_name, + col.attname as table_column_name, + res.view_schema, + res.view_name, + res.view_colum_name + from results res + join pg_class tbl on tbl.oid::text = res.resorigtbl + join pg_attribute col on col.attrelid = tbl.oid and col.attnum::text = res.resorigcol + join pg_namespace sch on sch.oid = tbl.relnamespace + where resorigtbl <> '0' + order by view_schema, view_name, view_colum_name; |] synonymFromRow :: [Column] -> (Text,Text,Text,Text,Text,Text) -> Maybe Synonym synonymFromRow allCols (s1,t1,c1,s2,t2,c2) = (,) <$> col1 <*> col2 @@ -772,21 +763,7 @@ synonymFromRow allCols (s1,t1,c1,s2,t2,c2) = (,) <$> col1 <*> col2 findCol s t c = find (\col -> (tableSchema . colTable) col == s && (tableName . 
colTable) col == t && colName col == c) allCols getPgVersion :: H.Session PgVersion -getPgVersion = H.query () $ H.statement sql HE.unit versionRow False +getPgVersion = H.statement () $ H.Statement sql HE.unit versionRow False where sql = "SELECT current_setting('server_version_num')::integer, current_setting('server_version')" - versionRow = HD.singleRow $ PgVersion <$> HD.value HD.int4 <*> HD.value HD.text - -fillSessionWithSettings :: [(Text, Text)] -> H.Session () -fillSessionWithSettings settings = - -- Send all of the config settings to the set_config function, using pgsql's `unnest` to transform arrays of values - H.query settings $ H.statement "SELECT set_config(k, v, false) FROM unnest($1, $2) AS f1(k, v)" encoder HD.unit False - - where - -- Take a list of (key, value) pairs and encode each as an array to later bind to the query - -- see Insert Many section at https://hackage.haskell.org/package/hasql-1.1.1/docs/Hasql-Encoders.html - encoder = contramap L.unzip $ contrazip2 (vector HE.text) (vector HE.text) - where - vector value = - HE.value $ HE.array $ HE.arrayDimension foldl' $ HE.arrayValue value - + versionRow = HD.singleRow $ PgVersion <$> HD.column HD.int4 <*> HD.column HD.text diff --git a/src/PostgREST/Error.hs b/src/PostgREST/Error.hs index 0972b28190..f54a8e6389 100644 --- a/src/PostgREST/Error.hs +++ b/src/PostgREST/Error.hs @@ -118,7 +118,10 @@ instance JSON.ToJSON P.UsageError where "details" .= (toS $ fromMaybe "" e :: Text)] toJSON (P.SessionError e) = JSON.toJSON e -- H.Error -instance JSON.ToJSON H.Error where +instance JSON.ToJSON H.QueryError where + toJSON (H.QueryError _ _ e) = JSON.toJSON e + +instance JSON.ToJSON H.CommandError where toJSON (H.ResultError (H.ServerError c m d h)) = case toS c of 'P':'T':_ -> JSON.object [ @@ -154,7 +157,7 @@ instance JSON.ToJSON H.Error where httpStatus :: Bool -> P.UsageError -> HT.Status httpStatus _ (P.ConnectionError _) = HT.status503 -httpStatus authed (P.SessionError (H.ResultError 
(H.ServerError c m _ _))) = +httpStatus authed (P.SessionError (H.QueryError _ _ (H.ResultError (H.ServerError c m _ _)))) = case toS c of '0':'8':_ -> HT.status503 -- pg connection err '0':'9':_ -> HT.status500 -- triggered action exception @@ -184,5 +187,5 @@ httpStatus authed (P.SessionError (H.ResultError (H.ServerError c m _ _))) = "42501" -> if authed then HT.status403 else HT.status401 -- insufficient privilege 'P':'T':n -> fromMaybe HT.status500 (HT.mkStatus <$> readMaybe n <*> pure m) _ -> HT.status400 -httpStatus _ (P.SessionError (H.ResultError _)) = HT.status500 -httpStatus _ (P.SessionError (H.ClientError _)) = HT.status503 +httpStatus _ (P.SessionError (H.QueryError _ _ (H.ResultError _))) = HT.status500 +httpStatus _ (P.SessionError (H.QueryError _ _ (H.ClientError _))) = HT.status503 diff --git a/src/PostgREST/Middleware.hs b/src/PostgREST/Middleware.hs index 5db16d9e0b..6037eb8479 100644 --- a/src/PostgREST/Middleware.hs +++ b/src/PostgREST/Middleware.hs @@ -19,7 +19,7 @@ import PostgREST.ApiRequest (ApiRequest(..)) import PostgREST.Auth (JWTAttempt(..)) import PostgREST.Config (AppConfig (..), corsPolicy) import PostgREST.Error (simpleError) -import PostgREST.QueryBuilder (pgFmtLit, unquoted, pgFmtEnvVar) +import PostgREST.QueryBuilder (pgFmtLit, unquoted, pgFmtSetLocal) import Protolude hiding (concat, null) @@ -32,13 +32,14 @@ runWithClaims conf eClaims app req = JWTInvalid e -> return $ unauthed $ show e JWTMissingSecret -> return $ simpleError status500 [] "Server lacks JWT secret" JWTClaims claims -> do - H.sql $ toS.mconcat $ setSchemaSql ++ setRoleSql ++ claimsSql ++ headersSql ++ cookiesSql + H.sql $ toS.mconcat $ setSchemaSql ++ setRoleSql ++ claimsSql ++ headersSql ++ cookiesSql ++ appSettingsSql mapM_ H.sql customReqCheck app req where - headersSql = map (pgFmtEnvVar "request.header.") $ iHeaders req - cookiesSql = map (pgFmtEnvVar "request.cookie.") $ iCookies req - claimsSql = map (pgFmtEnvVar "request.jwt.claim.") [(c,unquoted v) | 
(c,v) <- M.toList claimsWithRole] + headersSql = pgFmtSetLocal "request.header." <$> iHeaders req + cookiesSql = pgFmtSetLocal "request.cookie." <$> iCookies req + claimsSql = pgFmtSetLocal "request.jwt.claim." <$> [(c,unquoted v) | (c,v) <- M.toList claimsWithRole] + appSettingsSql = pgFmtSetLocal mempty <$> configSettings conf setRoleSql = maybeToList $ (\r -> "set local role " <> r <> ";") . toS . pgFmtLit . unquoted <$> M.lookup "role" claimsWithRole setSchemaSql = ["set schema " <> pgFmtLit (configSchema conf) <> ";"] :: [Text] diff --git a/src/PostgREST/OpenAPI.hs b/src/PostgREST/OpenAPI.hs index 865b7da3af..20ffda41d2 100644 --- a/src/PostgREST/OpenAPI.hs +++ b/src/PostgREST/OpenAPI.hs @@ -42,7 +42,8 @@ makeTableDef pks (t, cs, _) = (tn, (mempty :: Schema) & description .~ tableDescription t & type_ .~ SwaggerObject - & properties .~ fromList (map (makeProperty pks) cs)) + & properties .~ fromList (map (makeProperty pks) cs) + & required .~ map colName (filter (not . colNullable) cs)) makeProperty :: [PrimaryKey] -> Column -> (Text, Referenced Schema) makeProperty pks c = (colName c, Inline s) @@ -197,8 +198,11 @@ makePathItem (t, cs, _) = ("/" ++ unpack tn, p $ tableInsertable t) & at 206 ?~ "Partial Content" & at 200 ?~ Inline ((mempty :: Response) & description .~ "OK" - & schema ?~ (Ref $ Reference $ tableName t) + & schema ?~ Inline (mempty + & type_ .~ SwaggerArray + & items ?~ (SwaggerItemsObject $ Ref $ Reference $ tableName t) ) + ) postOp = tOp & parameters .~ map ref ["body." 
<> tn, "preferReturn"] & at 201 ?~ "Created" @@ -219,8 +223,13 @@ makePathItem (t, cs, _) = ("/" ++ unpack tn, p $ tableInsertable t) makeProcPathItem :: ProcDescription -> (FilePath, PathItem) makeProcPathItem pd = ("/rpc/" ++ toS (pdName pd), pe) where + -- Use first line of proc description as summary; rest as description (if present) + -- We strip leading newlines from description so that users can include a blank line between summary and description + (pSum, pDesc) = fmap fst &&& fmap (dropWhile (=='\n') . snd) $ + breakOn "\n" <$> pdDescription pd postOp = (mempty :: Operation) - & description .~ pdDescription pd + & summary .~ pSum + & description .~ mfilter (/="") pDesc & parameters .~ makeProcParam pd & tags .~ Set.fromList ["(rpc) " <> pdName pd] & produces ?~ makeMimeList [CTApplicationJSON, CTSingularJSON] diff --git a/src/PostgREST/QueryBuilder.hs b/src/PostgREST/QueryBuilder.hs index 8045573bcc..c959264eed 100644 --- a/src/PostgREST/QueryBuilder.hs +++ b/src/PostgREST/QueryBuilder.hs @@ -23,16 +23,15 @@ module PostgREST.QueryBuilder ( , requestToCountQuery , unquoted , ResultsWithCount - , pgFmtEnvVar + , pgFmtSetLocal ) where -import qualified Hasql.Query as H +import qualified Hasql.Statement as H import qualified Hasql.Encoders as HE import qualified Hasql.Decoders as HD import qualified Data.Aeson as JSON -import PostgREST.Config (pgVersion96) import PostgREST.RangeQuery (rangeLimit, rangeOffset, allRange) import qualified Data.HashMap.Strict as HM import Data.Maybe @@ -58,10 +57,10 @@ import PostgREST.ApiRequest (PreferRepresentation (..)) type ResultsWithCount = (Maybe Int64, Int64, [BS.ByteString], BS.ByteString) standardRow :: HD.Row ResultsWithCount -standardRow = (,,,) <$> HD.nullableValue HD.int8 <*> HD.value HD.int8 - <*> HD.value header <*> HD.value HD.bytea +standardRow = (,,,) <$> HD.nullableColumn HD.int8 <*> HD.column HD.int8 + <*> HD.column header <*> HD.column HD.bytea where - header = HD.array $ HD.arrayDimension replicateM $ 
HD.arrayValue HD.bytea + header = HD.array $ HD.dimension replicateM $ HD.element HD.bytea noLocationF :: Text noLocationF = "array[]::text[]" @@ -76,10 +75,10 @@ decodeStandard = decodeStandardMay :: HD.Result (Maybe ResultsWithCount) decodeStandardMay = - HD.maybeRow standardRow + HD.rowMaybe standardRow createReadStatement :: SqlQuery -> SqlQuery -> Bool -> Bool -> Bool -> Maybe FieldName -> - H.Query () ResultsWithCount + H.Statement () ResultsWithCount createReadStatement selectQuery countQuery isSingle countTotal asCsv binaryField = unicodeStatement sql HE.unit decodeStandard False where @@ -102,9 +101,9 @@ createReadStatement selectQuery countQuery isSingle countTotal asCsv binaryField createWriteStatement :: SqlQuery -> SqlQuery -> Bool -> Bool -> Bool -> PreferRepresentation -> [Text] -> - H.Query ByteString (Maybe ResultsWithCount) + H.Statement ByteString (Maybe ResultsWithCount) createWriteStatement selectQuery mutateQuery wantSingle wantHdrs asCsv rep pKeys = - unicodeStatement sql (HE.value HE.unknown) decodeStandardMay True + unicodeStatement sql (HE.param HE.unknown) decodeStandardMay True where sql = case rep of @@ -139,9 +138,9 @@ createWriteStatement selectQuery mutateQuery wantSingle wantHdrs asCsv rep pKeys type ProcResults = (Maybe Int64, Int64, ByteString, ByteString) callProc :: QualifiedIdentifier -> [PgArg] -> Bool -> SqlQuery -> SqlQuery -> Bool -> Bool -> Bool -> Bool -> Bool -> Maybe FieldName -> Bool -> PgVersion -> - H.Query ByteString (Maybe ProcResults) + H.Statement ByteString (Maybe ProcResults) callProc qi pgArgs returnsScalar selectQuery countQuery countTotal isSingle paramsAsSingleObject asCsv asBinary binaryField isObject pgVer = - unicodeStatement sql (HE.value HE.unknown) decodeProc True + unicodeStatement sql (HE.param HE.unknown) decodeProc True where sql = if returnsScalar then [qc| @@ -173,18 +172,18 @@ callProc qi pgArgs returnsScalar selectQuery countQuery countTotal isSingle para unwords [ "_args_record AS (", "SELECT 
* FROM " <> (if isObject then "json_to_record" else "json_to_recordset") <> "($1)", - "AS _(" <> intercalate ", " ((\a -> pgaName a <> " " <> pgaType a) <$> pgArgs) <> ")", + "AS _(" <> intercalate ", " ((\a -> pgFmtIdent (pgaName a) <> " " <> pgaType a) <$> pgArgs) <> ")", ")"] - , intercalate ", " ((\a -> pgaName a <> " := (SELECT " <> pgaName a <> " FROM _args_record)") <$> pgArgs)) + , intercalate ", " ((\a -> pgFmtIdent (pgaName a) <> " := (SELECT " <> pgFmtIdent (pgaName a) <> " FROM _args_record)") <$> pgArgs)) countResultF = if countTotal then "( "<> countQuery <> ")" else "null::bigint" :: Text _procName = qiName qi responseHeaders = if pgVer >= pgVersion96 then "coalesce(nullif(current_setting('response.headers', true), ''), '[]')" :: Text -- nullif is used because of https://gist.github.com/steve-chavez/8d7033ea5655096903f3b52f8ed09a15 else "'[]'" :: Text - decodeProc = HD.maybeRow procRow - procRow = (,,,) <$> HD.nullableValue HD.int8 <*> HD.value HD.int8 - <*> HD.value HD.bytea <*> HD.value HD.bytea + decodeProc = HD.rowMaybe procRow + procRow = (,,,) <$> HD.nullableColumn HD.int8 <*> HD.column HD.int8 + <*> HD.column HD.bytea <*> HD.column HD.bytea scalarBodyF | asBinary = asBinaryF _procName | otherwise = "(row_to_json(_postgrest_t)->" <> pgFmtLit _procName <> ")::character varying" @@ -381,8 +380,8 @@ fromQi t = (if s == "" then "" else pgFmtIdent s <> ".") <> pgFmtIdent n n = qiName t s = qiSchema t -unicodeStatement :: Text -> HE.Params a -> HD.Result b -> Bool -> H.Query a b -unicodeStatement = H.statement . T.encodeUtf8 +unicodeStatement :: Text -> HE.Params a -> HD.Result b -> Bool -> H.Statement a b +unicodeStatement = H.Statement . 
T.encodeUtf8 emptyOnFalse :: Text -> Bool -> Text emptyOnFalse val cond = if cond then "" else val @@ -469,8 +468,8 @@ pgFmtAs fName jp Nothing = case jOp <$> lastMay jp of Nothing -> "" pgFmtAs _ _ (Just alias) = " AS " <> pgFmtIdent alias -pgFmtEnvVar :: Text -> (Text, Text) -> SqlFragment -pgFmtEnvVar prefix (k, v) = +pgFmtSetLocal :: Text -> (Text, Text) -> SqlFragment +pgFmtSetLocal prefix (k, v) = "set local " <> pgFmtIdent (prefix <> k) <> " = " <> pgFmtLit v <> ";" trimNullChars :: Text -> Text diff --git a/src/PostgREST/Types.hs b/src/PostgREST/Types.hs index 928ebc7d7d..2a7c82b441 100644 --- a/src/PostgREST/Types.hs +++ b/src/PostgREST/Types.hs @@ -144,6 +144,9 @@ data RelationType = Child | Parent | Many | Root deriving (Show, Eq) The name 'Relation' here is used with the meaning "What is the relation between the current node and the parent node". It has nothing to do with PostgreSQL referring to tables/views as relations. + The order of the relColumns and relFColumns should be maintained to get + the join conditions right. 
+ TODO merge relColumns and relFColumns to a tuple or Data.Bimap -} data Relation = Relation { relTable :: Table @@ -317,7 +320,23 @@ toMime (CTOther ct) = ct data PgVersion = PgVersion { pgvNum :: Int32 , pgvName :: Text -} deriving (Eq, Ord, Show) +} deriving (Eq, Show) + +instance Ord PgVersion where + (PgVersion v1 _) `compare` (PgVersion v2 _) = v1 `compare` v2 + +-- | Tells the minimum PostgreSQL version required by this version of PostgREST +minimumPgVersion :: PgVersion +minimumPgVersion = PgVersion 90400 "9.4" + +pgVersion95 :: PgVersion +pgVersion95 = PgVersion 90500 "9.5" + +pgVersion96 :: PgVersion +pgVersion96 = PgVersion 90600 "9.6" + +pgVersion100 :: PgVersion +pgVersion100 = PgVersion 100000 "10" sourceCTEName :: SqlFragment sourceCTEName = "pg_source" diff --git a/stack.yaml b/stack.yaml index 9a2c218d8d..dbfc05744c 100644 --- a/stack.yaml +++ b/stack.yaml @@ -6,10 +6,12 @@ extra-deps: - hjsonschema-1.5.0.1 - Ranged-sets-0.3.0 - protolude-0.2.2 - - hasql-1.1 - - hasql-pool-0.4.3 - - hasql-transaction-0.5.2 + - hasql-1.3 + - hasql-pool-0.5 + - hasql-transaction-0.7 + - text-builder-0.5.1.1 - jose-0.7.0.0 + - postgresql-libpq-0.9.4.1 ghc-options: postgrest: -O2 -Werror -Wall -fwarn-identities -fno-warn-redundant-constraints nix: diff --git a/test/Feature/AndOrParamsSpec.hs b/test/Feature/AndOrParamsSpec.hs index 8869eb7b23..c3092942ef 100644 --- a/test/Feature/AndOrParamsSpec.hs +++ b/test/Feature/AndOrParamsSpec.hs @@ -167,7 +167,7 @@ spec = context "used with POST" $ it "includes related data with filters" $ - request methodPost "/child_entities?entities.or=(id.eq.2,id.eq.3)&select=id,entities(id)" + request methodPost "/child_entities?select=id,entities(id)&entities.or=(id.eq.2,id.eq.3)&entities.order=id" [("Prefer", "return=representation")] [json|[{"id":4,"name":"entity 4","parent_id":1}, {"id":5,"name":"entity 5","parent_id":2}, diff --git a/test/Feature/CorsSpec.hs b/test/Feature/CorsSpec.hs index 2efcd96732..f8e3cd2438 100644 --- 
a/test/Feature/CorsSpec.hs +++ b/test/Feature/CorsSpec.hs @@ -45,7 +45,7 @@ spec = "true" respHeaders `shouldSatisfy` matchHeader "Access-Control-Allow-Methods" - "GET, POST, PATCH, DELETE, OPTIONS, HEAD" + "GET, POST, PATCH, PUT, DELETE, OPTIONS, HEAD" respHeaders `shouldSatisfy` matchHeader "Access-Control-Allow-Headers" "Authentication, Foo, Bar, Accept, Accept-Language, Content-Language" diff --git a/test/Feature/QuerySpec.hs b/test/Feature/QuerySpec.hs index 9bf7cbc679..ed50b200bb 100644 --- a/test/Feature/QuerySpec.hs +++ b/test/Feature/QuerySpec.hs @@ -223,13 +223,12 @@ spec = do , matchHeaders = [] } - it "requesting parents and children" $ get "/projects?id=eq.1&select=id, name, clients(*), tasks(id, name)" `shouldRespondWith` [json|[{"id":1,"name":"Windows 7","clients":{"id":1,"name":"Microsoft"},"tasks":[{"id":1,"name":"Design w7"},{"id":2,"name":"Code w7"}]}]|] { matchHeaders = [matchContentTypeJson] } - it "requesting parent without specifying primary key" $ do + it "requesting parent without specifying primary key" $ get "/projects?select=name,client(name)" `shouldRespondWith` [json|[ {"name":"Windows 7","client":{"name": "Microsoft"}}, @@ -239,9 +238,6 @@ spec = do {"name":"Orphan","client":null} ]|] { matchHeaders = [matchContentTypeJson] } - get "/articleStars?select=createdAt,article(owner),user(name)&limit=1" `shouldRespondWith` - [json|[{"createdAt":"2015-12-08T04:22:57.472738","article":{"owner": "postgrest_test_authenticator"},"user":{"name": "Angela Martin"}}]|] - { matchHeaders = [matchContentTypeJson] } it "requesting parent and renaming primary key" $ get "/projects?select=name,client(clientId:id,name)" `shouldRespondWith` @@ -313,40 +309,11 @@ spec = do [json|[{"id":1,"tasks":[{"id":1},{"id":2},{"id":3},{"id":4}]},{"id":2,"tasks":[{"id":5},{"id":6},{"id":7}]},{"id":3,"tasks":[{"id":1},{"id":5}]}]|] { matchHeaders = [matchContentTypeJson] } - it "requesting parents and children on views" $ - get "/projects_view?id=eq.1&select=id, name, 
clients(*), tasks(id, name)" `shouldRespondWith` - [json|[{"id":1,"name":"Windows 7","clients":{"id":1,"name":"Microsoft"},"tasks":[{"id":1,"name":"Design w7"},{"id":2,"name":"Code w7"}]}]|] - { matchHeaders = [matchContentTypeJson] } - - it "requesting parents and children on views with renamed keys" $ - get "/projects_view_alt?t_id=eq.1&select=t_id, name, clients(*), tasks(id, name)" `shouldRespondWith` - [json|[{"t_id":1,"name":"Windows 7","clients":{"id":1,"name":"Microsoft"},"tasks":[{"id":1,"name":"Design w7"},{"id":2,"name":"Code w7"}]}]|] - { matchHeaders = [matchContentTypeJson] } - - it "detects parent relations when having many views of a private table" $ do - get "/books?select=title,author(name)&id=eq.5" `shouldRespondWith` - [json|[ { "title": "Farenheit 451", "author": { "name": "Ray Bradbury" } } ]|] - { matchHeaders = [matchContentTypeJson] } - get "/forties_books?select=title,author(name)&limit=1" `shouldRespondWith` - [json|[ { "title": "1984", "author": { "name": "George Orwell" } } ]|] - { matchHeaders = [matchContentTypeJson] } - get "/fifties_books?select=title,author(name)&limit=1" `shouldRespondWith` - [json|[ { "title": "The Catcher in the Rye", "author": { "name": "J.D. 
Salinger" } } ]|] - { matchHeaders = [matchContentTypeJson] } - get "/sixties_books?select=title,author(name)&limit=1" `shouldRespondWith` - [json|[ { "title": "To Kill a Mockingbird", "author": { "name": "Harper Lee" } } ]|] - { matchHeaders = [matchContentTypeJson] } - it "requesting children with composite key" $ get "/users_tasks?user_id=eq.2&task_id=eq.6&select=*, comments(content)" `shouldRespondWith` [json|[{"user_id":2,"task_id":6,"comments":[{"content":"Needs to be delivered ASAP"}]}]|] { matchHeaders = [matchContentTypeJson] } - it "detect relations in views from exposed schema that are based on tables in private schema and have columns renames" $ - get "/articles?id=eq.1&select=id,articleStars(users(*))" `shouldRespondWith` - [json|[{"id":1,"articleStars":[{"users":{"id":1,"name":"Angela Martin"}},{"users":{"id":2,"name":"Michael Scott"}},{"users":{"id":3,"name":"Dwight Schrute"}}]}]|] - { matchHeaders = [matchContentTypeJson] } - it "can embed by FK column name" $ get "/projects?id=in.(1,3)&select=id,name,client_id(id,name)" `shouldRespondWith` [json|[{"id":1,"name":"Windows 7","client_id":{"id":1,"name":"Microsoft"}},{"id":3,"name":"IOS","client_id":{"id":2,"name":"Apple"}}]|] @@ -362,8 +329,101 @@ spec = do [json|[{"id":1,"name":"Windows 7","client_id":1,"client":{"id":1,"name":"Microsoft"}},{"id":3,"name":"IOS","client_id":2,"client":{"id":2,"name":"Apple"}}]|] { matchHeaders = [matchContentTypeJson] } - it "can detect fk relations through views to tables in the public schema" $ - get "/consumers_view?select=*,orders_view(*)" `shouldRespondWith` 200 + describe "view embedding" $ do + it "can detect fk relations through views to tables in the public schema" $ + get "/consumers_view?select=*,orders_view(*)" `shouldRespondWith` 200 + + it "can request parent without specifying primary key" $ + get "/articleStars?select=createdAt,article(owner),user(name)&limit=1" `shouldRespondWith` + [json|[{"createdAt":"2015-12-08T04:22:57.472738","article":{"owner": 
"postgrest_test_authenticator"},"user":{"name": "Angela Martin"}}]|] + { matchHeaders = [matchContentTypeJson] } + + it "can detect relations in views from exposed schema that are based on tables in private schema and have columns renames" $ + get "/articles?id=eq.1&select=id,articleStars(users(*))" `shouldRespondWith` + [json|[{"id":1,"articleStars":[{"users":{"id":1,"name":"Angela Martin"}},{"users":{"id":2,"name":"Michael Scott"}},{"users":{"id":3,"name":"Dwight Schrute"}}]}]|] + { matchHeaders = [matchContentTypeJson] } + + it "works when requesting parents and children on views" $ + get "/projects_view?id=eq.1&select=id, name, clients(*), tasks(id, name)" `shouldRespondWith` + [json|[{"id":1,"name":"Windows 7","clients":{"id":1,"name":"Microsoft"},"tasks":[{"id":1,"name":"Design w7"},{"id":2,"name":"Code w7"}]}]|] + { matchHeaders = [matchContentTypeJson] } + + it "works when requesting parents and children on views with renamed keys" $ + get "/projects_view_alt?t_id=eq.1&select=t_id, name, clients(*), tasks(id, name)" `shouldRespondWith` + [json|[{"t_id":1,"name":"Windows 7","clients":{"id":1,"name":"Microsoft"},"tasks":[{"id":1,"name":"Design w7"},{"id":2,"name":"Code w7"}]}]|] + { matchHeaders = [matchContentTypeJson] } + + it "detects parent relations when having many views of a private table" $ do + get "/books?select=title,author(name)&id=eq.5" `shouldRespondWith` + [json|[ { "title": "Farenheit 451", "author": { "name": "Ray Bradbury" } } ]|] + { matchHeaders = [matchContentTypeJson] } + get "/forties_books?select=title,author(name)&limit=1" `shouldRespondWith` + [json|[ { "title": "1984", "author": { "name": "George Orwell" } } ]|] + { matchHeaders = [matchContentTypeJson] } + get "/fifties_books?select=title,author(name)&limit=1" `shouldRespondWith` + [json|[ { "title": "The Catcher in the Rye", "author": { "name": "J.D. 
Salinger" } } ]|] + { matchHeaders = [matchContentTypeJson] } + get "/sixties_books?select=title,author(name)&limit=1" `shouldRespondWith` + [json|[ { "title": "To Kill a Mockingbird", "author": { "name": "Harper Lee" } } ]|] + { matchHeaders = [matchContentTypeJson] } + + it "works with views that have subselects" $ + get "/authors_books_number?select=*,books(title)&id=eq.1" `shouldRespondWith` + [json|[ {"id":1, "name":"George Orwell","num_in_forties":1,"num_in_fifties":0,"num_in_sixties":0,"num_in_all_decades":1, + "books":[{"title":"1984"}]} ]|] + { matchHeaders = [matchContentTypeJson] } + + it "works with views that have case subselects" $ + get "/authors_have_book_in_decade?select=*,books(title)&id=eq.3" `shouldRespondWith` + [json|[ {"id":3,"name":"Antoine de Saint-Exupéry","has_book_in_forties":true,"has_book_in_fifties":false,"has_book_in_sixties":false, + "books":[{"title":"The Little Prince"}]} ]|] + { matchHeaders = [matchContentTypeJson] } + + it "works with views that have subselect in the FROM clause" $ + get "/forties_and_fifties_books?select=title,first_publisher,author:authors(name)&id=eq.1" `shouldRespondWith` + [json|[{"title":"1984","first_publisher":"Secker & Warburg","author":{"name":"George Orwell"}}]|] + { matchHeaders = [matchContentTypeJson] } + + it "works with views that have CTE" $ + get "/odd_years_publications?select=title,publication_year,first_publisher,author:authors(name)&id=in.(1,2,3)" `shouldRespondWith` + [json|[ + {"title":"1984","publication_year":1949,"first_publisher":"Secker & Warburg","author":{"name":"George Orwell"}}, + {"title":"The Diary of a Young Girl","publication_year":1947,"first_publisher":"Contact Publishing","author":{"name":"Anne Frank"}}, + {"title":"The Little Prince","publication_year":1947,"first_publisher":"Reynal & Hitchcock","author":{"name":"Antoine de Saint-Exupéry"}} ]|] + { matchHeaders = [matchContentTypeJson] } + + it "works when having a capitalized table name and camelCase fk column" $ + get 
"/foos?select=*,bars(*)" `shouldRespondWith` 200 + + it "works when embedding a view with a table that has a long compound pk" $ do + get "/player_view?select=id,contract(purchase_price)&id=in.(1,3,5,7)" `shouldRespondWith` + [json| + [{"id":1,"contract":[{"purchase_price":10}]}, + {"id":3,"contract":[{"purchase_price":30}]}, + {"id":5,"contract":[{"purchase_price":50}]}, + {"id":7,"contract":[]}] |] + { matchHeaders = [matchContentTypeJson] } + get "/contract?select=tournament,player_view(first_name)&limit=3" `shouldRespondWith` + [json| + [{"tournament":"tournament_1","player_view":{"first_name":"first_name_1"}}, + {"tournament":"tournament_2","player_view":{"first_name":"first_name_2"}}, + {"tournament":"tournament_3","player_view":{"first_name":"first_name_3"}}] |] + { matchHeaders = [matchContentTypeJson] } + + it "works when embedding a view with a view that refers to a table that has a long compound pk" $ do + get "/player_view?select=id,contract_view(purchase_price)&id=in.(1,3,5,7)" `shouldRespondWith` + [json| + [{"id":1,"contract_view":[{"purchase_price":10}]}, + {"id":3,"contract_view":[{"purchase_price":30}]}, + {"id":5,"contract_view":[{"purchase_price":50}]}, + {"id":7,"contract_view":[]}] |] + { matchHeaders = [matchContentTypeJson] } + get "/contract_view?select=tournament,player_view(first_name)&limit=3" `shouldRespondWith` + [json| + [{"tournament":"tournament_1","player_view":{"first_name":"first_name_1"}}, + {"tournament":"tournament_2","player_view":{"first_name":"first_name_2"}}, + {"tournament":"tournament_3","player_view":{"first_name":"first_name_3"}}] |] + { matchHeaders = [matchContentTypeJson] } describe "path fixed" $ do it "works when requesting children 2 levels" $ @@ -371,13 +431,7 @@ spec = do [json|[{"id":1,"projects":[{"id":1,"tasks":[{"id":1},{"id":2}]},{"id":2,"tasks":[{"id":3},{"id":4}]}]}]|] { matchHeaders = [matchContentTypeJson] } - it "works with parent relation" $ do - get 
"/message?select=id,body,sender:person_detail.sender(name,sent),recipient:person_detail.recipient(name,received)&id=lt.4" `shouldRespondWith` - [json| - [{"id":1,"body":"Hello Jane","sender":{"name":"John","sent":2},"recipient":{"name":"Jane","received":2}}, - {"id":2,"body":"Hi John","sender":{"name":"Jane","sent":1},"recipient":{"name":"John","received":1}}, - {"id":3,"body":"How are you doing?","sender":{"name":"John","sent":2},"recipient":{"name":"Jane","received":2}}] |] - { matchHeaders = [matchContentTypeJson] } + it "works with parent relation" $ get "/message?select=id,body,sender:person.sender(name),recipient:person.recipient(name)&id=lt.4" `shouldRespondWith` [json| [{"id":1,"body":"Hello Jane","sender":{"name":"John"},"recipient":{"name":"Jane"}}, @@ -385,6 +439,14 @@ spec = do {"id":3,"body":"How are you doing?","sender":{"name":"John"},"recipient":{"name":"Jane"}}] |] { matchHeaders = [matchContentTypeJson] } + it "works with a parent view relation" $ + get "/message?select=id,body,sender:person_detail.sender(name,sent),recipient:person_detail.recipient(name,received)&id=lt.4" `shouldRespondWith` + [json| + [{"id":1,"body":"Hello Jane","sender":{"name":"John","sent":2},"recipient":{"name":"Jane","received":2}}, + {"id":2,"body":"Hi John","sender":{"name":"Jane","sent":1},"recipient":{"name":"John","received":1}}, + {"id":3,"body":"How are you doing?","sender":{"name":"John","sent":2},"recipient":{"name":"Jane","received":2}}] |] + { matchHeaders = [matchContentTypeJson] } + it "works with many<->many relation" $ get "/tasks?select=id,users:users.users_tasks(id)" `shouldRespondWith` [json|[{"id":1,"users":[{"id":1},{"id":3}]},{"id":2,"users":[{"id":1}]},{"id":3,"users":[{"id":1}]},{"id":4,"users":[{"id":1}]},{"id":5,"users":[{"id":2},{"id":3}]},{"id":6,"users":[{"id":2}]},{"id":7,"users":[{"id":2}]},{"id":8,"users":[]}]|] diff --git a/test/Feature/RpcSpec.hs b/test/Feature/RpcSpec.hs index d808115ee8..3e81f220fa 100644 --- a/test/Feature/RpcSpec.hs +++ 
b/test/Feature/RpcSpec.hs @@ -78,6 +78,16 @@ spec = get "/rpc/sayhello" `shouldRespondWith` 404 get "/rpc/sayhello?any_arg=value" `shouldRespondWith` 404 + it "works when having uppercase identifiers" $ do + get "/rpc/quotedFunction?user=mscott&fullName=Michael Scott&SSN=401-32-XXXX" `shouldRespondWith` + [json|{"user": "mscott", "fullName": "Michael Scott", "SSN": "401-32-XXXX"}|] + { matchHeaders = [matchContentTypeJson] } + post "/rpc/quotedFunction" + [json|{"user": "dschrute", "fullName": "Dwight Schrute", "SSN": "030-18-XXXX"}|] + `shouldRespondWith` + [json|{"user": "dschrute", "fullName": "Dwight Schrute", "SSN": "030-18-XXXX"}|] + { matchHeaders = [matchContentTypeJson] } + context "shaping the response returned by a proc" $ do it "returns a project" $ do post "/rpc/getproject" [json| { "id": 1} |] `shouldRespondWith` diff --git a/test/Feature/StructureSpec.hs b/test/Feature/StructureSpec.hs index a8c472148e..0427b0f5ad 100644 --- a/test/Feature/StructureSpec.hs +++ b/test/Feature/StructureSpec.hs @@ -85,6 +85,27 @@ spec = do deleteResponse `shouldBe` Just "No Content" + it "includes an array type for GET responses" $ do + r <- simpleBody <$> get "/" + + let childGetSchema = r ^? key "paths" + . key "/child_entities" + . key "get" + . key "responses" + . key "200" + . key "schema" + + liftIO $ + childGetSchema `shouldBe` Just + [aesonQQ| + { + "items": { + "$ref": "#/definitions/child_entities" + }, + "type": "array" + } + |] + it "includes definitions to tables" $ do r <- simpleBody <$> get "/" @@ -113,7 +134,10 @@ spec = do "format": "integer", "type": "integer" } - } + }, + "required": [ + "id" + ] } |] @@ -163,15 +187,54 @@ spec = do ] |] + describe "Materialized view" $ + + it "includes materialized view properties" $ do + r <- simpleBody <$> get "/" + + let method s = key "paths" . key "/materialized_projects" . key s + summary = r ^? method "get" . key "summary" + description = r ^? method "get" . key "description" + parameters = r ^? method "get" . 
key "parameters" + + liftIO $ do + + summary `shouldBe` Just "A materialized view for projects" + + description `shouldBe` Just "Just a test for materialized views" + + parameters `shouldBe` Just + [aesonQQ| + [ + { "$ref": "#/parameters/rowFilter.materialized_projects.id" }, + { "$ref": "#/parameters/rowFilter.materialized_projects.name" }, + { "$ref": "#/parameters/rowFilter.materialized_projects.client_id" }, + { "$ref": "#/parameters/select" }, + { "$ref": "#/parameters/order" }, + { "$ref": "#/parameters/range" }, + { "$ref": "#/parameters/rangeUnit" }, + { "$ref": "#/parameters/offset" }, + { "$ref": "#/parameters/limit" }, + { "$ref": "#/parameters/preferCount" } + ] + |] + describe "RPC" $ do - it "includes body schema for arguments" $ do + it "includes function summary/description and body schema for arguments" $ do r <- simpleBody <$> get "/" - let args = r ^? key "paths" . key "/rpc/varied_arguments" - . key "post" . key "parameters" - . nth 0 . key "schema" - liftIO $ + let method s = key "paths" . key "/rpc/varied_arguments" . key s + args = r ^? method "post" . key "parameters" . nth 0 . key "schema" + summary = r ^? method "post" . key "summary" + description = r ^? method "post" . 
key "description" + + liftIO $ do + + summary `shouldBe` Just "An RPC function" + + description `shouldBe` Just "Just a test for RPC function arguments" + args `shouldBe` Just [aesonQQ| { @@ -213,7 +276,8 @@ spec = do "type": "integer" } }, - "type": "object" + "type": "object", + "description": "An RPC function\n\nJust a test for RPC function arguments" } |] diff --git a/test/Main.hs b/test/Main.hs index d52dd683cd..10cef820c6 100644 --- a/test/Main.hs +++ b/test/Main.hs @@ -6,9 +6,8 @@ import SpecHelper import qualified Hasql.Pool as P import PostgREST.App (postgrest) -import PostgREST.Config (pgVersion95, pgVersion96, configSettings) -import PostgREST.DbStructure (getDbStructure, getPgVersion, fillSessionWithSettings) -import PostgREST.Types (DbStructure(..)) +import PostgREST.DbStructure (getDbStructure, getPgVersion) +import PostgREST.Types (DbStructure(..), pgVersion95, pgVersion96) import Control.AutoUpdate (defaultUpdateSettings, mkAutoUpdate, updateAction) import Data.Function (id) import Data.IORef @@ -66,7 +65,7 @@ main = do nonexistentSchemaApp = return $ postgrest (testNonexistentSchemaCfg testDbConn) refDbStructure pool getTime $ pure () let reset :: IO () - reset = P.use pool (fillSessionWithSettings (configSettings $ testCfg testDbConn)) >> resetDb testDbConn + reset = resetDb testDbConn actualPgVersion = pgVersion dbStructure extraSpecs = diff --git a/test/fixtures/data.sql b/test/fixtures/data.sql index b20451440c..db44ffd4e7 100644 --- a/test/fixtures/data.sql +++ b/test/fixtures/data.sql @@ -250,7 +250,7 @@ INSERT INTO tsearch VALUES (to_tsvector('It''s kind of fun to do the impossible' INSERT INTO tsearch VALUES (to_tsvector('But also fun to do what is possible')); INSERT INTO tsearch VALUES (to_tsvector('Fat cats ate rats')); INSERT INTO tsearch VALUES (to_tsvector('french', 'C''est un peu amusant de faire l''impossible')); -INSERT INTO tsearch VALUES (to_tsvector('german', 'Es ist eine Art Spaß, das Unmögliche zu machen')); +INSERT INTO 
tsearch VALUES (to_tsvector('german', 'Es ist eine Art Spaß, das Unmögliche zu machen')); -- -- Data for Name: users_projects; Type: TABLE DATA; Schema: test; Owner: - @@ -373,16 +373,27 @@ INSERT INTO authors VALUES (7, 'Harper Lee'); INSERT INTO authors VALUES (8, 'Kurt Vonnegut'); INSERT INTO authors VALUES (9, 'Ken Kesey'); +TRUNCATE TABLE publishers CASCADE; +INSERT INTO publishers VALUES (1, 'Secker & Warburg'); +INSERT INTO publishers VALUES (2, 'Contact Publishing'); +INSERT INTO publishers VALUES (3, 'Reynal & Hitchcock'); +INSERT INTO publishers VALUES (4, 'Little, Brown and Company'); +INSERT INTO publishers VALUES (5, 'Ballantine Books'); +INSERT INTO publishers VALUES (6, 'Faber and Faber'); +INSERT INTO publishers VALUES (7, 'J. B. Lippincott & Co.'); +INSERT INTO publishers VALUES (8, 'Delacorte'); +INSERT INTO publishers VALUES (9, 'Viking Press & Signet Books'); + TRUNCATE TABLE books CASCADE; -INSERT INTO books VALUES (1, '1984', 1949, 1); -INSERT INTO books VALUES (2, 'The Diary of a Young Girl', 1947, 2); -INSERT INTO books VALUES (3, 'The Little Prince', 1947, 3); -INSERT INTO books VALUES (4, 'The Catcher in the Rye', 1951, 4); -INSERT INTO books VALUES (5, 'Farenheit 451', 1953, 5); -INSERT INTO books VALUES (6, 'Lord of the Flies', 1954, 6); -INSERT INTO books VALUES (7, 'To Kill a Mockingbird', 1960, 7); -INSERT INTO books VALUES (8, 'Slaughterhouse-Five', 1969, 8); -INSERT INTO books VALUES (9, 'One Flew Over the Cuckoo''s Nest', 1962, 9); +INSERT INTO books VALUES (1, '1984', 1949, 1, 1); +INSERT INTO books VALUES (2, 'The Diary of a Young Girl', 1947, 2, 2); +INSERT INTO books VALUES (3, 'The Little Prince', 1947, 3, 3); +INSERT INTO books VALUES (4, 'The Catcher in the Rye', 1951, 4, 4); +INSERT INTO books VALUES (5, 'Farenheit 451', 1953, 5, 5); +INSERT INTO books VALUES (6, 'Lord of the Flies', 1954, 6, 6); +INSERT INTO books VALUES (7, 'To Kill a Mockingbird', 1960, 7, 7); +INSERT INTO books VALUES (8, 'Slaughterhouse-Five', 1969, 8, 
8); +INSERT INTO books VALUES (9, 'One Flew Over the Cuckoo''s Nest', 1962, 9, 9); SET search_path = test, pg_catalog; @@ -431,3 +442,24 @@ INSERT INTO jsonb_test VALUES (1, '{ "a": {"b": 2} }'); INSERT INTO jsonb_test VALUES (2, '{ "c": [1,2,3] }'); INSERT INTO jsonb_test VALUES (3, '[{ "d": "test" }]'); INSERT INTO jsonb_test VALUES (4, '{ "e": 1 }'); + +TRUNCATE TABLE private.player CASCADE; +INSERT into private.player +SELECT + generate_series, + 'first_name_' || generate_series, + 'last_name_' || generate_series, + '2018-10-11' +FROM generate_series(1, 12); + +TRUNCATE TABLE contract CASCADE; +insert into contract +select + 'tournament_' || generate_series, + tsrange(now()::timestamp, null), + 10*generate_series, + generate_series, + 'first_name_' || generate_series, + 'last_name_' || generate_series, + '2018-10-11' +from generate_series(1, 6); diff --git a/test/fixtures/privileges.sql b/test/fixtures/privileges.sql index f8749990d5..d2cd5d3cd7 100644 --- a/test/fixtures/privileges.sql +++ b/test/fixtures/privileges.sql @@ -82,6 +82,16 @@ GRANT ALL ON TABLE , "UnitTest" , json_arr , jsonb_test + , authors_books_number + , authors_have_book_in_decade + , forties_and_fifties_books + , odd_years_publications + , foos + , bars + , materialized_projects + , contract + , player_view + , contract_view TO postgrest_test_anonymous; GRANT INSERT ON TABLE insertonly TO postgrest_test_anonymous; diff --git a/test/fixtures/schema.sql b/test/fixtures/schema.sql index e2a12c4406..9f2e6159b7 100755 --- a/test/fixtures/schema.sql +++ b/test/fixtures/schema.sql @@ -222,6 +222,10 @@ AS $_$ SELECT 'Hi'::text; $_$; +COMMENT ON FUNCTION varied_arguments(double precision, character varying, boolean, date, money, enum_menagerie_type, integer) IS +$_$An RPC function + +Just a test for RPC function arguments$_$; -- -- Name: jwt_test(); Type: FUNCTION; Schema: test; Owner: - @@ -1120,17 +1124,17 @@ create view images_base64 as ( select name, replace(encode(img, 'base64'), E'\n', '') as 
img from images ); -create function test.ret_enum(val text) returns test.enum_menagerie_type as $$ +create function test.ret_enum(val text) returns test.enum_menagerie_type as $$ select val::test.enum_menagerie_type; $$ language sql; create domain one_nine as integer check (value >= 1 and value <= 9); -create function test.ret_array() returns integer[] as $$ +create function test.ret_array() returns integer[] as $$ select '{1,2,3}'::integer[]; $$ language sql; -create function test.ret_domain(val integer) returns test.one_nine as $$ +create function test.ret_domain(val integer) returns test.one_nine as $$ select val::test.one_nine; $$ language sql; @@ -1144,20 +1148,20 @@ $$ language sql; create function test.ret_scalars() returns table( a text, b test.enum_menagerie_type, c test.one_nine, d int4range -) as $$ - select row('scalars'::text, enum_first(null::test.enum_menagerie_type), +) as $$ + select row('scalars'::text, enum_first(null::test.enum_menagerie_type), 1::test.one_nine, int4range(10, 20)); $$ language sql; create type test.point_2d as (x integer, y integer); -create function test.ret_point_2d() returns test.point_2d as $$ +create function test.ret_point_2d() returns test.point_2d as $$ select row(10, 5)::test.point_2d; $$ language sql; create type private.point_3d as (x integer, y integer, z integer); -create function test.ret_point_3d() returns private.point_3d as $$ +create function test.ret_point_3d() returns private.point_3d as $$ select row(7, -3, 4)::private.point_3d; $$ language sql; @@ -1171,17 +1175,17 @@ create function test.ret_rows_with_base64_bin() returns setof test.images_base64 select i.name, i.img from test.images_base64 i; $$ language sql; -create function test.single_article(id integer) returns test.articles as $$ +create function test.single_article(id integer) returns test.articles as $$ select a.* from test.articles a where a.id = $1; $$ language sql; -create function test.get_guc_value(name text) returns text as $$ +create 
function test.get_guc_value(name text) returns text as $$ select nullif(current_setting(name), '')::text; $$ language sql; create table w_or_wo_comma_names ( name text ); -create table items_with_different_col_types ( +create table items_with_different_col_types ( int_data integer, text_data text, bool_data bool, @@ -1194,20 +1198,20 @@ create table items_with_different_col_types ( -- Tables used for testing complex boolean logic with and/or query params -create table entities ( +create table entities ( id integer primary key, name text, arr integer[], text_search_vector tsvector ); -create table child_entities ( +create table child_entities ( id integer primary key, name text, parent_id integer references entities(id) ); -create table grandchild_entities ( +create table grandchild_entities ( id integer primary key, name text, parent_id integer references child_entities(id), @@ -1396,11 +1400,17 @@ create table private.authors( name text ); +create table private.publishers( + id integer primary key, + name text +); + create table private.books( id integer primary key, title text, publication_year smallint, - author_id integer references private.authors(id) + author_id integer references private.authors(id), + first_publisher_id integer references private.publishers(id) ); create view test.authors as select id, name from private.authors; @@ -1466,3 +1476,129 @@ create table jsonb_test( id integer primary key, data jsonb ); + +create view test.authors_books_number as +select + id, + name, + ( + select + count(*) + from forties_books where author_id = authors.id + ) as num_in_forties, + ( + select + count(*) + from fifties_books where author_id = authors.id + ) as num_in_fifties, + ( + select + count(*) + from sixties_books where author_id = authors.id + ) as num_in_sixties, + ( + select + count(*) + from ( + select id + from forties_books where author_id = authors.id + union + select id + from fifties_books where author_id = authors.id + union + select id + from 
sixties_books where author_id = authors.id + ) _ + ) as num_in_all_decades +from private.authors; + +create view test.authors_have_book_in_decade as +select + id, + name, + case + when (x.id in (select author_id from test.forties_books)) + then true + else false + end as has_book_in_forties, + case + when (x.id in (select author_id from test.fifties_books)) + then true + else false + end as has_book_in_fifties, + case + when (x.id in (select author_id from test.sixties_books)) + then true + else false + end as has_book_in_sixties +from private.authors x; + +create view test.forties_and_fifties_books as +select x.id, x.title, x.publication_year, y.name as first_publisher, x.author_id +from ( + select id, title, publication_year, author_id, first_publisher_id from private.books + where publication_year >= 1940 and publication_year < 1960) x +join private.publishers y on y.id = x.first_publisher_id; + +create view test.odd_years_publications as +with +odd_years_books as( + select id, title, publication_year, author_id, first_publisher_id + from private.books + where publication_year % 2 <> 0 +) +select + x.id, x.title, x.publication_year, + y.name as first_publisher, x.author_id +from odd_years_books x +join private.publishers y on y.id = x.first_publisher_id; + +CREATE TABLE test."Foo"( + id int primary key, + name text +); + +CREATE TABLE test.bar( + id int primary key, + name text, + "fooId" int references "Foo"(id) +); + +CREATE VIEW test.foos as select id,name from "Foo"; +CREATE VIEW test.bars as select id, "fooId", name from bar; + +create materialized view materialized_projects as +select id, name, client_id from projects; + +comment on materialized view materialized_projects is +$$A materialized view for projects + +Just a test for materialized views$$; + +create or replace function test."quotedFunction"("user" text, "fullName" text, "SSN" text) +returns jsonb AS $$ + select format('{"user": "%s", "fullName": "%s", "SSN": "%s"}', "user", "fullName", 
"SSN")::jsonb; +$$ language sql; + +create table private.player ( + id integer not null, + first_name text not null, + last_name text not null, + birth_date date, + primary key (last_name, id, first_name, birth_date) -- just for testing a long compound pk +); + +create table test.contract ( + tournament text not null, + time tsrange not null, + purchase_price int not null, + id integer not null, + first_name text not null, + last_name text not null, + birth_date date, + foreign key (last_name, id, first_name, birth_date) references private.player +); + +create view test.player_view as select * from private.player; + +create view test.contract_view as select * from test.contract; diff --git a/test/io-tests.sh b/test/io-tests.sh index 90bdd03958..ef5f7cac50 100755 --- a/test/io-tests.sh +++ b/test/io-tests.sh @@ -120,6 +120,49 @@ invalidRoleClaimKey(){ pgrStop } +# ensure iat claim is successful in the presence of pgrst time cache, see https://github.com/PostgREST/postgrest/issues/1139 +ensureIatClaimWorks(){ + pgrStart "./configs/simple.config" + while pgrStarted && test "$( rootStatus )" -ne 200 + do + # wait for the server to start + sleep 0.1 \ + || sleep 1 # fallback: subsecond sleep is not standard and may fail + done + for i in {1..10}; do \ + iatJwt=$(psql -qtAX postgrest_test -c "select jwt.sign(row_to_json(r), 'reallyreallyreallyreallyverysafe') from ( select 'postgrest_test_author' as role, extract(epoch from now()) as iat) r") + httpStatus="$( authorsStatus $iatJwt )" + if test "$httpStatus" -ne 200 + then + ko "iat claim rejected with $httpStatus" + return + fi + sleep .5;\ + done + ok "accepted iat claim" + pgrStop +} + +# ensure app settings don't reset on pool timeout of 10 seconds, see https://github.com/PostgREST/postgrest/issues/1141 +ensureAppSettings(){ + pgrStart "./configs/app-settings.config" + while pgrStarted && test "$( rootStatus )" -ne 200 + do + # wait for the server to start + sleep 0.1 \ + || sleep 1 # fallback: 
subsecond sleep is not standard and may fail + done + sleep 11 + response=$(curl -s "http://localhost:$pgrPort/rpc/get_guc_value?name=app.settings.external_api_secret") + if test "$response" = "\"0123456789abcdef\"" + then + ok "GET /rpc/get_guc_value response is $response" + else + ko "GET /rpc/get_guc_value response was $response" + fi + pgrStop +} + # PRE: curl must be available test -n "$(command -v curl)" || bailOut 'curl is not available' @@ -157,6 +200,9 @@ invalidRoleClaimKey '.#$%&$%/' invalidRoleClaimKey '' invalidRoleClaimKey 1234 +ensureIatClaimWorks +ensureAppSettings + cleanUp exit $failedTests diff --git a/test/io-tests/configs/app-settings.config b/test/io-tests/configs/app-settings.config new file mode 100644 index 0000000000..4d7ab96a37 --- /dev/null +++ b/test/io-tests/configs/app-settings.config @@ -0,0 +1,8 @@ +db-uri = "postgres:///postgrest_test" +db-schema = "test" +db-anon-role = "postgrest_test_anonymous" +db-pool = 1 +server-host = "127.0.0.1" +server-port = 49421 + +app.settings.external_api_secret = "0123456789abcdef" diff --git a/test/io-tests/configs/base64-secret-from-file.config b/test/io-tests/configs/base64-secret-from-file.config index 98e116b768..295d27cd47 100644 --- a/test/io-tests/configs/base64-secret-from-file.config +++ b/test/io-tests/configs/base64-secret-from-file.config @@ -2,7 +2,7 @@ db-uri = "postgres:///postgrest_test" db-schema = "test" db-anon-role = "postgrest_test_anonymous" db-pool = 1 -server-host = "*4" +server-host = "127.0.0.1" server-port = 49421 # Read secret from a file: /dev/stdin (alias for standard input) diff --git a/test/io-tests/configs/role-claim-key.config b/test/io-tests/configs/role-claim-key.config index 10ca1a00d7..e6fbee6699 100644 --- a/test/io-tests/configs/role-claim-key.config +++ b/test/io-tests/configs/role-claim-key.config @@ -2,7 +2,7 @@ db-uri = "postgres:///postgrest_test" db-schema = "test" db-anon-role = "postgrest_test_anonymous" db-pool = 1 -server-host = "*4" +server-host = 
"127.0.0.1" server-port = 49421 role-claim-key = "$(ROLE_CLAIM_KEY)" jwt-secret = "reallyreallyreallyreallyverysafe" diff --git a/test/io-tests/configs/secret-from-file.config b/test/io-tests/configs/secret-from-file.config index 26a71112f9..00f703e772 100644 --- a/test/io-tests/configs/secret-from-file.config +++ b/test/io-tests/configs/secret-from-file.config @@ -2,7 +2,7 @@ db-uri = "postgres:///postgrest_test" db-schema = "test" db-anon-role = "postgrest_test_anonymous" db-pool = 1 -server-host = "*4" +server-host = "127.0.0.1" server-port = 49421 # Read secret from a file: /dev/stdin (alias for standard input) diff --git a/test/io-tests/configs/simple.config b/test/io-tests/configs/simple.config new file mode 100644 index 0000000000..350cb7e713 --- /dev/null +++ b/test/io-tests/configs/simple.config @@ -0,0 +1,7 @@ +db-uri = "postgres:///postgrest_test" +db-schema = "test" +db-anon-role = "postgrest_test_anonymous" +db-pool = 1 +server-host = "127.0.0.1" +server-port = 49421 +jwt-secret = "reallyreallyreallyreallyverysafe" diff --git a/test/memory-tests.sh b/test/memory-tests.sh index 2a3ad82c69..37f944d8ab 100755 --- a/test/memory-tests.sh +++ b/test/memory-tests.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#! /usr/bin/env bash currentTest=1 failedTests=0 result(){ echo "$1 $currentTest $2"; currentTest=$(( $currentTest + 1 )); } @@ -96,20 +96,20 @@ setUp echo "Running memory usage tests.." 
-jsonKeyTest "1M" "POST" "/rpc/leak" "15M" -jsonKeyTest "1M" "POST" "/leak" "15M" -jsonKeyTest "1M" "PATCH" "/leak?id=eq.1" "15M" +jsonKeyTest "1M" "POST" "/rpc/leak" "20M" +jsonKeyTest "1M" "POST" "/leak" "20M" +jsonKeyTest "1M" "PATCH" "/leak?id=eq.1" "20M" jsonKeyTest "10M" "POST" "/rpc/leak" "105M" jsonKeyTest "10M" "POST" "/leak" "105M" jsonKeyTest "10M" "PATCH" "/leak?id=eq.1" "105M" -jsonKeyTest "100M" "POST" "/rpc/leak" "895M" -jsonKeyTest "100M" "POST" "/leak" "895M" -jsonKeyTest "100M" "PATCH" "/leak?id=eq.1" "895M" +jsonKeyTest "50M" "POST" "/rpc/leak" "500M" +jsonKeyTest "50M" "POST" "/leak" "500M" +jsonKeyTest "50M" "PATCH" "/leak?id=eq.1" "500M" postJsonArrayTest "1000" "/perf_articles" "20M" -postJsonArrayTest "10000" "/perf_articles" "120M" +postJsonArrayTest "10000" "/perf_articles" "150M" postJsonArrayTest "100000" "/perf_articles" "1.15G" cleanUp diff --git a/test/memory-tests/config b/test/memory-tests/config index 31ba9d368d..53979bdf2a 100644 --- a/test/memory-tests/config +++ b/test/memory-tests/config @@ -2,7 +2,7 @@ db-uri = "postgres:///postgrest_test" db-schema = "test" db-anon-role = "postgrest_test_anonymous" db-pool = 1 -server-host = "*4" +server-host = "127.0.0.1" server-port = 49421 jwt-secret = "reallyreallyreallyreallyverysafe"