diff --git a/CHANGELOG.md b/CHANGELOG.md
index a26410c91b..92ec196c86 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -28,6 +28,7 @@ This project adheres to [Semantic Versioning](http://semver.org/).
     + `PGRST204`: Column is not found
     + `PGRST003`: Timed out when acquiring connection to db
 - #2667, Fix `db-pool-acquisition-timeout` not logging to stderr when the timeout is reached - @steve-chavez
+- #1652, Fix function call with arguments not inlining - @steve-chavez
 
 ## [10.1.2] - 2023-02-01
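The fix hinges on how arguments reach the function. PostgreSQL only inlines a `LANGUAGE sql` set-returning function when the call site is simple enough, and per #1652 the scalar-subquery arguments the old builder produced (`"id" := (SELECT "id" FROM pgrst_args LIMIT 1)`) defeat that. A minimal sketch of the difference; the `items` table and `items_below` function here are hypothetical, not part of this patch:

```sql
CREATE TABLE items (id int PRIMARY KEY);

CREATE FUNCTION items_below(max_id int) RETURNS SETOF items AS $$
  SELECT * FROM items WHERE id < max_id
$$ LANGUAGE sql STABLE;

-- Subquery argument (the old query shape): the call stays an opaque Function Scan.
EXPLAIN SELECT * FROM items_below((SELECT 10));

-- Plain argument (what the lateral join below provides): the body is inlined,
-- so the plan can reach the table and its index directly.
EXPLAIN SELECT * FROM items_below(10);
```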
diff --git a/src/PostgREST/Query/QueryBuilder.hs b/src/PostgREST/Query/QueryBuilder.hs
index c23f2061d9..b6ec51d9bb 100644
--- a/src/PostgREST/Query/QueryBuilder.hs
+++ b/src/PostgREST/Query/QueryBuilder.hs
@@ -82,13 +82,12 @@ getSelectsJoins rr@(Node ReadPlan{select, relName, relToParent=Just rel, relAggA
 
 mutatePlanToQuery :: MutatePlan -> SQL.Snippet
 mutatePlanToQuery (Insert mainQi iCols body onConflct putConditions returnings _) =
-  "WITH " <> normalizedBody body <> " " <>
   "INSERT INTO " <> SQL.sql (fromQi mainQi) <> SQL.sql (if null iCols then " " else "(" <> cols <> ") ") <>
-  pgFmtSelectFromJson iCols <>
+  fromJsonBodyF body iCols True False <>
   -- Only used for PUT
-  (if null putConditions then mempty else "WHERE " <> intercalateSnippet " AND " (pgFmtLogicTree (QualifiedIdentifier mempty "_") <$> putConditions)) <>
+  (if null putConditions then mempty else "WHERE " <> intercalateSnippet " AND " (pgFmtLogicTree (QualifiedIdentifier mempty "pgrst_body") <$> putConditions)) <>
   SQL.sql (BS.unwords [
-    maybe "" (\(oncDo, oncCols) ->
+    maybe mempty (\(oncDo, oncCols) ->
       if null oncCols then
         mempty
       else
@@ -114,15 +113,14 @@ mutatePlanToQuery (Update mainQi uCols body logicForest range ordts returnings)
     SQL.sql $ "SELECT " <> emptyBodyReturnedColumns <> " FROM " <> fromQi mainQi <> " WHERE false"
 
   | range == allRange =
-    "WITH " <> normalizedBody body <> " " <>
     "UPDATE " <> mainTbl <> " SET " <> SQL.sql nonRangeCols <> " " <>
-    "FROM (" <> pgFmtSelectFromJson uCols <> ") AS _ " <>
+    fromJsonBodyF body uCols False False <>
     whereLogic <> " " <>
     SQL.sql (returningF mainQi returnings)
 
   | otherwise =
-    "WITH " <> normalizedBody body <> ", " <>
-    "pgrst_update_body AS (" <> pgFmtSelectFromJson uCols <> " LIMIT 1), " <>
+    "WITH " <>
+    "pgrst_update_body AS (" <> fromJsonBodyF body uCols True True <> "), " <>
     "pgrst_affected_rows AS (" <>
       "SELECT " <> SQL.sql rangeIdF <> " FROM " <> mainTbl <>
       whereLogic <> " " <>
@@ -138,7 +136,7 @@ mutatePlanToQuery (Update mainQi uCols body logicForest range ordts returnings)
     whereLogic = if null logicForest then mempty else " WHERE " <> intercalateSnippet " AND " (pgFmtLogicTree mainQi <$> logicForest)
     mainTbl = SQL.sql (fromQi mainQi)
     emptyBodyReturnedColumns = if null returnings then "NULL" else BS.intercalate ", " (pgFmtColumn (QualifiedIdentifier mempty $ qiName mainQi) <$> returnings)
-    nonRangeCols = BS.intercalate ", " (pgFmtIdent . tfName <> const " = _." <> pgFmtIdent . tfName <$> uCols)
+    nonRangeCols = BS.intercalate ", " (pgFmtIdent . tfName <> const " = " <> pgFmtColumn (QualifiedIdentifier mempty "pgrst_body") . tfName <$> uCols)
     rangeCols = BS.intercalate ", " ((\col -> pgFmtIdent (tfName col) <> " = (SELECT " <> pgFmtIdent (tfName col) <> " FROM pgrst_update_body) ") <$> uCols)
     (whereRangeIdF, rangeIdF) = mutRangeF mainQi (fst . otTerm <$> ordts)
@@ -167,50 +165,26 @@ mutatePlanToQuery (Delete mainQi logicForest range ordts returnings)
 
 callPlanToQuery :: CallPlan -> SQL.Snippet
 callPlanToQuery (FunctionCall qi params args returnsScalar multipleCall returnings) =
-  prmsCTE <> argsBody
+  "SELECT " <> (if returnsScalar then "pgrst_call AS pgrst_scalar " else returnedColumns) <> " " <>
+  fromCall
   where
-    (prmsCTE, argFrag) = case params of
-      OnePosParam prm -> ("WITH pgrst_args AS (SELECT NULL)", singleParameter args (encodeUtf8 $ ppType prm))
-      KeyParams []    -> (mempty, mempty)
-      KeyParams prms  -> (
-          "WITH " <> normalizedBody args <> ", " <>
-          SQL.sql (
-          BS.unwords [
-            "pgrst_args AS (",
-              "SELECT * FROM json_to_recordset(" <> selectBody <> ") AS _(" <> fmtParams prms (const mempty) (\a -> " " <> encodeUtf8 (ppType a)) <> ")",
-            ")"])
-        , SQL.sql $ if multipleCall
-            then fmtParams prms varadicPrefix (\a -> " := pgrst_args." <> pgFmtIdent (ppName a))
-            else fmtParams prms varadicPrefix (\a -> " := (SELECT " <> pgFmtIdent (ppName a) <> " FROM pgrst_args LIMIT 1)")
-        )
-
-    fmtParams :: [ProcParam] -> (ProcParam -> SqlFragment) -> (ProcParam -> SqlFragment) -> SqlFragment
-    fmtParams prms prmFragPre prmFragSuf = BS.intercalate ", "
-      ((\a -> prmFragPre a <> pgFmtIdent (ppName a) <> prmFragSuf a) <$> prms)
+    fromCall = case params of
+      OnePosParam prm -> "FROM " <> callIt (singleParameter args $ encodeUtf8 $ ppType prm)
+      KeyParams []    -> "FROM " <> callIt mempty
+      KeyParams prms  -> fromJsonBodyF args ((\p -> TypedField (ppName p) (ppType p)) <$> prms) False (not multipleCall) <> ", " <>
+                         "LATERAL " <> callIt (fmtParams prms)
 
-    varadicPrefix :: ProcParam -> SqlFragment
-    varadicPrefix a = if ppVar a then "VARIADIC " else mempty
+    callIt :: SQL.Snippet -> SQL.Snippet
+    callIt argument = SQL.sql (fromQi qi) <> "(" <> argument <> ") pgrst_call"
 
-    argsBody :: SQL.Snippet
-    argsBody
-      | multipleCall =
-          if returnsScalar
-            then "SELECT " <> callIt <> " AS pgrst_scalar FROM pgrst_args"
-            else "SELECT pgrst_lat_args.* FROM pgrst_args, " <>
-                 "LATERAL ( SELECT " <> returnedColumns <> " FROM " <> callIt <> " ) pgrst_lat_args"
-      | otherwise =
-          if returnsScalar
-            then "SELECT " <> callIt <> " AS pgrst_scalar"
-            else "SELECT " <> returnedColumns <> " FROM " <> callIt
-
-    callIt :: SQL.Snippet
-    callIt = SQL.sql (fromQi qi) <> "(" <> argFrag <> ")"
+    fmtParams :: [ProcParam] -> SQL.Snippet
+    fmtParams prms = SQL.sql $ BS.intercalate ", "
+      ((\a -> (if ppVar a then "VARIADIC " else mempty) <> pgFmtIdent (ppName a) <> " := pgrst_body." <> pgFmtIdent (ppName a)) <$> prms)
 
     returnedColumns :: SQL.Snippet
     returnedColumns
       | null returnings = "*"
-      | otherwise       = SQL.sql $ BS.intercalate ", " (pgFmtColumn (QualifiedIdentifier mempty $ qiName qi) <$> returnings)
-
+      | otherwise       = SQL.sql $ BS.intercalate ", " (pgFmtColumn (QualifiedIdentifier mempty "pgrst_call") <$> returnings)
 
 -- | SQL query meant for COUNTing the root node of the Tree.
 -- It only takes WHERE into account and doesn't include LIMIT/OFFSET because it would reduce the COUNT.
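`callPlanToQuery` now builds every call as `SELECT ... FROM <body>, LATERAL <function>(...) pgrst_call`, so named arguments arrive as plain column references on `pgrst_body`. For a scalar-returning function the generated statement looks roughly like the following sketch; `add_them(a integer, b integer)` is an assumed function, and PostgREST binds the payload as a parameter rather than inlining a literal:

```sql
SELECT pgrst_call AS pgrst_scalar
FROM (SELECT '{"a": 1, "b": 2}'::json AS json_data) pgrst_payload,
LATERAL (
  SELECT CASE WHEN json_typeof(pgrst_payload.json_data) = 'array'
              THEN pgrst_payload.json_data
              ELSE json_build_array(pgrst_payload.json_data) END AS val
) pgrst_uniform_json,
LATERAL (
  SELECT * FROM json_to_recordset(pgrst_uniform_json.val) AS _("a" integer, "b" integer) LIMIT 1
) pgrst_body,
LATERAL "test"."add_them"("a" := pgrst_body.a, "b" := pgrst_body.b) pgrst_call;
```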
diff --git a/src/PostgREST/Query/SqlFragment.hs b/src/PostgREST/Query/SqlFragment.hs
index 4548911a5a..704350be9d 100644
--- a/src/PostgREST/Query/SqlFragment.hs
+++ b/src/PostgREST/Query/SqlFragment.hs
@@ -21,7 +21,6 @@ module PostgREST.Query.SqlFragment
   , limitOffsetF
   , locationF
   , mutRangeF
-  , normalizedBody
   , orderF
   , pgFmtColumn
   , pgFmtIdent
@@ -30,11 +29,10 @@ module PostgREST.Query.SqlFragment
   , pgFmtLogicTree
   , pgFmtOrderTerm
   , pgFmtSelectItem
-  , pgFmtSelectFromJson
+  , fromJsonBodyF
   , responseHeadersF
   , responseStatusF
   , returningF
-  , selectBody
   , singleParameter
   , sourceCTEName
   , unknownEncoder
@@ -121,25 +119,6 @@ ftsOperator = \case
   FilterFtsPhrase    -> "@@ phraseto_tsquery"
   FilterFtsWebsearch -> "@@ websearch_to_tsquery"
 
--- |
--- These CTEs convert a json object into a json array, this way we can use json_to_recordset for all json payloads
--- Otherwise we'd have to use json_to_record for json objects and json_to_recordset for json arrays
--- We do this in SQL to avoid processing the JSON in application code
--- TODO: At this stage there shouldn't be a Maybe since ApiRequest should ensure that an INSERT/UPDATE has a body
-normalizedBody :: Maybe LBS.ByteString -> SQL.Snippet
-normalizedBody body =
-  "pgrst_payload AS (SELECT " <> jsonPlaceHolder <> " AS json_data), " <>
-  SQL.sql (BS.unwords [
-    "pgrst_body AS (",
-      "SELECT",
-        "CASE WHEN json_typeof(json_data) = 'array'",
-        "THEN json_data",
-        "ELSE json_build_array(json_data)",
-        "END AS val",
-      "FROM pgrst_payload)"])
-  where
-    jsonPlaceHolder = SQL.encoderAndParam (HE.nullable HE.jsonLazyBytes) body
-
 singleParameter :: Maybe LBS.ByteString -> ByteString -> SQL.Snippet
 singleParameter body typ =
   if typ == "bytea"
@@ -147,9 +126,6 @@ singleParameter body typ =
     then SQL.encoderAndParam (HE.nullable HE.bytea) (LBS.toStrict <$> body)
     else SQL.encoderAndParam (HE.nullable HE.unknown) (LBS.toStrict <$> body) <> "::" <> SQL.sql typ
 
-selectBody :: SqlFragment
-selectBody = "(SELECT val FROM pgrst_body)"
-
 -- Here we build the pg array literal, e.g '{"Hebdon, John","Other","Another"}', manually.
 -- This is necessary to pass an "unknown" array and let pg infer the type.
 -- There are backslashes here, but since this value is parametrized and is not a string constant
@@ -244,19 +220,29 @@ pgFmtSelectItem table (f@(fName, jp), Nothing, alias) = pgFmtField table f <> SQ
 -- Not quoting should be fine, we validate the input on Parsers.
 pgFmtSelectItem table (f@(fName, jp), Just cast, alias) = "CAST (" <> pgFmtField table f <> " AS " <> SQL.sql (encodeUtf8 cast) <> " )" <> SQL.sql (pgFmtAs fName jp alias)
 
-pgFmtSelectFromJson :: [TypedField] -> SQL.Snippet
-pgFmtSelectFromJson fields =
-  SQL.sql "SELECT " <> parsedCols <> " " <>
-  (if null fields
-    -- When we are inserting no columns (e.g. using default values), we can't use our ordinary `json_to_recordset`
-    -- because it can't extract records with no columns (there's no valid syntax for the `AS (colName colType,...)`
-    -- part). But we still need to ensure as many rows are created as there are array elements.
- then SQL.sql ("FROM json_array_elements (" <> selectBody <> ") _ ") - else SQL.sql ("FROM json_to_recordset (" <> selectBody <> ") AS _ " <> "(" <> typedCols <> ") ") - ) +-- TODO: At this stage there shouldn't be a Maybe since ApiRequest should ensure that an INSERT/UPDATE has a body +fromJsonBodyF :: Maybe LBS.ByteString -> [TypedField] -> Bool -> Bool -> SQL.Snippet +fromJsonBodyF body fields includeSelect includeLimitOne = + SQL.sql + (if includeSelect then "SELECT " <> parsedCols <> " " else mempty) <> + "FROM (SELECT " <> jsonPlaceHolder <> " AS json_data) pgrst_payload, " <> + -- convert a json object into a json array, this way we can use json_to_recordset for all json payloads + -- Otherwise we'd have to use json_to_record for json objects and json_to_recordset for json arrays + -- We do this in SQL to avoid processing the JSON in application code + "LATERAL (SELECT CASE WHEN json_typeof(pgrst_payload.json_data) = 'array' THEN pgrst_payload.json_data ELSE json_build_array(pgrst_payload.json_data) END AS val) pgrst_uniform_json, " <> + "LATERAL (SELECT * FROM " <> + (if null fields + -- When we are inserting no columns (e.g. using default values), we can't use our ordinary `json_to_recordset` + -- because it can't extract records with no columns (there's no valid syntax for the `AS (colName colType,...)` + -- part). But we still need to ensure as many rows are created as there are array elements. + then SQL.sql "json_array_elements(pgrst_uniform_json.val) _ " + else SQL.sql ("json_to_recordset(pgrst_uniform_json.val) AS _(" <> typedCols <> ") " <> if includeLimitOne then "LIMIT 1" else mempty) + ) <> + ") pgrst_body " where - parsedCols = SQL.sql $ BS.intercalate ", " $ pgFmtIdent . tfName <$> fields + parsedCols = BS.intercalate ", " $ fromQi . QualifiedIdentifier "pgrst_body" . tfName <$> fields typedCols = BS.intercalate ", " $ pgFmtIdent . tfName <> const " " <> encodeUtf8 . 
diff --git a/test/pgbench/1652/new.sql b/test/pgbench/1652/new.sql
new file mode 100644
index 0000000000..2cd7a16132
--- /dev/null
+++ b/test/pgbench/1652/new.sql
@@ -0,0 +1,20 @@
+WITH pgrst_source AS (
+  SELECT pgrst_call.*
+  FROM (
+    SELECT '{"id": 4}'::json as json_data
+  ) pgrst_payload,
+  LATERAL (
+    SELECT CASE WHEN json_typeof(pgrst_payload.json_data) = 'array' THEN pgrst_payload.json_data ELSE json_build_array(pgrst_payload.json_data) END AS val
+  ) pgrst_uniform_json,
+  LATERAL (
+    SELECT * FROM json_to_recordset(pgrst_uniform_json.val) AS _("id" integer) LIMIT 1
+  ) pgrst_body,
+  LATERAL "test"."get_projects_below"("id" := pgrst_body.id) pgrst_call
+)
+SELECT
+  null::bigint AS total_result_set,
+  pg_catalog.count(_postgrest_t) AS page_total,
+  coalesce(json_agg(_postgrest_t), '[]')::character varying AS body,
+  nullif(current_setting('response.headers', true), '') AS response_headers,
+  nullif(current_setting('response.status', true), '') AS response_status
+FROM (SELECT "projects".* FROM "pgrst_source" AS "projects") _postgrest_t;
diff --git a/test/pgbench/1652/old.sql b/test/pgbench/1652/old.sql
new file mode 100644
index 0000000000..07965a1cc4
--- /dev/null
+++ b/test/pgbench/1652/old.sql
@@ -0,0 +1,15 @@
+WITH pgrst_source AS (
+  WITH
+  pgrst_payload AS (SELECT '{"id": 4}'::json AS json_data),
+  pgrst_body AS ( SELECT CASE WHEN json_typeof(json_data) = 'array' THEN json_data ELSE json_build_array(json_data) END AS val FROM pgrst_payload),
+  pgrst_args AS ( SELECT * FROM json_to_recordset((SELECT val FROM pgrst_body)) AS _("id" integer) )
+  SELECT "get_projects_below".*
+  FROM "test"."get_projects_below"("id" := (SELECT "id" FROM pgrst_args LIMIT 1))
+)
+SELECT
+  null::bigint AS total_result_set,
+  pg_catalog.count(_postgrest_t) AS page_total,
+  coalesce(json_agg(_postgrest_t), '[]')::character varying AS body,
+  nullif(current_setting('response.headers', true), '') AS response_headers,
+  nullif(current_setting('response.status', true), '') AS response_status
+FROM (SELECT "projects".* FROM "pgrst_source" AS "projects") _postgrest_t;
diff --git a/test/pgbench/2677/new.sql b/test/pgbench/2677/new.sql
new file mode 100644
index 0000000000..f4b70b4f25
--- /dev/null
+++ b/test/pgbench/2677/new.sql
@@ -0,0 +1,12 @@
+INSERT INTO "test"."complex_items"("arr_data", "field-with_sep", "id", "name")
+SELECT pgrst_body."arr_data", pgrst_body."field-with_sep", pgrst_body."id", pgrst_body."name"
+FROM (
+  SELECT '[{"id": 4, "name": "Vier"}, {"id": 5, "name": "Funf", "arr_data": null}, {"id": 6, "name": "Sechs", "arr_data": [1, 2, 3], "field-with_sep": 6}]'::json as json_data
+) pgrst_payload,
+LATERAL (
+  SELECT CASE WHEN json_typeof(pgrst_payload.json_data) = 'array' THEN pgrst_payload.json_data ELSE json_build_array(pgrst_payload.json_data) END AS val
+) pgrst_uniform_json,
+LATERAL (
+  SELECT * FROM json_to_recordset (pgrst_uniform_json.val) AS _ ("arr_data" integer[], "field-with_sep" integer, "id" bigint, "name" text)
+) pgrst_body
+RETURNING "test"."complex_items".*
"field-with_sep": 6}]'::json AS json_data), +pgrst_body AS ( SELECT CASE WHEN json_typeof(json_data) = 'array' THEN json_data ELSE json_build_array(json_data) END AS val FROM pgrst_payload) +INSERT INTO "test"."complex_items"("arr_data", "field-with_sep", "id", "name") +SELECT "arr_data", "field-with_sep", "id", "name" +FROM json_to_recordset ((SELECT val FROM pgrst_body)) AS _ ("arr_data" integer[], "field-with_sep" integer, "id" bigint, "name" text) +RETURNING "test"."complex_items".* diff --git a/test/pgbench/README.md b/test/pgbench/README.md new file mode 100644 index 0000000000..da8fa97496 --- /dev/null +++ b/test/pgbench/README.md @@ -0,0 +1,8 @@ +## pgbench tests + +Can be used as: + +``` +postgrest-with-postgresql-15 -f test/pgbench/fixtures.sql pgbench -n -T 10 -f test/pgbench/2677/old.sql +postgrest-with-postgresql-15 -f test/pgbench/fixtures.sql pgbench -n -T 10 -f test/pgbench/2677/new.sql +``` diff --git a/test/pgbench/fixtures.sql b/test/pgbench/fixtures.sql new file mode 100644 index 0000000000..f176d5d691 --- /dev/null +++ b/test/pgbench/fixtures.sql @@ -0,0 +1,7 @@ +\ir ../spec/fixtures/load.sql + +ALTER TABLE test.complex_items + DROP CONSTRAINT complex_items_pkey; + +ALTER TABLE test.complex_items + ALTER COLUMN "field-with_sep" DROP NOT NULL; diff --git a/test/spec/Feature/Query/PlanSpec.hs b/test/spec/Feature/Query/PlanSpec.hs index b896efb87e..ec18996a81 100644 --- a/test/spec/Feature/Query/PlanSpec.hs +++ b/test/spec/Feature/Query/PlanSpec.hs @@ -8,7 +8,9 @@ import Network.Wai.Test (SResponse (..)) import Data.Aeson.Lens import Data.Aeson.QQ +import qualified Data.ByteString as BS import qualified Data.ByteString.Lazy as LBS +import qualified Data.Text as T import Network.HTTP.Types import Test.Hspec hiding (pendingWith) import Test.Hspec.Wai @@ -166,10 +168,7 @@ spec actualPgVersion = do liftIO $ do resHeaders `shouldSatisfy` elem ("Content-Type", "application/vnd.pgrst.plan+json; charset=utf-8") resStatus `shouldBe` Status { statusCode = 200, statusMessage="OK" } - totalCost `shouldBe` - if actualPgVersion > pgVersion120 - then 3.28 - else 3.33 + totalCost `shouldBe` 3.27 it "outputs the total cost for an update" $ do r <- request methodPatch "/projects?id=eq.3" @@ -182,10 +181,7 @@ spec actualPgVersion = do liftIO $ do resHeaders `shouldSatisfy` elem ("Content-Type", "application/vnd.pgrst.plan+json; charset=utf-8") resStatus `shouldBe` Status { statusCode = 200, statusMessage="OK" } - totalCost `shouldBe` - if actualPgVersion > pgVersion120 - then 12.45 - else 12.5 + totalCost `shouldBe` 12.45 it "outputs the total cost for a delete" $ do r <- request methodDelete "/projects?id=in.(1,2,3)" @@ -212,10 +208,7 @@ spec actualPgVersion = do liftIO $ do resHeaders `shouldSatisfy` elem ("Content-Type", "application/vnd.pgrst.plan+json; charset=utf-8") resStatus `shouldBe` Status { statusCode = 200, statusMessage="OK" } - totalCost `shouldBe` - if actualPgVersion >= pgVersion120 - then 1.3 - else 1.35 + totalCost `shouldBe` 1.29 it "outputs the plan for application/vnd.pgrst.object" $ do r <- request methodDelete "/projects?id=eq.6" @@ -338,7 +331,7 @@ spec actualPgVersion = do r <- request methodGet "/rpc/get_projects_below?id=3" [planHdr] "" - liftIO $ planCost r `shouldSatisfy` (< 36.4) + liftIO $ planCost r `shouldSatisfy` (< 45.4) it "should not exceed cost when calling setof composite proc with empty params" $ do r <- request methodGet "/rpc/getallprojects" @@ -367,6 +360,29 @@ spec actualPgVersion = do liftIO $ planCost r `shouldSatisfy` (< 5.85) + context 
"function inlining" $ do + it "should inline a zero argument function" $ do + r <- request methodGet "/rpc/getallusers?id=eq.1" + [(hAccept, "application/vnd.pgrst.plan")] "" + + let resBody = simpleBody r + + liftIO $ do + resBody `shouldSatisfy` ( + if actualPgVersion >= pgVersion120 + then (\t -> T.isInfixOf "Index Scan using users_pkey on users" (decodeUtf8 $ BS.toStrict t)) + else (\t -> T.isInfixOf "Seq Scan on users" (decodeUtf8 $ BS.toStrict t))) + + it "should inline a function with arguments" $ do + r <- request methodGet "/rpc/getitemrange?min=10&max=15" + [(hAccept, "application/vnd.pgrst.plan")] "" + + let resBody = simpleBody r + + liftIO $ do + -- a Seq Scan ensures the function is inlined as the plan uses the underlying table + resBody `shouldSatisfy` (\t -> T.isInfixOf "Seq Scan on items" (decodeUtf8 $ BS.toStrict t)) + disabledSpec :: SpecWith ((), Application) disabledSpec = it "doesn't work if db-plan-enabled=false(the default)" $ do diff --git a/test/spec/Feature/Query/RpcSpec.hs b/test/spec/Feature/Query/RpcSpec.hs index f9275f6625..3b95e5db25 100644 --- a/test/spec/Feature/Query/RpcSpec.hs +++ b/test/spec/Feature/Query/RpcSpec.hs @@ -15,7 +15,7 @@ import Text.Heredoc import PostgREST.Config.PgVersion (PgVersion, pgVersion100, pgVersion109, pgVersion110, pgVersion112, pgVersion114, - pgVersion140) + pgVersion130, pgVersion140) import Protolude hiding (get) import SpecHelper @@ -309,7 +309,8 @@ spec actualPgVersion = ]|] { matchHeaders = [matchContentTypeJson] } - when (actualPgVersion >= pgVersion110) $ + -- https://github.com/PostgREST/postgrest/pull/2677#issuecomment-1444976849 + when (actualPgVersion >= pgVersion130) $ it "can embed if rpc returns domain of table type" $ do post "/rpc/getproject_domain?select=id,name,client:clients(id),tasks(id)" [json| { "id": 1} |]