Mirror of https://github.com/Airsequel/AirGQL.git

Migrate all gql code to share types with introspection

prescientmoon 2024-11-14 18:04:10 +01:00
commit 9cebb6dc2d
4 changed files with 364 additions and 828 deletions

File diff suppressed because it is too large


@@ -3,6 +3,9 @@ module AirGQL.Introspection (
   getSchemaResolver,
   tableQueryField,
   tableQueryByPKField,
+  tableInsertField,
+  tableUpdateField,
+  tableDeleteField,
 )
 where
@@ -411,13 +414,12 @@ getSchema accessMode tables = do
 -- We make this toplevel, because putting it inside `getSchemaResolver`
 -- means haskell will evaluate it each time, which leads to each execution
--- taking 2-3s
-makeSchemaResolver :: Either Text (Type.Schema -> Resolver IO)
-makeSchemaResolver = do
-  let schemaField = Type.field "__schema" $ Type.nonNull Type.typeSchema
-  ty <- makeType schemaField.type_
-  let gqlField = Out.Field schemaField.description ty mempty
-  pure $ \schema -> Out.ValueResolver gqlField $ pure $ toGraphQL schema
+-- taking 2-3 additional seconds
+schemaField :: Either Text (Out.Field IO)
+schemaField = do
+  let field = Type.field "__schema" $ Type.nonNull Type.typeSchema
+  ty <- makeType field.type_
+  pure $ Out.Field field.description ty mempty

 getSchemaResolver
@@ -425,8 +427,9 @@ getSchemaResolver
   -> [TableEntry]
   -> IO (HashMap Text (Resolver IO))
 getSchemaResolver accessMode tables = do
-  case makeSchemaResolver of
-    Right make -> do
+  case schemaField of
+    Right field -> do
       let schema = getSchema accessMode tables
-      pure $ HashMap.singleton "__schema" $ make schema
+      let resolver = Out.ValueResolver field $ pure $ toGraphQL schema
+      pure $ HashMap.singleton "__schema" resolver
     Left err -> fail $ T.unpack err
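The comment kept in the hunk above relies on GHC's sharing of top-level bindings: a top-level value (a constant applicative form) is evaluated at most once and then reused, whereas the same value defined inside getSchemaResolver would be rebuilt on every call. A minimal, standalone sketch of that behaviour, using hypothetical names (expensiveTable, lookupSquare) rather than AirGQL code:

module Main where

import qualified Data.Map.Strict as Map

-- A top-level binding like this is a constant applicative form: GHC evaluates
-- it the first time it is demanded and shares the result with every caller.
expensiveTable :: Map.Map Int Int
expensiveTable = Map.fromList [(n, n * n) | n <- [1 .. 1000000]]

-- Had the map been built inside this function instead, every call would redo
-- the work, the analogue of the extra 2-3 seconds per execution noted above.
lookupSquare :: Int -> Maybe Int
lookupSquare n = Map.lookup n expensiveTable

main :: IO ()
main = do
  print (lookupSquare 12) -- first use forces the shared table once
  print (lookupSquare 34) -- later uses reuse the already-built map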


@@ -672,10 +672,10 @@ resolveReferencesConstraint tables referencedTable = do
       tables
   let columns = table.columns
   let pks = P.filter (\column -> column.primary_key) columns
-  let nonRowidPks = P.filter (\column -> column.isRowid) pks
-  case nonRowidPks of
-    [] -> pure "rowid"
-    [column] -> pure column.column_name
+  let nonRowidPks = P.filter (\column -> P.not column.isRowid) pks
+  column <- case nonRowidPks of
+    [] -> P.find (\column -> column.isRowid) pks
+    [column] -> pure column
     -- Note: we currently do not support having composite primary keys
     -- referenced implicitly, as that would lead to multiple complications like:
     -- - figuring out the correct order for the references
@@ -685,6 +685,7 @@ resolveReferencesConstraint tables referencedTable = do
     -- do it as long as we keep track of the column order. Not sure it's worth the
     -- hassle though...
     _ -> Nothing
+  pure column.column_name

 -- See the docs for `resolveReferencesConstraint` for details
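Since the hunk above reworks how the implicitly referenced column is chosen, here is a self-contained sketch of the resulting rule. The Column record below is a simplified stand-in for AirGQL's table metadata, not the real type:

module Main where

import Data.List (find)

data Column = Column
  { columnName :: String
  , primaryKey :: Bool
  , isRowid :: Bool
  }

-- Pick the column an implicit foreign key should reference: a single explicit
-- (non-rowid) primary key wins, no explicit key falls back to the rowid key,
-- and composite primary keys are rejected (Nothing), as the comment explains.
referencedColumn :: [Column] -> Maybe String
referencedColumn columns = do
  let pks = filter primaryKey columns
  let nonRowidPks = filter (not . isRowid) pks
  column <- case nonRowidPks of
    [] -> find isRowid pks
    [column] -> pure column
    _ -> Nothing
  pure (columnName column)

main :: IO ()
main = do
  let rowid = Column "rowid" True True
  let email = Column "email" True False
  print (referencedColumn [rowid])        -- Just "rowid"
  print (referencedColumn [rowid, email]) -- Just "email"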


@@ -8,25 +8,20 @@
 {-# HLINT ignore "Replace case with maybe" #-}

 import Protolude (
-  Applicative (pure),
   Bool (False, True),
   Either (Right),
   FilePath,
   IO,
   Maybe (Just, Nothing),
   Monoid (mempty),
-  Text,
   fromMaybe,
   show,
   ($),
   (&),
-  (.),
-  (<$),
   (<>),
 )
 import Protolude qualified as P

-import Control.Monad.Catch (catchAll)
 import Data.Aeson (Value (Number))
 import Data.Aeson qualified as Ae
 import Data.Aeson.KeyMap qualified as KeyMap
@@ -118,6 +113,7 @@ import Tests.Utils (
   rmSpaces,
   shouldSaveDbs,
   testRoot,
+  unorderedShouldBe,
   withDataDbConn,
   withTestDbConn,
  )
@@ -2000,31 +1996,36 @@ testSuite = do
          }
        |]
-      expected :: Text
       expected =
-        "user error (Column progress cannot be set on conflicts without being explicitly provided)"
+        [raw|
+          {
+            "data": null,
+            "errors": [{
+              "locations": [{ "column": 3, "line": 2 }],
+              "path": ["insert_users"],
+              "message": "user error (Column progress cannot be set on conflicts without being explicitly provided)"
+            }]
+          }
+        |]

     conn <- SS.open dbPath
     Right tables <- getEnrichedTables conn
     schema <- getDerivedSchema defaultSchemaConf conn fixtureDbId tables
     Right _ <- graphql schema Nothing mempty firstQuery
-    Just err <-
-      catchAll
-        (Nothing <$ graphql schema Nothing mempty secondQuery)
-        (pure . Just . show)
+    Right err <- graphql schema Nothing mempty secondQuery

-    err `shouldBe` expected
+    err `unorderedShouldBe` expected

   it "supports deleting data and returning the deleted data" $ do
     conn <- SS.open dbPath
     execute_
       conn
       [sql|
         insert into users (name, email, created_utc)
         values
           ('John', 'john@del-test.com', '2021-01-01T00:00Z'),
           ('Eve', 'eve@del-test.com', '2021-01-02T00:00Z')
       |]

     let
       query =
@@ -2041,21 +2042,24 @@ testSuite = do
            }
          }
        |]
      expected =
        rmSpaces
-          [raw|{
-            "data": {
-              "delete_users": {
-                "affected_rows": 1,
-                "returning": [
-                  {
-                    "rowid": 2,
-                    "name": "Eve"
-                  }
-                ]
-              }
-            }
-          }|]
+          [raw|
+            {
+              "data": {
+                "delete_users": {
+                  "affected_rows": 1,
+                  "returning": [
+                    {
+                      "rowid": 2,
+                      "name": "Eve"
+                    }
+                  ]
+                }
+              }
+            }
+          |]

     Right tables <- getEnrichedTables conn
     schema <- getDerivedSchema defaultSchemaConf conn fixtureDbId tables
@@ -2106,14 +2110,14 @@ testSuite = do
      expected =
        rmSpaces
          [raw|
            {
              "data": {
                "insert_checks": {
                  "affected_rows": 1
+                }
                }
              }
-             }
-           |]
+          |]

     Right result <- graphql schema Nothing mempty mutation
@@ -2256,7 +2260,7 @@ testSuite = do
     let
       query =
         [gql|
-          mutation InsertUsers ($objects: [users_insert_input]) {
+          mutation InsertUsers ($objects: [users_insert_input!]!) {
            insert_users(objects: $objects) {
              affected_rows
            }
@@ -2646,16 +2650,16 @@ testSuite = do
     let expected =
           rmSpaces
             [raw|{
-              "data": { "insert_track": null },
+              "data": null,
               "errors": [
                 {
                   "locations": [ { "column": 3, "line": 2 } ],
                   "message":
                     "SQLite3 returned ErrorConstraint while attempting to perform step: FOREIGN KEY constraint failed",
                   "path": [ "insert_track" ]
                 }
              ]
            }|]

     Ae.encode result `shouldBe` expected
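Earlier in this test-file diff, the catchAll-based exception check is dropped in favour of reading the error straight out of the GraphQL response and comparing it with unorderedShouldBe, newly imported from Tests.Utils. That helper is not shown in this commit; a plausible, hypothetical sketch of such a JSON-aware assertion (the real Tests.Utils implementation may differ) is:

{-# LANGUAGE OverloadedStrings #-}

module Main where

import Data.Aeson (Value, decodeStrict)
import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8)
import Test.Hspec (Expectation, hspec, it, shouldBe)

-- Compare two JSON documents structurally: decoding to aeson's Value makes the
-- comparison insensitive to whitespace and object key order, unlike comparing
-- the rendered strings with a plain shouldBe.
unorderedShouldBe :: Text -> Text -> Expectation
unorderedShouldBe actual expected =
  decodeJson actual `shouldBe` decodeJson expected
  where
    decodeJson :: Text -> Maybe Value
    decodeJson = decodeStrict . encodeUtf8

main :: IO ()
main = hspec $
  it "ignores key order and spacing" $
    "{\"a\":1,\"b\":2}" `unorderedShouldBe` "{ \"b\": 2, \"a\": 1 }"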