commit fbee31a8498423624a8887573c54cf121506fc51 Author: Adrian Sieber <mail@adriansieber.com> Date: Fri May 3 08:39:25 2024 +0000 AirGQL: Clean up documentation, remove obsolete references to Airsequel diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d86532e --- /dev/null +++ b/.gitignore @@ -0,0 +1,5 @@ +.stack-work +*.hie +/bruno_collection +/data +/tests/*.db diff --git a/Setup.hs b/Setup.hs new file mode 100644 index 0000000..902b387 --- /dev/null +++ b/Setup.hs @@ -0,0 +1,4 @@ +import Distribution.Simple + + +main = defaultMain diff --git a/app/Main.hs b/app/Main.hs new file mode 100644 index 0000000..6362386 --- /dev/null +++ b/app/Main.hs @@ -0,0 +1,218 @@ +{-# LANGUAGE DeriveDataTypeable #-} +-- To look up git hash +{-# LANGUAGE TemplateHaskell #-} + +-- Necessary for cmdArgs +{-# OPTIONS -Wno-partial-fields #-} + +module Main (main) where + +import Protolude ( + Applicative (pure), + Eq ((==)), + FilePath, + IO, + Int, + Maybe (Just), + Semigroup ((<>)), + Show, + Text, + const, + putText, + repeat, + show, + ($), + (&), + (||), + ) +import Protolude qualified as P + +import Data.Data (Data) +import Data.Text qualified as T +import Database.SQLite.Simple qualified as SS +import GitHash (giDirty, giTag, tGitInfoCwd) +import Network.HTTP.Client.MultipartFormData () +import Network.Wai (Middleware) +import Network.Wai.Handler.Warp ( + defaultSettings, + runSettings, + setOnException, + setPort, + ) +import Network.Wai.Middleware.Cors ( + cors, + corsMethods, + corsRequestHeaders, + simpleCorsResourcePolicy, + simpleMethods, + ) +import System.Console.CmdArgs as CmdArgs ( + Default (def), + args, + auto, + cmdArgs, + help, + modes, + program, + summary, + typ, + (&=), + ) + +import AirGQL.ExternalAppContext (getExternalAppContext) +import AirGQL.Utils ( + getGraphiQLVersion, + getSqliteBinaryVersion, + getSqliteEmbeddedVersion, + ) +import Server.Server (platformApp) + + +data Cli + = Help + | Version + | -- Start the AirGQL server + -- 
serving the GraphQL endpoint for the specified SQLite database + Serve + { dbFilePath :: FilePath + -- TODO: , readOnly :: Bool + } + deriving (Show, Data) + + +cliHelp :: Cli +cliHelp = + Help + &= auto + + +cliVersion :: Cli +cliVersion = Version + + +cliServe :: Cli +cliServe = + Serve + { dbFilePath = def &= typ "Path to database file" &= args + -- TODO: , readOnly = def + } + &= help "Serve database via GraphQL" + + +corsMiddleware :: Middleware +corsMiddleware = + let + policy = + simpleCorsResourcePolicy + { corsRequestHeaders = ["Content-Type", "Authorization"] + , corsMethods = "PUT" : simpleMethods + } + in + cors (const $ Just policy) + + +-- | Imitates output from `git describe --always --dirty` +versionSlug :: Text +versionSlug = + T.pack $ + giTag $$tGitInfoCwd + <> (if giDirty $$tGitInfoCwd then "-dirty" else "") + + +main :: IO () +main = do + let + port :: Int = 4189 + + separatorLine = "\n" <> T.concat (P.take 80 $ repeat "=") + separatorLineThin = "\n" <> T.concat (P.take 80 $ repeat "-") + + runWarp = + runSettings $ + defaultSettings + & setPort port + & setOnException + ( \_ exception -> do + let exceptionText :: Text = show exception + if (exceptionText == "Thread killed by timeout manager") + || ( exceptionText + == "Warp: Client closed connection prematurely" + ) + then pure () + else do + putText exceptionText + ) + + buildBanner + :: Text + -> Text + -> Text + -> Text + -> Text + buildBanner + sqliteEmbeddedVersion + sqliteBinaryVersion + graphiQLVersion + baseUrl = + separatorLine + <> "\n\n" + <> "AirGQL Server\n" + <> separatorLineThin + <> "\n\n" + <> "Version:\t\t " + <> versionSlug + <> "\n\ + \GraphQL URL:\t\t " + <> baseUrl + <> "/graphql" + <> "\n\ + \\n\ + \SQLite Embedded version: " + <> sqliteEmbeddedVersion + <> "\n\ + \SQLite Binary version:\t " + <> sqliteBinaryVersion + <> "\n\ + \GraphiQL version:\t " + <> graphiQLVersion + <> "\n" + <> separatorLine + <> "\n" + + providedArgs <- + cmdArgs $ + modes + [ cliHelp + , 
cliVersion + , cliServe + ] + &= program "airgql" + &= summary (T.unpack versionSlug) + &= help "Automatic GraphQL API generation for SQLite databases" + + case providedArgs of + Help -> + putText "Run `airgql --help` for detailed usage instructions" + ---------- + Version -> + putText versionSlug + ---------- + Serve{dbFilePath} -> do + SS.withConnection dbFilePath $ \conn -> do + P.when (dbFilePath == "") $ + P.die "ERROR: No database file path was specified" + + let baseUrl :: Text = "http://localhost:" <> show port + ctx <- getExternalAppContext baseUrl + sqliteEmbeddedVersion <- getSqliteEmbeddedVersion conn + sqliteBinaryVersion <- getSqliteBinaryVersion ctx + graphiQLVersion <- getGraphiQLVersion + + putText $ + buildBanner + sqliteEmbeddedVersion + sqliteBinaryVersion + graphiQLVersion + baseUrl + + runWarp $ corsMiddleware $ platformApp ctx dbFilePath diff --git a/images/sql_to_graphql.png b/images/sql_to_graphql.png new file mode 100644 index 0000000..589f63e Binary files /dev/null and b/images/sql_to_graphql.png differ diff --git a/makefile b/makefile new file mode 100644 index 0000000..76e1b7e --- /dev/null +++ b/makefile @@ -0,0 +1,12 @@ +.PHONY: test +test: + stack \ + --stack-yaml stack-standalone.yaml \ + test + + +.PHONY: install +install: + stack \ + --stack-yaml stack-standalone.yaml \ + install diff --git a/package.yaml b/package.yaml new file mode 100644 index 0000000..356d72e --- /dev/null +++ b/package.yaml @@ -0,0 +1,143 @@ +name: airgql +version: 0.7.1.2 +synopsis: Automatically generate a GraphQL API for an SQLite database +description: | + AirGQL automatically generates a GraphQL API for SQLite databases. + It analyses the database schema + and builds the corresponding GraphQL introspection and data resolvers. + + The generated API supports all basic CRUD operations and + even complex queries and mutations including filters and pagination. + It's the perfect solution for easily integrating GraphQL support + into existing Haskell servers. 
+ + AirGQL is part of the Airsequel project, which provides a complete solution + for building web applications on top of SQLite databases. +homepage: https://github.com/Airsequel/AirGQL +license: AGPL-3.0-or-later +author: Feram GmbH +maintainer: adrian@feram.io +copyright: 2024 Feram GmbH +category: Web, Database, SQL, SQLite, GraphQL, Servant, CLI Tool + +extra-source-files: + - readme.md + +flags: + lib-only: + description: Only build/install the library and not the CLI tool. + manual: true + default: false + +dependencies: + - base >= 4.18.2 && < 4.19 + - protolude >= 0.3.4 && < 0.4 + - text >= 2.0.2 && < 2.1 + - sqlite-simple >= 0.4.19 && < 0.5 + +default-extensions: + - DataKinds + - DeriveGeneric + - DerivingStrategies + - DuplicateRecordFields + - FlexibleContexts + - FlexibleInstances + - GADTs + - GeneralizedNewtypeDeriving + - ImportQualifiedPost + - LambdaCase + - MultiParamTypeClasses + - MultiWayIf + - NamedFieldPuns + - NoImplicitPrelude + - NumericUnderscores + - OverloadedRecordDot + - OverloadedStrings + - RankNTypes + - RecordWildCards + - ScopedTypeVariables + - StandaloneDeriving + - TupleSections + - TypeApplications + - TypeFamilies + - TypeOperators + - TypeSynonymInstances + - UndecidableInstances + +ghc-options: + - -fno-warn-orphans + - -fwrite-ide-info + - -Weverything + - -Wno-all-missed-specialisations + - -Wno-missing-deriving-strategies + - -Wno-missing-kind-signatures + - -Wno-missing-safe-haskell-mode + - -Wno-unsafe + +library: + source-dirs: source + dependencies: + - aeson >= 2.1.2.1 && < 2.3 + - blaze-markup >= 0.8.3 && < 0.9 + - bytestring >= 0.11.5 && < 0.12 + - conduit >= 1.3.5 && < 1.4 + - directory >= 1.3.8 && < 1.4 + - double-x-encoding >= 1.2.1 && < 1.3 + - exceptions >= 0.10.7 && < 0.11 + - extra >= 1.7.14 && < 1.8 + - filepath >= 1.4.200 && < 1.5 + - graphql >= 1.2.0.1 && < 1.4 + - graphql-spice >= 1.0.2 && < 1.1 + - http-types >= 0.12.4 && < 0.13 + - process >= 1.6.17 && < 1.7 + - scientific >= 0.3.7 && < 0.4 + - 
servant >= 0.20.1 && < 0.21 + - servant-blaze >= 0.9.1 && < 0.10 + - servant-docs >= 0.13 && < 0.14 + - servant-multipart >= 0.12.1 && < 0.13 + - servant-server >= 0.20 && < 0.21 + - simple-sql-parser >= 0.6.1 && < 0.8 + - template-haskell >= 2.20.0 && < 2.21 + - time >= 1.12.2 && < 1.13 + - typed-process >= 0.2.11 && < 0.3 + - unix >= 2.8.4 && < 2.9 + - unordered-containers >= 0.2.20 && < 0.3 + - wai >= 3.2.4 && < 3.3 + - wai-extra >= 3.1.14 && < 3.2 + +executables: + airgql: + when: + - condition: flag(lib-only) + then: { buildable: false } + else: { buildable: true } + source-dirs: app + main: Main.hs + ghc-options: + - -threaded + dependencies: + - airgql + - cmdargs >= 0.10.22 && < 0.11 + - githash >= 0.1.7 && < 0.2 + - http-client >= 0.7.17 && < 0.8 + - wai >= 3.2.4 && < 3.3 + - wai-cors >= 0.2.7 && < 0.3 + - warp >= 3.3.31 && < 3.4 + +tests: + airgql-test: + main: Spec.hs + source-dirs: tests + dependencies: + - aeson >= 2.1.2.1 && < 2.3 + - airgql + - bytestring >= 0.11.5 && < 0.12 + - directory >= 1.3.8 && < 1.4 + - exceptions >= 0.10.7 && < 0.11 + - filepath >= 1.4.200 && < 1.5 + - graphql >= 1.2.0.1 && < 1.4 + - graphql-spice >= 1.0.2 && < 1.1 + - hspec >= 2.11.8 && < 2.12 + - servant-server >= 0.20 && < 0.21 + - unix >= 2.8.4 && < 2.9 + - unordered-containers >= 0.2.20 && < 0.3 diff --git a/readme.md b/readme.md new file mode 100644 index 0000000..d202e87 --- /dev/null +++ b/readme.md @@ -0,0 +1,101 @@ +# AirGQL + +Automatically generate a GraphQL API for an SQLite database. + +<img + alt="Diagram of SQL to GraphQL conversion" + src="./images/sql_to_graphql.png" + style="width: 60%;" +/> + +## How It Works + +It analyses the database schema +and builds the corresponding GraphQL introspection and data resolvers. + +The generated API supports all basic CRUD operations and +even complex queries and mutations including filters and pagination. 
+ +It is designed to be either used as a Haskell library +for integrating GraphQL support into existing servers +or as a standalone CLI app for quickly spinning up a backend. + +AirGQL is the core component of [Airsequel](https://www.airsequel.com/), +which provides a complete solution for building web applications +on top of SQLite databases. + + +## Installation + +### CLI Tool + +You can install the CLI app using +[Stack](https://docs.haskellstack.org/en/stable/): + +```sh +git clone https://github.com/Airsequel/AirGQL +cd AirGQL +make install +``` + + +### Library + +You can also use AirGQL in your Haskell project +by adding the [Hackage package](https://hackage.haskell.org/package/airgql) +as a dependency to your `package.yaml` or your `*.cabal` file: + +```yaml +dependencies: + - airgql + - … +``` + + +## Usage + +### CLI App + +Run the following command to start a GraphQL API server +for an existing SQLite database: + +```sh +stack run -- serve tests/example.sqlite +``` + +Then you can query the API like this: + +```sh +http POST http://localhost:4189/graphql \ + query='query { + songs(limit: 2) { + id + title + } + }' +``` + +It also supports mutations: + +```sh +http POST http://localhost:4189/graphql \ + query='mutation { + insert_songs(objects: [{ title: "New Song" }]) { + returning { + id + title + } + } + }' +``` + +Check out the documentation at +[docs.airsequel.com/graphql-api](https://docs.airsequel.com/graphql-api) +for more details on how to use all of its GraphQL features. + + +### Library + +Check out the code in the [app/Main.hs](./app/Main.hs) file for an example +of how to build a simple [Servant](https://www.servant.dev/) server +leveraging AirGQL. 
diff --git a/source/AirGQL/Config.hs b/source/AirGQL/Config.hs new file mode 100644 index 0000000..8cf8647 --- /dev/null +++ b/source/AirGQL/Config.hs @@ -0,0 +1,42 @@ +module AirGQL.Config ( + Config (..), + maxGraphqlResultCount, + defaultConfig, +) +where + +import Data.Bool (Bool (False)) +import Data.Int (Int) + + +-- | The maximum number of results allowed for the GraphiQL playground +maxGraphqlResultCount :: Int +maxGraphqlResultCount = 10000 + + +data Config = Config + { maxTablesPerDb :: Int + , maxColumnsPerTable :: Int + , maxRowsPerTable :: Int + , maxVisibleCellsPerTable :: Int + , maxDbSize :: Int -- Bytes + , maxCellSize :: Int -- Bytes + , hardHeapLimit :: Int -- Bytes + , sqlTimeoutTime :: Int -- Seconds + , allowRecursiveTriggers :: Bool + } + + +defaultConfig :: Config +defaultConfig = + Config + { maxTablesPerDb = 100 + , maxColumnsPerTable = 500 + , maxRowsPerTable = 100_000 + , maxVisibleCellsPerTable = 0 -- Not used currently + , maxDbSize = 100_000_000 -- Bytes + , maxCellSize = 10_000_000 -- Bytes + , hardHeapLimit = 500_000_000 -- Bytes + , sqlTimeoutTime = 20 + , allowRecursiveTriggers = False + } diff --git a/source/AirGQL/ExternalAppContext.hs b/source/AirGQL/ExternalAppContext.hs new file mode 100644 index 0000000..cfe6043 --- /dev/null +++ b/source/AirGQL/ExternalAppContext.hs @@ -0,0 +1,75 @@ +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +{-# HLINT ignore "Use maybe" #-} +module AirGQL.ExternalAppContext ( + SandboxingConfig (..), + ExternalAppContext (..), + getExternalAppContext, +) where + +import Protolude ( + FilePath, + IO, + Maybe (Just, Nothing), + Show, + Text, + not, + pure, + ($), + (&&), + (/=), + (<|>), + (==), + ) +import Protolude qualified as P + +import Data.ByteString qualified as BS +import Data.Text qualified as T +import System.Environment (lookupEnv) +import System.Info (os) +import System.Process.Typed (ExitCode (ExitSuccess), proc, readProcessStdout) + + +lookupBinaryPath :: Text -> IO (Maybe FilePath) 
+lookupBinaryPath name = do + (code, resultBS) <- readProcessStdout $ proc "which" [T.unpack name] + let result = T.strip $ P.decodeUtf8 $ BS.toStrict resultBS + pure $ + if code == ExitSuccess + && result /= "" + && not ("which: no" `T.isInfixOf` result) + then Just $ T.unpack result + else Nothing + + +data SandboxingConfig = SandboxingConfig + { firejail :: FilePath + , extraBinds :: [FilePath] + } + deriving (Show) + + +data ExternalAppContext = ExternalAppContext + { sqlite :: FilePath + , sqliteLib :: Maybe FilePath + , baseUrl :: Text + } + deriving (Show) + + +getExternalAppContext :: Text -> IO ExternalAppContext +getExternalAppContext baseUrl = do + sqlite <- lookupBinaryPath "sqlite3" + sqliteEnv <- lookupEnv "AIRGQL_SQLITE_BIN" + sqliteLib <- lookupEnv "AIRGQL_SQLITE_LIB" + + pure $ + ExternalAppContext + { baseUrl = baseUrl + , sqlite = P.fromMaybe "/usr/bin/sqlite3" $ sqliteEnv <|> sqlite + , sqliteLib = + sqliteLib + <|> if os == "darwin" + then Just "/usr/local/opt/sqlite/lib/libsqlite3.dylib" + else Nothing + } diff --git a/source/AirGQL/GQLWrapper.hs b/source/AirGQL/GQLWrapper.hs new file mode 100644 index 0000000..1e07c26 --- /dev/null +++ b/source/AirGQL/GQLWrapper.hs @@ -0,0 +1,47 @@ +{-| +Increase readability of code +by wrapping `graphql` library with descriptive wrappers +-} +module AirGQL.GQLWrapper ( + OutField (..), + outFieldToField, + InArgument (..), + inArgumentToArgument, +) +where + +import Protolude (Maybe, Text) + +import Language.GraphQL.Type (Value) +import Language.GraphQL.Type.In qualified as In +import Language.GraphQL.Type.Out qualified as Out + + +data OutField m = OutField + { descriptionMb :: Maybe Text + , fieldType :: Out.Type m + , arguments :: In.Arguments + } + + +outFieldToField :: OutField m -> Out.Field m +outFieldToField outField = + Out.Field + outField.descriptionMb + outField.fieldType + outField.arguments + + +data InArgument = InArgument + { argDescMb :: Maybe Text + , argType :: In.Type + , valueMb :: Maybe 
Value + } + + +inArgumentToArgument :: InArgument -> In.Argument +inArgumentToArgument inArgument = + In.Argument + inArgument.argDescMb + inArgument.argType + inArgument.valueMb diff --git a/source/AirGQL/GraphQL.hs b/source/AirGQL/GraphQL.hs new file mode 100644 index 0000000..c4539fa --- /dev/null +++ b/source/AirGQL/GraphQL.hs @@ -0,0 +1,1674 @@ +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +{-# HLINT ignore "Use list comprehension" #-} +{-# HLINT ignore "Replace case with maybe" #-} + +module AirGQL.GraphQL ( + getDerivedSchema, + queryType, + sqlDataToGQLValue, + getMutationResponse, + gqlValueToSQLData, +) +where + +import Protolude ( + Applicative (pure), + Bool (False, True), + Double, + Either (Left, Right), + Eq ((==)), + IO, + Int, + Integer, + Maybe (Just, Nothing), + MonadIO (liftIO), + MonadReader (ask), + Monoid (mempty), + ReaderT, + Semigroup ((<>)), + Text, + Traversable (sequence), + fromIntegral, + fromMaybe, + notElem, + otherwise, + show, + when, + ($), + (&), + (&&), + (.), + (<$>), + (<&>), + (<=), + (>), + (>=), + ) +import Protolude qualified as P + +import Control.Exception (throw) +import Control.Monad.Catch (catchAll) +import Data.Aeson (object, (.=)) +import Data.HashMap.Strict qualified as HashMap +import Data.List (nub) +import Data.Ord (Ord (min)) +import Data.Text (intercalate, isInfixOf, pack, toUpper) +import Data.Text qualified as T +import Database.SQLite.Simple ( + Connection, + Query (Query), + SQLData (SQLBlob, SQLFloat, SQLInteger, SQLNull, SQLText), + changes, + execute_, + query, + query_, + ) +import Database.SQLite.Simple qualified as SS +import DoubleXEncoding (doubleXDecode, doubleXEncodeGql) +import GHC.IO.Exception (userError) +import Language.GraphQL.AST.Document (Name) +import Language.GraphQL.Error (ResolverException (ResolverException)) +import Language.GraphQL.Type as GQL ( + Arguments (Arguments), + EnumType (EnumType), + EnumValue (EnumValue), + InputField (InputField), + Resolver (EventStreamResolver, 
ValueResolver), + ScalarType, + Schema, + Value (Boolean, Enum, Float, Int, List, Null, Object, String), + boolean, + float, + int, + schema, + string, + ) +import Language.GraphQL.Type.In ( + InputObjectType (InputObjectType), + Type (NamedInputObjectType), + ) +import Language.GraphQL.Type.In qualified as In +import Language.GraphQL.Type.Out qualified as Out +import Numeric (showFFloat) + +import AirGQL.Config ( + maxGraphqlResultCount, + ) +import AirGQL.GQLWrapper ( + InArgument (InArgument, argDescMb, argType, valueMb), + OutField (OutField, arguments, descriptionMb, fieldType), + inArgumentToArgument, + outFieldToField, + ) +import AirGQL.Introspection (getSchemaResolver, typeNameResolver) +import AirGQL.Lib ( + AccessMode (ReadAndWrite, ReadOnly, WriteOnly), + ColumnEntry (column_name, datatype, datatype_gql), + GqlTypeName (root), + TableEntryRaw (name), + column_name_gql, + getColumns, + ) +import AirGQL.Types.OutObjectType ( + OutObjectType (OutObjectType, descriptionMb, fields, interfaceTypes, name), + outObjectTypeToObjectType, + ) +import AirGQL.Types.PragmaConf (getSQLitePragmas) +import AirGQL.Types.SchemaConf ( + SchemaConf (accessMode, maxRowsPerTable, pragmaConf), + ) +import AirGQL.Types.Utils (encodeToText) +import AirGQL.Utils (colToFileUrl, collectErrorList, quoteKeyword, quoteText) + + +typeNameToScalarType :: Maybe GqlTypeName -> ScalarType +typeNameToScalarType Nothing = string +typeNameToScalarType (Just typeName) = + case typeName.root of + "Int" -> int + "Float" -> float + "String" -> string + "Boolean" -> boolean + _ -> string + + +-- | Prevent numbers of being shown with exponents (0.01 instead of 1e-2) +showFullPrecision :: Double -> Text +showFullPrecision x = + pack $ showFFloat Nothing x "" + + +showGqlValue :: Value -> Text +showGqlValue = \case + String str -> str + Int integer -> show integer + Float double -> showFullPrecision double + Boolean bool -> show bool + Enum text -> text + List list -> "[" <> T.intercalate ", " (list 
<&> showGqlValue) <> "]" + Object obj -> show $ Object obj + Null -> "null" + + +gqlValueToSQLText :: Value -> Text +gqlValueToSQLText = \case + String str -> quoteText str + Int integer -> show integer + Float double -> showFullPrecision double + Boolean bool -> T.toUpper $ show bool + Enum text -> text + List list -> + quoteText $ + "[" <> T.intercalate ", " (list <&> showGqlValue) <> "]" + Object obj -> quoteText $ show $ Object obj + Null -> "NULL" + + +-- TODO: Add Support for GraphQL's type "ID" + +-- | Convert any GraphQL value to a nullable String +gqlValueToNullableString :: Value -> Value +gqlValueToNullableString value = + case value of + String text -> String text + Null -> Null + val -> String $ showGqlValue val + + +colNamesWithValResolver :: [ColumnEntry] -> [(Text, Resolver IO)] +colNamesWithValResolver columnEntries = + columnEntries <&> \colEntry -> + let + fieldToResolve = + Out.Field + (Just colEntry.column_name_gql) + ( Out.NamedScalarType $ + typeNameToScalarType + colEntry.datatype_gql + ) + mempty + + resolvedValue = do + context <- ask + pure $ case context.values of + Object obj -> + case obj & HashMap.lookup colEntry.column_name_gql of + Nothing -> String "Error: Field does not exist" + Just val -> + case colEntry.datatype of + -- Coerce value to nullable String + -- if no datatype is set. + -- This happens for columns in views. + "" -> gqlValueToNullableString val + _ -> val + _ -> String "Error: Value could not be retrieved" + in + ( colEntry.column_name_gql + , ValueResolver fieldToResolve resolvedValue + ) + + +buildSortClause :: [ColumnEntry] -> [(Name, Value)] -> Text +buildSortClause columnEntries orderElems = + if P.null orderElems + then + if "rowid" `P.elem` (columnEntries <&> T.toLower . 
AirGQL.Lib.column_name) + then "ORDER BY rowid ASC" + else "" + else + "ORDER BY " + <> ( orderElems + <&> ( \(name, value) -> + ( name + , case value of + Enum "ASC" -> "ASC" + Enum "asc" -> "ASC" + Enum "DESC" -> "DESC" + Enum "desc" -> "DESC" + _ -> "" + ) + ) + <&> (\(name, order) -> name <> " " <> order) + & T.intercalate ", " + ) + + +data Pagination = Pagination + { limit :: Int + , offset :: Maybe Int + } + + +buildPaginationClause :: Maybe Pagination -> Text +buildPaginationClause = \case + Nothing -> "" + Just pagination -> + P.fold + [ "LIMIT " + , show (min pagination.limit maxGraphqlResultCount) + , case pagination.offset of + Nothing -> "" + Just offset -> "\nOFFSET " <> show offset + ] + + +getColNamesQuoted :: [ColumnEntry] -> [Text] +getColNamesQuoted columnEntries = + columnEntries + <&> ( \col -> + ( if "BLOB" `T.isPrefixOf` col.datatype + then + "IIF(" + <> quoteKeyword col.column_name + <> " IS NOT NULL, rowid, NULL)" + <> " AS " + <> quoteKeyword col.column_name + else quoteKeyword col.column_name + ) + ) + + +opAndValToSql :: HashMap.HashMap Text Value -> [Text] +opAndValToSql operatorAndValue = + case HashMap.toList operatorAndValue of + [("eq", value)] -> + pure $ + if value == Null + then " IS NULL" + else " == " <> gqlValueToSQLText value + [("neq", value)] -> + if value == Null + then pure " IS NOT NULL" + else + [ " != " <> gqlValueToSQLText value + , " IS NULL" + ] + [("in", List values)] -> + let listValues = values <&> gqlValueToSQLText & intercalate "," + in [" IN (" <> listValues <> ")"] + [("nin", List values)] -> + let listValues = values <&> gqlValueToSQLText & intercalate "," + in [" NOT IN (" <> listValues <> ")"] + <> if P.elem Null values + then [] + else [" IS NULL"] + [("gt", value)] -> [" > " <> gqlValueToSQLText value] + [("gte", value)] -> [" >= " <> gqlValueToSQLText value] + [("lt", value)] -> [" < " <> gqlValueToSQLText value] + [("lte", value)] -> [" <= " <> gqlValueToSQLText value] + [("like", value)] -> [" like " 
<> gqlValueToSQLText value] + [("ilike", value)] -> [" like " <> gqlValueToSQLText value] + filter -> do + throw $ + userError $ + "Error: Filter " + <> show filter + <> " is not yet supported" + + +getWhereClause :: [(Text, Value)] -> Text +getWhereClause filterElements = + if P.null filterElements + then " " + else + "WHERE " + <> ( filterElements + <&> ( \(colName, x) -> case x of + Object operatorAndValue -> + let orClauses = + opAndValToSql operatorAndValue + <&> (colName <>) + & intercalate " OR " + in "(" <> orClauses <> ")" + _ -> "" + ) + & intercalate " AND " + ) + + +setCaseInsensitive :: Connection -> [(Text, Value)] -> IO () +setCaseInsensitive connection filterElements = do + when + ( filterElements + & P.any + ( \(_, value) -> case value of + Object operatorAndValue -> + case HashMap.toList operatorAndValue of + [("ilike", _)] -> True + _ -> False + _ -> False + ) + ) + $ do + execute_ connection "PRAGMA case_sensitive_like = False" + + +executeSqlQuery + :: Connection + -> Text + -> [ColumnEntry] + -> [(Text, Value)] + -> [(Text, Value)] + -> Maybe Pagination + -> IO [[SQLData]] +executeSqlQuery + connection + tableName + colEntries + filterElems + orderElems + paginationMb = do + let + sqlQuery :: Query + sqlQuery = + Query $ + "SELECT " + <> intercalate ", " (getColNamesQuoted colEntries) + <> "\n" + <> "FROM " + <> quoteKeyword tableName + <> "\n" + <> getWhereClause filterElems + <> "\n" + <> buildSortClause colEntries orderElems + <> "\n" + <> buildPaginationClause paginationMb + + setCaseInsensitive connection filterElems + + liftIO $ query_ connection sqlQuery + + +colNamesWithFilterField :: Text -> [ColumnEntry] -> [(Text, InputField)] +colNamesWithFilterField tableName columnEntries = + columnEntries <&> \colEntry -> + let + inputField = + InputField + (Just $ "Filter for " <> colEntry.column_name_gql) + ( NamedInputObjectType $ + InputObjectType + (doubleXEncodeGql tableName <> "_filter") + (Just "Filter object for the column") + ( let 
theInputField = + InputField + (Just "Value to compare to") + ( In.NamedScalarType $ + typeNameToScalarType + colEntry.datatype_gql + ) + Nothing -- Default value + listInputField = + InputField + (Just "Values to compare to") + ( In.ListType $ + In.NamedScalarType $ + typeNameToScalarType + colEntry.datatype_gql + ) + Nothing -- Default value + in HashMap.fromList + [ ("eq", theInputField) + , ("neq", theInputField) + , ("gt", theInputField) + , ("gte", theInputField) + , ("lt", theInputField) + , ("lte", theInputField) + , ("like", theInputField) + , ("ilike", theInputField) + , ("in", listInputField) + , ("nin", listInputField) + ] + ) + ) + Nothing -- Default value + in + ( colEntry.column_name_gql + , inputField + ) + + +queryType + :: Connection + -> AccessMode + -> Text + -> [TableEntryRaw] + -> IO (Out.ObjectType IO) +queryType connection accessMode dbId tables = do + let + documentation :: Text + documentation = + "Available queries for database \"" <> dbId <> "\"" + + getOutField :: Text -> IO (Out.Field IO) + getOutField tableName = do + columnEntries <- liftIO $ getColumns dbId connection tableName + + let + colNamesWithOrderingTerm :: [(Text, InputField)] + colNamesWithOrderingTerm = + columnEntries <&> \colEntry -> + ( colEntry.column_name_gql + , InputField + (Just $ "Ordering term for " <> colEntry.column_name_gql) + ( In.NamedEnumType $ + EnumType + "OrderingTerm" + (Just "Ordering object for the column") + ( HashMap.fromList + [ ("ASC", EnumValue (Just "ASC")) + , ("asc", EnumValue (Just "ASC")) + , ("DESC", EnumValue (Just "DESC")) + , ("desc", EnumValue (Just "DESC")) + ] + ) + ) + Nothing -- Default value + ) + + typeNameField :: Text -> [(Text, Resolver IO)] + typeNameField nameOfTable = + let + typeNameOutField = + outFieldToField $ + OutField + { descriptionMb = Just $ "The type name of " <> nameOfTable + , fieldType = Out.NonNullScalarType string + , arguments = HashMap.empty + } + in + [ + ( "__typename" + , ValueResolver typeNameOutField 
$ + pure $ + String $ + doubleXEncodeGql nameOfTable <> "_row" + ) + ] + + pure $ + outFieldToField $ + OutField + { descriptionMb = Just $ "Provides entries from " <> tableName + , fieldType = + Out.ListType $ + Out.NamedObjectType $ + Out.ObjectType + tableName + (Just "short desc") + [] + ( HashMap.fromList $ + colNamesWithValResolver columnEntries + <> typeNameField tableName + ) + , arguments = + HashMap.fromList + [ + ( "filter" + , inArgumentToArgument $ + InArgument + { argDescMb = Just "Filter objects" + , argType = + NamedInputObjectType $ + InputObjectType + (doubleXEncodeGql tableName <> "_filter") + ( Just + "Filter objects for the specified columns" + ) + (HashMap.fromList (colNamesWithFilterField tableName columnEntries)) + , valueMb = Nothing + } + ) + , + ( "order_by" + , inArgumentToArgument $ + InArgument + { argDescMb = Just "Order by the specified columns" + , argType = + In.ListType $ + In.NamedInputObjectType $ + InputObjectType + (doubleXEncodeGql tableName <> "_order_by") + (Just "Options for ordering by columns") + (HashMap.fromList colNamesWithOrderingTerm) + , valueMb = Nothing + } + ) + , + ( "limit" + , inArgumentToArgument $ + InArgument + { argDescMb = + Just "Limit the number of returned rows." 
+ , argType = In.NamedScalarType int + , valueMb = Nothing + } + ) + , + ( "offset" + , inArgumentToArgument $ + InArgument + { argDescMb = + Just + "Change the index rows \ + \start being returned from" + , argType = In.NamedScalarType int + , valueMb = Nothing + } + ) + ] + } + -- -- TODO: Use for retrieving record by primary key + -- , arguments = HashMap.fromList $ columnEntries + -- <&> (\colEntry -> + -- ( colEntry.column_name_gql :: Text + -- , inArgumentToArgument $ InArgument + -- { argDescMb = Just "Retrieve object by primary key" + -- , argType = In.NamedScalarType $ + -- typeNameToScalarType $ colEntry.datatype + -- , valueMb = Nothing + -- } + -- ) + -- ) + + getDbEntries :: Text -> Out.Resolve IO + getDbEntries tableName = do + context <- ask + colEntries <- liftIO $ getColumns dbId connection tableName + + rows :: [[SQLData]] <- case context.arguments of + Arguments args -> do + filterElements <- case args & HashMap.lookup "filter" of + Nothing -> pure [] + Just colToFilter -> case colToFilter of + Object filterObj -> case HashMap.toList filterObj of + [] -> P.throwIO $ userError "Error: Filter must not be empty" + filterElements -> pure filterElements + _ -> pure [] + + orderElements :: [(Name, Value)] <- + case args & HashMap.lookup "order_by" of + Nothing -> pure [] + Just colToOrder -> case colToOrder of + List objects -> + -- => [Value] + objects + -- => IO [[(Name, Value)]] + & P.traverse + ( \case + Object orderObject -> case HashMap.toList orderObject of + [] -> P.throwIO $ userError "Error: Order must not be empty" + orderElements -> pure orderElements + _ -> pure [] -- Should not be reachable + ) + -- => IO [(Name, Value)] + <&> P.join + _ -> pure [] + + limitElements :: Maybe P.Int32 <- + case args & HashMap.lookup "limit" of + Just (Int limit) + | limit >= 0 -> + pure (Just limit) + | otherwise -> + P.throwIO $ + userError + "Error: limit must be positive" + _ -> pure Nothing + + paginationMb :: Maybe Pagination <- + case (limitElements, 
args & HashMap.lookup "offset") of + (Just limit, Just (Int offset)) + | offset >= 0 -> + pure $ + Just $ + Pagination + (fromIntegral limit) + (Just $ fromIntegral offset) + | otherwise -> + P.throwIO $ userError "Error: offset must be positive" + (Just limit, _) -> + pure $ + Just $ + Pagination + (fromIntegral limit) + Nothing + (Nothing, Just (Int _)) -> + P.throwIO $ + userError + "Error: cannot specify offset \ + \without also specifying a limit" + _ -> pure Nothing + + let + countQuery :: Query + countQuery = + Query $ + P.fold + [ "SELECT COUNT() FROM" + , quoteKeyword tableName + , "\n" + , getWhereClause filterElements + ] + + -- Will be equal `Just numRows` when the number of + -- returned rows is too large. + tooManyReturnedRows :: Maybe Int <- case paginationMb of + -- Limit doesn't seem to affect COUNT(), + -- so we consider it manually. + Just pagination + | pagination.limit <= maxGraphqlResultCount -> + pure Nothing + _ -> do + results <- liftIO $ SS.query_ connection countQuery + + let numRows = case P.head results of + Just numRowsOnly -> SS.fromOnly numRowsOnly + Nothing -> 0 + + pure $ + if numRows > maxGraphqlResultCount + then Just numRows + else Nothing + + P.for_ tooManyReturnedRows $ \numRows -> do + P.throwIO $ + userError $ + P.fold + [ "The graphql API cannot return more than " + , show maxGraphqlResultCount + , " entries at a time. Your query would have returned " + , show numRows + , " rows. " + , "Consider setting the `limit` argument on your query: `{ " + , T.unpack tableName + , " (limit: 50) { ... 
} }`" + ] + + liftIO $ + executeSqlQuery + connection + tableName + colEntries + filterElements + orderElements + paginationMb + + rowsToList dbId tableName colEntries rows + + getResolvers :: IO (HashMap.HashMap Text (Resolver IO)) + getResolvers = do + let + getTableTuple :: TableEntryRaw -> IO (Text, Resolver IO) + getTableTuple table = do + outField <- getOutField table.name + pure + ( doubleXEncodeGql table.name + , ValueResolver + outField + ( -- Exceptions must be converted to ResolverExceptions + -- to be picked up by GQL query executor + catchAll + (getDbEntries table.name) + (throw . ResolverException) + ) + ) + + getTableTuples :: IO [(Text, Resolver IO)] + getTableTuples = + P.for tables getTableTuple + + getTableTuples <&> HashMap.fromList + + -- -- TODO: Add support for retriving record by ID + -- getResolversPrimaryKey :: IO (HashMap.HashMap Text (Resolver IO)) + -- getResolversPrimaryKey = do + -- let + -- getTableTuple table = do + -- outField <- getOutField $ table.name + -- pure + -- ( table.name) <> "_by_pk" + -- , ValueResolver + -- outField + -- (getDbEntries $ table.name) + -- ) + + -- getTableTuples :: IO [(Text, Resolver IO)] + -- getTableTuples = + -- sequence $ tables <&> getTableTuple + + -- getTableTuples <&> HashMap.fromList + + resolvers <- getResolvers + schemaResolver <- getSchemaResolver dbId connection accessMode tables + + -- resolversPrimaryKey <- getResolversPrimaryKey + let + -- Resolve = ReaderT Context m Value + wrapResolve resolve = do + when (accessMode == WriteOnly) $ do + throw $ + ResolverException $ + userError "Cannot read field using writeonly access code" + resolve + + protectResolver = \case + ValueResolver field resolve -> + ValueResolver field (wrapResolve resolve) + EventStreamResolver field resolve subscribe -> + EventStreamResolver field (wrapResolve resolve) subscribe + + pure $ + outObjectTypeToObjectType $ + OutObjectType + { name = "Query" + , descriptionMb = Just documentation + , interfaceTypes = [] + , 
fields =
            P.fold
              [ schemaResolver
              , typeNameResolver
              , resolvers
              -- , resolversPrimaryKey
              ]
              <&> protectResolver
        }


-- | Convert a single SQLite cell into a GraphQL value.
--
-- WARNING: Also change duplicate `sqlDataToAesonValue`
--
-- Integers stored in columns whose declared type mentions BOOL are
-- surfaced as GraphQL booleans; all other integers must fit into a
-- 32-bit int because that is all GraphQL's `Int` guarantees.
sqlDataToGQLValue :: Text -> SQLData -> Either Text Value
sqlDataToGQLValue datatype sqlData =
  let
    -- Column is treated as boolean when its declared type contains "BOOL"
    isBoolColumn = isInfixOf "BOOL" $ toUpper datatype
  in
    case sqlData of
      SQLInteger int64
        | isBoolColumn ->
            pure $ case int64 of
              0 -> Boolean False
              _ -> Boolean True
        | int64 >= fromIntegral (P.minBound :: P.Int32)
            && int64 <= fromIntegral (P.maxBound :: P.Int32) ->
            pure $ Int $ fromIntegral int64 -- fits into Int32
        | otherwise ->
            Left $
              "Integer "
                <> show int64
                <> " would overflow. "
                <> "This happens because SQLite uses 64-bit ints, "
                <> "but GraphQL uses 32-bit ints. "
                <> "Use a Number (64-bit float) or Text column instead."
      SQLFloat double -> pure $ Float double
      SQLText text -> pure $ String text
      SQLBlob byteString -> pure $ String $ show byteString
      SQLNull -> pure Null


{-| Convert a GraphQL `Value` to a `SQLData`
TODO: ?
-> SQLBlob $ string +-} +gqlValueToSQLData :: Value -> SQLData +gqlValueToSQLData = \case + Int int32 -> SQLInteger $ fromIntegral int32 -- Int64 + Float double -> SQLFloat double + String text -> SQLText text + Null -> SQLNull + Boolean aBool -> + if aBool + then SQLInteger 1 + else SQLInteger 0 + Enum name -> SQLText name + List aList -> SQLText $ show aList + Object obj -> SQLText $ show obj + + +mutationTypeNameField :: Text -> (Text, Resolver IO) +mutationTypeNameField nameOfTable = + let + typeNameOutField = + outFieldToField $ + OutField + { descriptionMb = Just $ "The type name of " <> nameOfTable + , fieldType = Out.NonNullScalarType string + , arguments = HashMap.empty + } + in + ( "__typename" + , ValueResolver typeNameOutField $ + pure $ + String $ + doubleXEncodeGql nameOfTable <> "_mutation_response" + ) + + +getMutationResponse + :: Text + -> [ColumnEntry] + -> Out.Type IO +getMutationResponse tableName columnEntries = + Out.NamedObjectType $ + outObjectTypeToObjectType $ + OutObjectType + { name = doubleXEncodeGql tableName <> "_mutation_response" + , descriptionMb = + Just $ + tableName <> " mutation response description" + , interfaceTypes = [] + , fields = + HashMap.fromList + [ + ( "affected_rows" + , let + field :: Out.Field m + field = + outFieldToField $ + OutField + { descriptionMb = Just "nonNullInt description" + , fieldType = Out.NonNullScalarType int + , arguments = HashMap.empty + } + + value :: ReaderT Out.Context IO Value + value = do + context <- ask + case context & Out.values of + Object obj -> + pure $ + fromMaybe (Int 0) $ + HashMap.lookup "affected_rows" obj + _ -> pure $ Int 0 + in + ValueResolver field value + ) + , + ( "returning" + , let + field :: Out.Field IO + field = + outFieldToField $ + OutField + { descriptionMb = + Just + "Non null returning description" + , fieldType = + Out.NonNullListType $ + Out.NamedObjectType $ + Out.ObjectType + "returning" + (Just "short desc") + [] + ( HashMap.fromList $ + 
colNamesWithValResolver columnEntries + ) + , arguments = HashMap.empty + } + + value :: ReaderT Out.Context IO Value + value = do + context <- ask + case context & Out.values of + Object obj -> + pure $ + fromMaybe (Object P.mempty) $ + HashMap.lookup "returning" obj + _ -> pure $ Object P.mempty + in + ValueResolver field value + ) + , mutationTypeNameField tableName + ] + } + + +rowsToList :: (MonadIO m) => Text -> Text -> [ColumnEntry] -> [[SQLData]] -> m Value +rowsToList dbId tableName columnEntries updatedRows = + let + buildMetadataJson :: Text -> Text -> Text + buildMetadataJson colName rowid = + object ["url" .= colToFileUrl dbId tableName colName rowid] + & encodeToText + + parseSqlData :: (ColumnEntry, SQLData) -> Either (Text, Text) (Text, Value) + parseSqlData (colEntry, colVal) = + if "BLOB" `T.isPrefixOf` colEntry.datatype + then + pure + ( colEntry.column_name_gql + , case colVal of + SQLNull -> Null + SQLInteger id -> + String $ + buildMetadataJson colEntry.column_name (show id) + SQLText id -> + String $ + buildMetadataJson colEntry.column_name id + _ -> Null + ) + else case sqlDataToGQLValue colEntry.datatype colVal of + Left err -> + Left + (colEntry.column_name_gql, err) + Right gqlData -> + Right + (colEntry.column_name_gql, gqlData) + in + updatedRows + <&> ( \row -> + -- => [(ColumnEntry, SQLData)] + P.zip columnEntries row + -- => [Either (Text, Text) (Text, Value)] + <&> parseSqlData + -- => Either [(Text, Text)] (Text, Value) + & collectErrorList + -- => Either [(Text, Text)] (HashMap Text Value) + <&> HashMap.fromList + -- => Either [(Text, Text)] Value + <&> Object + ) + -- => Either [[(Text, Text)]] [Value] + & collectErrorList + & \case + Right values -> pure $ List values + Left errors -> + let + errorLines = + P.join errors + <&> \(column, err) -> "On column " <> show column <> ": " <> err + in + P.throwIO $ + userError $ + T.unpack $ + "Multiple errors occurred:\n" <> P.unlines errorLines + + +executeSqlMutation + :: Connection + 
-> Text + -> HashMap.HashMap Text Value + -> [ColumnEntry] + -> [(Text, Value)] + -> IO (Int, [[SQLData]]) +executeSqlMutation connection tableName args columnEntries filterElements = do + let + colNamesToUpdateRaw :: [Text] + colNamesToUpdateRaw = + case HashMap.lookup "set" args of + Just (Object dataObj) -> HashMap.keys dataObj + _ -> [] + + colNamesToUpdate :: [Text] + colNamesToUpdate = + columnEntries + <&> column_name + <&> ( \columnName -> + if doubleXEncodeGql columnName `P.elem` colNamesToUpdateRaw + then Just columnName + else Nothing + ) + & P.catMaybes + + columnNamesText :: Text + columnNamesText = + columnEntries + <&> column_name + <&> quoteKeyword + & intercalate ", " + + setText :: Text + setText = + colNamesToUpdate + <&> (\columnName -> quoteKeyword columnName <> " = ?") + & intercalate ", " + + valuesToSet :: [SQLData] + valuesToSet = + case HashMap.lookup "set" args of + Just (Object dataObj) -> + columnEntries + <&> column_name + <&> ( \columnName -> + HashMap.lookup + (doubleXEncodeGql columnName) + dataObj + ) + & P.catMaybes + <&> gqlValueToSQLData + _ -> [] + + updatedRows :: [[SQLData]] <- + if setText == "" + then pure [] + else + let + sqlQuery = + Query $ + "UPDATE " + <> quoteKeyword tableName + <> "\n" + <> "SET " + <> setText + <> "\n" + <> getWhereClause filterElements + <> "\n" + <> "RETURNING " + <> columnNamesText + + colTypesToUpdate :: [Text] + colTypesToUpdate = + columnEntries + <&> ( \colEntry -> + if doubleXEncodeGql colEntry.column_name + `P.elem` colNamesToUpdateRaw + then Just colEntry.datatype + else Nothing + ) + & P.catMaybes + + valuesToSetNorm = + P.zip valuesToSet colTypesToUpdate + <&> \(val, datatype) -> + if (val == SQLText "{}") + P.&& ("BLOB" `T.isPrefixOf` T.toUpper datatype) + then SQLBlob "" + else val + in + catchAll + ( liftIO $ do + setCaseInsensitive connection filterElements + query connection sqlQuery valuesToSetNorm + ) + (throw . 
ResolverException)

  liftIO $
    changes connection
      & P.fmap (,updatedRows)


{-| Build the GraphQL `Mutation` object type for the given database.

Provides `insert_<table>`, `update_<table>`, and `delete_<table>`
resolvers for every table, limited by `maxRowsPerTable`.
-}
mutationType
  :: Connection
  -> Integer
  -> Text
  -> [TableEntryRaw]
  -> IO (Maybe (Out.ObjectType IO))
mutationType connection maxRowsPerTable dbId tables = do
  let
    documentation =
      -- FIX: said "Available queries" (copied from the query type),
      -- but this describes the mutations
      "Available mutations for database \"" <> dbId <> "\""

    -- Input object type backing the `filter` argument of a table
    getTableFilterType :: Text -> [ColumnEntry] -> InputObjectType
    getTableFilterType tableName columnEntries =
      InputObjectType
        (doubleXEncodeGql tableName <> "_filter")
        ( Just
            "Filter objects for the specified columns"
        )
        (HashMap.fromList (colNamesWithFilterField tableName columnEntries))

    -- Out field for `insert_<table>`, including the
    -- `objects` and `on_conflict` arguments
    getOutField :: Text -> IO (Out.Field IO)
    getOutField tableName = do
      columnEntries <- liftIO $ getColumns dbId connection tableName

      let
        colNamesWithField :: [(Text, InputField)]
        colNamesWithField =
          columnEntries <&> \colEntry ->
            let
              inputField =
                InputField
                  (Just colEntry.column_name_gql)
                  ( In.NamedScalarType $
                      typeNameToScalarType colEntry.datatype_gql
                  )
                  Nothing -- No default value
            in
              ( colEntry.column_name_gql
              , inputField
              )

      let
        objectsType =
          inArgumentToArgument $
            InArgument
              { argDescMb =
                  Just
                    "Objects to be inserted into the database"
              , argType =
                  In.ListType $
                    NamedInputObjectType $
                      InputObjectType
                        ( doubleXEncodeGql tableName
                            <> "_insert_input"
                        )
                        ( Just
                            "Object to be inserted into the database"
                        )
                        (HashMap.fromList colNamesWithField)
              , valueMb = Nothing
              }

        -- FIX: typo "brtoken" -> "broken"
        onConflictDescription =
          "Specifies how to handle broken unique constraints" :: Text

        columnEnumVariants =
          columnEntries
            <&> \entry ->
              (entry.column_name_gql, EnumValue Nothing)

        columnEnumType =
          EnumType
            (doubleXEncodeGql tableName <> "_column")
            -- FIX: typo "colum" -> "column"
            (Just "This enum contains a variant for each column in the table")
            (HashMap.fromList columnEnumVariants)

        onConflictType =
          inArgumentToArgument $
            InArgument
              { argDescMb = Just onConflictDescription
              , argType =
                  In.ListType
                    $
In.NonNullInputObjectType + $ InputObjectType + ( doubleXEncodeGql tableName + <> "_upsert_on_conflict" + ) + (Just onConflictDescription) + $ HashMap.fromList + [ + ( "constraint" + , InputField + (Just "columns to handle conflicts of") + ( In.NonNullListType $ + In.NonNullEnumType columnEnumType + ) + Nothing + ) + , + ( "update_columns" + , InputField + (Just "columns to override on conflict") + ( In.NonNullListType $ + In.NonNullEnumType columnEnumType + ) + Nothing + ) + , + ( "where" + , InputField + (Just "filter specifying which conflicting columns to update") + ( In.NamedInputObjectType $ + getTableFilterType tableName columnEntries + ) + Nothing + ) + ] + , valueMb = Nothing + } + + pure $ + outFieldToField $ + OutField + { descriptionMb = Just "description" + , fieldType = getMutationResponse tableName columnEntries + , arguments = + HashMap.fromList + [ ("objects", objectsType) + , ("on_conflict", onConflictType) + ] + } + + getColValue :: HashMap.HashMap Text Value -> Text -> Value + getColValue rowObj columnName = + HashMap.findWithDefault Null (doubleXEncodeGql columnName) rowObj + + executeDbInserts :: Text -> ReaderT Out.Context IO Value + executeDbInserts tableName = do + columnEntries <- liftIO $ getColumns dbId connection tableName + + context <- ask + + let + columnNames :: [Text] + columnNames = + columnEntries <&> column_name + + columnNamesText :: Text + columnNamesText = + columnNames + <&> quoteKeyword + & intercalate ", " + + insertInDb :: Arguments -> ReaderT Out.Context IO (Int, [[SQLData]]) + insertInDb (Arguments argMap) = do + let + -- Yields for example: + -- [ { name: "John", email: "john@example.com" } + -- , { name: "Eve", email: "eve@example.com" } + -- ] + entries = + HashMap.findWithDefault + (List []) + "objects" + argMap + + -- All colums that are contained in the entries + containedColumns :: [Text] + containedColumns = + case entries of + List values -> + ( values + <&> \case + Object rowObj -> + HashMap.keys rowObj + _ -> 
[]
              )
                & P.concat
                & nub
                <&> doubleXDecode
            _ -> []

        -- Bound parameter names (":col") for the insert statement
        boundVariableNames :: [Text]
        boundVariableNames =
          containedColumns
            <&> (\name -> ":" <> doubleXEncodeGql name)

        onConflictArg =
          case HashMap.lookup "on_conflict" argMap of
            Just (List values) -> values
            _ -> []

      -- Render each `on_conflict` entry into an
      -- `ON CONFLICT (…) DO UPDATE SET …` clause
      onConflictClauses <- P.for onConflictArg $ \case
        Object fields -> do
          let
            -- Enum variants of the given list-valued field
            getColumnList fieldName =
              case HashMap.lookup fieldName fields of
                Just (List elements) -> do
                  element <- elements
                  case element of
                    Enum columnName -> pure columnName
                    _ -> []
                _ -> []

            constraint = getColumnList "constraint"
            update = getColumnList "update_columns"

          updateClauses <- P.for update $ \column -> do
            when (column `notElem` containedColumns) $ do
              P.throwIO $
                userError $
                  "Column "
                    <> T.unpack column
                    <> " cannot be set on conflicts without being explicitly provided"

            pure $
              quoteKeyword column
                <> " = :"
                <> doubleXEncodeGql column

          let
            filterElements = case HashMap.lookup "where" fields of
              Just (Object filterObj) -> HashMap.toList filterObj
              _ -> []

          pure $
            "ON CONFLICT ("
              <> ( constraint
                    <&> quoteKeyword
                    -- BUG FIX: the conflict-target columns must be
                    -- comma-separated (`ON CONFLICT ("a", "b")`);
                    -- joining with "<>" produced invalid SQL
                    & intercalate ", "
                 )
              <> ")\n DO UPDATE SET \n"
              <> intercalate ",\n" updateClauses
              <> "\n"
              <> getWhereClause filterElements
        _ -> pure ""

      let
        columnList =
          if P.null containedColumns
            then ""
            else
              " ("
                <> ( containedColumns
                      <&> quoteKeyword
                      & intercalate ", "
                   )
                <> ")"
        insertedValues =
          if P.null boundVariableNames
            then "DEFAULT VALUES"
            else
              "VALUES ("
                <> intercalate ", " boundVariableNames
                <> ")"
        sqlQuery =
          Query $
            "INSERT INTO "
              <> quoteKeyword tableName
              <> columnList
              <> insertedValues
              <> "\n"
              <> P.unlines onConflictClauses
              <> "RETURNING "
              <>
              -- TODO: Only return the actually requested values
              columnNamesText

        sqlDataRows :: [[SQLData]]
        sqlDataRows =
          case entries of
            List values ->
              values <&> \case
                Object rowObj ->
                  containedColumns
                    <&> getColValue
rowObj + <&> gqlValueToSQLData + _ -> [] + _ -> [] + + -- Exception from SQLite must be converted into + -- ResolverExceptions to be picked up by GQL query executor + returnedRows <- + catchAll + ( liftIO $ P.forM sqlDataRows $ \sqlDataRow -> do + numRowsRes :: [[Integer]] <- + query_ + connection + $ Query + $ "SELECT COUNT() FROM " + <> quoteKeyword tableName + + case numRowsRes of + [[numRows]] -> do + when (numRows >= maxRowsPerTable) $ + P.throwIO $ + userError $ + "Please upgrade to a Pro account \ + \to insert more than " + <> show maxRowsPerTable + <> " rows into a table" + _ -> pure () + + SS.queryNamed connection sqlQuery $ + P.zipWith (SS.:=) boundVariableNames sqlDataRow + ) + (throw . ResolverException) + + -- FIXME: + -- This should probably be used, but sqlite-simple + -- doesn't use only one query to execute the insert + -- https://github.com/nurpax/sqlite-simple/issues/82 + -- liftIO $ changes connection + pure (P.length sqlDataRows, returnedRows & P.concat) + + (numOfChanges, returnedRows) <- insertInDb context.arguments + returning <- rowsToList dbId tableName columnEntries returnedRows + + pure $ + Object $ + HashMap.fromList + [ ("affected_rows", Int $ fromIntegral numOfChanges) + , ("returning", returning) + ] + + -- Execute SQL query to update selected entries + executeDbUpdates :: Text -> ReaderT Out.Context IO Value + executeDbUpdates tableName = do + columnEntries <- liftIO $ getColumns dbId connection tableName + + context <- ask + + let Arguments args = context.arguments + + (numOfChanges, updatedRows) <- case HashMap.lookup "filter" args of + Just (Object filterObj) -> case HashMap.toList filterObj of + [] -> P.throwIO $ userError "Error: Filter must not be empty" + filterElements -> + liftIO $ + executeSqlMutation + connection + tableName + args + columnEntries + filterElements + _ -> pure (0, []) + + returning <- rowsToList dbId tableName columnEntries updatedRows + + pure $ + Object $ + HashMap.fromList + [ ("affected_rows", Int $ 
fromIntegral (numOfChanges :: Int)) + , ("returning", returning) + ] + + -- Execute SQL query to delete selected entries + executeDbDeletions :: Text -> ReaderT Out.Context IO Value + executeDbDeletions tableName = do + columnEntries <- liftIO $ getColumns dbId connection tableName + context <- ask + + let + columnNamesText :: Text + columnNamesText = + columnEntries + <&> column_name + <&> quoteKeyword + & intercalate ", " + + deleteEntry columnName value = do + let sqlQuery = + Query $ + "DELETE FROM " + <> quoteKeyword tableName + <> " \ + \WHERE " + <> quoteKeyword columnName + <> " = ?\n" + <> "RETURNING " + <> columnNamesText + deletedRows :: [[SQLData]] <- + catchAll + (liftIO $ query connection sqlQuery [value]) + (throw . ResolverException) + numChanges <- liftIO $ changes connection + + pure (numChanges, deletedRows) + + (numOfChanges, deletedRows) <- case context.arguments of + Arguments args -> case HashMap.lookup "filter" args of + Just colToFilter -> case colToFilter of + Object filterObj -> case HashMap.toList filterObj of + [(columnName, Object operatorAndValue)] -> do + case HashMap.toList operatorAndValue of + [("eq", String value)] -> + deleteEntry columnName value + [("eq", Int value)] -> + deleteEntry columnName $ show value + _ -> pure (0, []) + _ -> pure (0, []) + _ -> pure (0, []) + Nothing -> pure (0, []) + + returning <- rowsToList dbId tableName columnEntries deletedRows + + pure $ + Object $ + HashMap.fromList + [ ("affected_rows", Int $ fromIntegral numOfChanges) + , ("returning", returning) + ] + + getOutFieldUpdate :: Text -> IO (Out.Field IO) + getOutFieldUpdate tableName = do + columnEntries <- liftIO $ getColumns dbId connection tableName + + let + colNamesWithField :: [(Text, InputField)] + colNamesWithField = + columnEntries <&> \colEntry -> + let + inputField = + InputField + (Just colEntry.column_name_gql) + ( In.NamedScalarType $ + typeNameToScalarType colEntry.datatype_gql + ) + Nothing -- Default value + in + ( 
colEntry.column_name_gql + , inputField + ) + + pure $ + outFieldToField $ + OutField + { descriptionMb = Just $ "Provides entries from " <> tableName + , fieldType = getMutationResponse tableName columnEntries + , arguments = + HashMap.fromList + [ + ( "filter" + , inArgumentToArgument $ + InArgument + { argDescMb = Just "Filter objects" + , argType = + NamedInputObjectType $ + getTableFilterType tableName columnEntries + , valueMb = Nothing + } + ) + , + ( "set" + , inArgumentToArgument $ + InArgument + { argDescMb = Just "Map with new values" + , argType = + NamedInputObjectType $ + InputObjectType + (doubleXEncodeGql tableName <> "_set_input") + (Just "New values for the specified columns") + (HashMap.fromList colNamesWithField) + , valueMb = Nothing + } + ) + ] + } + + getOutFieldDeletion :: Text -> IO (Out.Field IO) + getOutFieldDeletion tableName = do + columnEntries <- liftIO $ getColumns dbId connection tableName + + pure $ + outFieldToField $ + OutField + { descriptionMb = Just $ "Provides entries from " <> tableName + , fieldType = getMutationResponse tableName columnEntries + , arguments = + HashMap.fromList + [ + ( "filter" + , inArgumentToArgument $ + InArgument + { argDescMb = Just "Filter objects" + , argType = + NamedInputObjectType $ + InputObjectType + (doubleXEncodeGql tableName <> "_filter") + ( Just + "Filter objects for the specified columns" + ) + (HashMap.fromList (colNamesWithFilterField tableName columnEntries)) + , valueMb = Nothing + } + ) + ] + } + -- -- TODO: Use for retrieving record by primary key + -- , arguments = HashMap.fromList $ columnEntries + -- <&> (\colEntry -> + -- ( colEntry & column_name_gql :: Text + -- , inArgumentToArgument $ InArgument + -- { argDescMb = Just "Retrieve object by primary key" + -- , argType = In.NamedScalarType $ + -- typeNameToScalarType $ colEntry & datatype + -- , valueMb = Nothing + -- } + -- ) + -- ) + + getMutationResolvers :: IO (HashMap.HashMap Text (Resolver IO)) + getMutationResolvers = do 
+ let + getInsertTableTuple :: TableEntryRaw -> IO (Text, Resolver IO) + getInsertTableTuple table = do + outFieldInsertion <- getOutField table.name + pure + ( "insert_" <> doubleXEncodeGql table.name + , ValueResolver + outFieldInsertion + (executeDbInserts table.name) + ) + + getUpdateTableTuple :: TableEntryRaw -> IO (Text, Resolver IO) + getUpdateTableTuple table = do + outFieldUpdate <- getOutFieldUpdate table.name + pure + ( "update_" <> doubleXEncodeGql table.name + , ValueResolver + outFieldUpdate + (executeDbUpdates table.name) + ) + + getDeleteTableTuple :: TableEntryRaw -> IO (Text, Resolver IO) + getDeleteTableTuple table = do + outFieldDeletion <- getOutFieldDeletion table.name + pure + ( "delete_" <> doubleXEncodeGql table.name + , ValueResolver + outFieldDeletion + (executeDbDeletions table.name) + ) + + getTableTuples :: IO [(Text, Resolver IO)] + getTableTuples = + sequence $ + (tables <&> getInsertTableTuple) + <> (tables <&> getUpdateTableTuple) + <> (tables <&> getDeleteTableTuple) + + getTableTuples <&> HashMap.fromList + + Just + . 
Out.ObjectType + "Mutation" + (Just documentation) + [] + <$> getMutationResolvers + + +-- | Automatically generated schema derived from the SQLite database +getDerivedSchema + :: SchemaConf + -> Connection + -> Text + -> [TableEntryRaw] + -> IO (Schema IO) +getDerivedSchema schemaConf connection dbId tables = do + sqlitePragmas <- getSQLitePragmas schemaConf.pragmaConf + P.forM_ sqlitePragmas (execute_ connection) + + queries <- queryType connection schemaConf.accessMode dbId tables + mutations <- + mutationType + connection + schemaConf.maxRowsPerTable + dbId + tables + + pure $ + schema + queries + ( case schemaConf.accessMode of + ReadOnly -> Nothing + WriteOnly -> mutations + ReadAndWrite -> mutations + ) + Nothing -- subscriptions + mempty diff --git a/source/AirGQL/Introspection.hs b/source/AirGQL/Introspection.hs new file mode 100644 index 0000000..e7a393a --- /dev/null +++ b/source/AirGQL/Introspection.hs @@ -0,0 +1,2540 @@ +module AirGQL.Introspection ( + getSchemaResolver, + typeNameResolver, + createType, +) +where + +import Protolude ( + Applicative (pure), + Bool (False, True), + Eq ((/=)), + Foldable (null), + IO, + Int, + IsString, + Maybe (Just, Nothing), + MonadReader (ask), + Monoid (mempty), + Num ((+)), + Ord ((<)), + Semigroup ((<>)), + Text, + concat, + filter, + forM, + fromMaybe, + not, + ($), + (&), + (<&>), + (>>=), + ) + +import Data.HashMap.Strict as HashMap ( + HashMap, + empty, + fromList, + lookup, + singleton, + ) +import Database.SQLite.Simple (Connection) +import Language.GraphQL.Type ( + Value (Boolean, List, Null, Object, String), + boolean, + string, + ) +import Language.GraphQL.Type.In as In (Type (NamedScalarType)) +import Language.GraphQL.Type.Out as Out ( + Context (values), + Field (Field), + Resolver (ValueResolver), + Type ( + ListType, + NamedObjectType, + NamedScalarType, + NonNullListType, + NonNullObjectType, + NonNullScalarType + ), + ) + +import AirGQL.GQLWrapper ( + InArgument (InArgument, argDescMb, argType, 
valueMb), + OutField (OutField, arguments, descriptionMb, fieldType), + inArgumentToArgument, + outFieldToField, + ) +import AirGQL.Lib ( + AccessMode (ReadAndWrite, ReadOnly, WriteOnly), + ColumnEntry, + GqlTypeName (full), + TableEntryRaw (name), + column_name_gql, + datatype_gql, + getColumns, + notnull, + select_options, + ) +import AirGQL.Types.OutObjectType ( + OutObjectType (OutObjectType, descriptionMb, fields, interfaceTypes, name), + outObjectTypeToObjectType, + ) +import DoubleXEncoding (doubleXEncodeGql) + + +emptyType :: Value +emptyType = + Object $ HashMap.singleton "kind" "OBJECT" + + +intType :: Value +intType = + Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "SCALAR") + , ("name", "Int") + , + ( "description" + , "The `Int` scalar type represents \ + \non-fractional signed whole numeric values. \ + \Int can represent values between -(2^31) and 2^31 - 1." + ) + ] + + +floatType :: Value +floatType = + Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "SCALAR") + , ("name", "Float") + , + ( "description" + , "Signed double-precision floating-point value." + ) + ] + + +stringType :: Value +stringType = + Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "SCALAR") + , ("name", "String") + , + ( "description" + , "The `String` scalar type represents textual data, \ + \represented as UTF-8 character sequences. \ + \The String type is most often used by GraphQL \ + \to represent free-form human-readable text." + ) + ] + + +booleanType :: Value +booleanType = + Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "SCALAR") + , ("name", "Boolean") + , + ( "description" + , "The `Boolean` scalar type represents `true` or `false`." 
+ ) + ] + + +nonNullString :: Out.Field IO +nonNullString = + outFieldToField $ + OutField + { descriptionMb = Just "nonNullString description" + , fieldType = Out.NonNullScalarType string + , arguments = HashMap.empty + } + + +nullableString :: Out.Field IO +nullableString = + Out.Field + (Just "nullableString") + (Out.NamedScalarType string) + HashMap.empty + + +nonNullBoolean :: Out.Field IO +nonNullBoolean = + outFieldToField $ + OutField + { descriptionMb = Just "nonNullBoolean description" + , fieldType = Out.NonNullScalarType boolean + , arguments = HashMap.empty + } + + +getTypeTuple :: (IsString a) => Value -> Value -> (a, Value) +getTypeTuple theKind theType = + ( "type" + , Object $ + HashMap.fromList + [ ("kind", theKind) + , ("name", theType) + ] + ) + + +nonNullType :: Value -> Value +nonNullType inner = + Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , ("ofType", inner) + ] + + +listType :: Value -> Value +listType inner = + Object $ + HashMap.fromList + [ ("kind", "LIST") + , ("ofType", inner) + ] + + +createType :: Text -> Text -> [Value] -> [Text] -> Text -> Value +createType rootName description args nestedTypes name = + let + createChildType :: [Text] -> Text -> Value + createChildType nestedChildTypes childName = + case nestedChildTypes of + [] -> Null + (childHeadKind : childRestKinds) -> + if not $ null childRestKinds + then + Object $ + HashMap.fromList + [ ("kind", String childHeadKind) + , ("ofType", createChildType childRestKinds childName) + ] + else + Object $ + HashMap.fromList + [ ("kind", String childHeadKind) + , ("name", String name) + ] + in + case nestedTypes of + [] -> Null + kinds -> + Object $ + HashMap.fromList + ( [ ("name", String rootName) + , ("description", String description) + , ("type", createChildType kinds name) + ] + <> if null args then [] else [("args", List args)] + ) + + +createField :: Text -> Maybe Text -> Value -> Value +createField name descriptionMb type_ = + Object $ + HashMap.fromList + [ 
("name", String name) + , ("type", type_) + ] + <> case descriptionMb of + Nothing -> mempty + Just description -> + HashMap.singleton + "description" + (String description) + + +nameField :: Value +nameField = + Object $ + HashMap.fromList + [ ("name", "name") + , + ( "type" + , nonNullType $ + Object $ + HashMap.fromList + [ ("kind", "SCALAR") + , ("name", "String") + ] + ) + ] + + +descriptionField :: Value +descriptionField = + Object $ + HashMap.fromList + [ ("name", "description") + , getTypeTuple "SCALAR" "String" + ] + + +argsFieldValue :: Value +argsFieldValue = + Object $ + HashMap.fromList + [ ("name", "args") + , + ( "type" + , nonNullType $ + listType $ + nonNullType $ + Object $ + HashMap.fromList + [ ("kind", "OBJECT") + , ("name", "__InputValue") + ] + ) + ] + + +locationsFieldValue :: Value +locationsFieldValue = + Object $ + HashMap.fromList + [ ("name", "locations") + , + ( "type" + , nonNullType $ + listType $ + nonNullType $ + Object $ + HashMap.fromList + [ ("kind", "ENUM") + , ("name", "__DirectiveLocation") + ] + ) + ] + + +typeFieldValue :: Value +typeFieldValue = + Object $ + HashMap.fromList + [ ("name", "type") + , + ( "type" + , nonNullType $ + Object $ + HashMap.fromList + [ ("kind", "OBJECT") + , ("name", "__Type") + ] + ) + ] + + +isDeprecatedFieldValue :: Value +isDeprecatedFieldValue = + Object $ + HashMap.fromList + [ ("name", "isDeprecated") + , + ( "type" + , nonNullType $ + Object $ + HashMap.fromList + [ ("kind", "SCALAR") + , ("name", "Boolean") + ] + ) + ] + + +typeType :: Int -> Out.Type IO +typeType level = + Out.NamedObjectType $ + outObjectTypeToObjectType $ + OutObjectType + { name = "__Type" + , descriptionMb = Just "__Type description" + , interfaceTypes = [] + , fields = + HashMap.fromList $ + [ + ( "__typename" + , ValueResolver nonNullString $ pure "__Type" + ) + , + ( "kind" + , ValueResolver nonNullString $ do + context <- ask + case context & Out.values of + Object obj -> + pure $ + fromMaybe "ERROR: kind" $ + 
HashMap.lookup "kind" obj + _ -> pure "ERROR: kind" + ) + , + ( "name" + , ValueResolver nullableString $ do + context <- ask + case context & Out.values of + Object obj -> + pure $ + fromMaybe Null $ + HashMap.lookup "name" obj + _ -> pure Null + ) + , + ( "description" + , ValueResolver nullableString $ do + context <- ask + case context & Out.values of + Object obj -> + pure $ + fromMaybe Null $ + HashMap.lookup "description" obj + _ -> pure Null + ) + , + ( "fields" + , ValueResolver fieldsField $ do + context <- ask + case context & Out.values of + Object obj -> + pure $ + fromMaybe Null $ + HashMap.lookup "fields" obj + _ -> pure Null + ) + , + ( "possibleTypes" + , ValueResolver typesField $ do + context <- ask + case context & Out.values of + Object obj -> + pure $ + fromMaybe Null $ + HashMap.lookup "possibleTypes" obj + _ -> pure Null + ) + , + ( "interfaces" + , ValueResolver typesField $ do + context <- ask + case context & Out.values of + Object obj -> + pure $ + fromMaybe (List []) $ + HashMap.lookup "interfaces" obj + _ -> pure $ List [] + ) + , + ( "inputFields" + , ValueResolver inputsFieldsField $ do + context <- ask + case context & Out.values of + Object obj -> + pure $ + fromMaybe Null $ + HashMap.lookup "inputFields" obj + _ -> pure Null + ) + , + ( "enumValues" + , ValueResolver enumValuesField $ do + context <- ask + case context & Out.values of + Object obj -> + pure $ + fromMaybe Null $ + HashMap.lookup "enumValues" obj + _ -> pure Null + ) + ] + <> ( if level < 7 + then + [ + ( "ofType" + , ValueResolver (typeField $ level + 1) $ do + context <- ask + case context & Out.values of + Object obj -> + pure $ + fromMaybe Null $ + HashMap.lookup "ofType" obj + _ -> pure Null + ) + ] + else [] + ) + } + + +typeField :: Int -> Field IO +typeField level = + outFieldToField $ + OutField + { descriptionMb = Just "typeField description" + , fieldType = typeType level + , arguments = HashMap.empty + } + + +typesField :: Field IO +typesField = + 
outFieldToField $ + OutField + { descriptionMb = Just "typesField description" + , fieldType = Out.ListType $ typeType 0 + , arguments = HashMap.empty + } + + +inputValueType :: Out.Type IO +inputValueType = + Out.NonNullObjectType $ + outObjectTypeToObjectType $ + OutObjectType + { name = "__InputValue" + , descriptionMb = Just "__InputValue description" + , interfaceTypes = [] + , fields = + HashMap.fromList + [ + ( "__typename" + , ValueResolver nonNullString $ pure "__InputValue" + ) + , + ( "name" + , ValueResolver nonNullString $ do + context <- ask + case context & Out.values of + Object obj -> + pure $ + fromMaybe "ERROR: name" $ + HashMap.lookup "name" obj + _ -> pure "ERROR: name" + ) + , + ( "description" + , ValueResolver nullableString $ do + context <- ask + case context & Out.values of + Object obj -> + pure $ + fromMaybe Null $ + HashMap.lookup "description" obj + _ -> pure Null + ) + , + ( "defaultValue" + , ValueResolver nullableString $ do + context <- ask + case context & Out.values of + Object obj -> + pure $ + fromMaybe Null $ + HashMap.lookup "defaultValue" obj + _ -> pure Null + ) + , + ( "type" + , ValueResolver (typeField 0) $ do + context <- ask + case context & Out.values of + Object obj -> + pure $ + fromMaybe emptyType $ + HashMap.lookup "type" obj + _ -> pure emptyType + ) + ] + } + + +argsField :: Field IO +argsField = + outFieldToField $ + OutField + { descriptionMb = Just "argsField description" + , fieldType = Out.NonNullListType inputValueType + , arguments = HashMap.empty + } + + +inputsFieldsField :: Field IO +inputsFieldsField = + outFieldToField $ + OutField + { descriptionMb = Just "inputsFieldsField description" + , fieldType = Out.ListType inputValueType + , arguments = HashMap.empty + } + + +enumValuesType :: Out.Type IO +enumValuesType = + Out.ListType $ + Out.NonNullObjectType $ + outObjectTypeToObjectType $ + OutObjectType + { name = "__EnumValue" + , descriptionMb = Just "__EnumValue description" + , interfaceTypes = 
            []
          , fields =
              HashMap.fromList
                [
                  ( "__typename"
                  , ValueResolver nonNullString $ pure "__EnumValue"
                  )
                ,
                  ( "name"
                  , -- Required field: fall back to an error marker instead of Null
                    ValueResolver nonNullString $ do
                      context <- ask
                      case context & Out.values of
                        Object obj ->
                          pure $
                            fromMaybe "ERROR: name" $
                              HashMap.lookup "name" obj
                        _ -> pure "ERROR: name"
                  )
                ,
                  ( "description"
                  , ValueResolver nullableString $ do
                      context <- ask
                      case context & Out.values of
                        Object obj ->
                          pure $
                            fromMaybe Null $
                              HashMap.lookup "description" obj
                        _ -> pure Null
                  )
                ,
                  ( "isDeprecated"
                  , ValueResolver nonNullBoolean $ do
                      context <- ask
                      case context & Out.values of
                        Object obj ->
                          pure $
                            fromMaybe (Boolean False) $
                              HashMap.lookup "isDeprecated" obj
                        _ -> pure $ Boolean False
                  )
                ,
                  ( "deprecationReason"
                  , ValueResolver nullableString $ do
                      context <- ask
                      case context & Out.values of
                        Object obj ->
                          pure $
                            fromMaybe Null $
                              HashMap.lookup "deprecationReason" obj
                        _ -> pure Null
                  )
                ]
          }


-- | @enumValues@ field of @__Type@, with the standard
-- @includeDeprecated@ argument.
--
-- NOTE(review): the argument default here is 'Boolean True', while the
-- GraphQL spec defaults @includeDeprecated@ to @false@ — confirm this
-- deviation is intentional.
enumValuesField :: Field IO
enumValuesField =
  outFieldToField $
    OutField
      { descriptionMb = Just "enumValuesField description"
      , fieldType = enumValuesType
      , arguments =
          HashMap.fromList
            [
              ( "includeDeprecated"
              , inArgumentToArgument $
                  InArgument
                    { argDescMb = Just "includeDeprecated description"
                    , argType = In.NamedScalarType boolean
                    , valueMb = Just $ Boolean True
                    }
              )
            ]
      }


-- | @queryType@ field of @__Schema@.
queryTypeType :: Field IO
queryTypeType =
  outFieldToField $
    OutField
      { descriptionMb = Just "Provides the queryType"
      , fieldType = typeType 0
      , arguments = HashMap.empty
      }


-- | @mutationType@ field of @__Schema@.
mutationTypeType :: Field IO
mutationTypeType =
  outFieldToField $
    OutField
      { descriptionMb = Just "Provides the mutationType"
      , fieldType = typeType 0
      , arguments = HashMap.empty
      }


-- | @subscriptionType@ field of @__Schema@
-- (definition continues past this chunk boundary).
subscriptionTypeType :: Field IO
subscriptionTypeType =
  outFieldToField $
    OutField
      { descriptionMb = Just "Provides the subscriptionType"
      , fieldType = typeType 0
      , arguments =
          HashMap.empty
      }


-- | Introspection output type for GraphQL's @__Field@.
--
-- Resolvers read the pre-built introspection 'Object' from the
-- resolver context and look up the matching key, with per-field
-- fallbacks for missing keys or a non-'Object' context.
fieldsTypeOutput :: Out.Type IO
fieldsTypeOutput =
  Out.ListType $
    Out.NonNullObjectType $
      outObjectTypeToObjectType $
        OutObjectType
          { name = "__Field"
          , descriptionMb = Just "__Field description"
          , interfaceTypes = []
          , fields =
              HashMap.fromList
                [
                  ( "__typename"
                  , ValueResolver nonNullString $ pure "__Field"
                  )
                ,
                  ( "name"
                  , -- Required field: fall back to an error marker instead of Null
                    ValueResolver nonNullString $ do
                      context <- ask
                      case context & Out.values of
                        Object obj ->
                          pure $
                            fromMaybe "ERROR: name" $
                              HashMap.lookup "name" obj
                        _ -> pure "ERROR: name"
                  )
                ,
                  ( "description"
                  , ValueResolver nullableString $ do
                      context <- ask
                      case context & Out.values of
                        Object obj ->
                          pure $
                            fromMaybe Null $
                              HashMap.lookup "description" obj
                        _ -> pure Null
                  )
                ,
                  ( "args"
                  , ValueResolver argsField $ do
                      context <- ask
                      case context & Out.values of
                        Object obj ->
                          pure $
                            fromMaybe (List []) $
                              HashMap.lookup "args" obj
                        _ -> pure $ List []
                  )
                ,
                  ( "type"
                  , ValueResolver (typeField 0) $ do
                      context <- ask
                      case context & Out.values of
                        Object obj ->
                          pure $
                            fromMaybe emptyType $
                              HashMap.lookup "type" obj
                        _ -> pure emptyType
                  )
                ,
                  ( "isDeprecated"
                  , ValueResolver nonNullBoolean $ do
                      context <- ask
                      case context & Out.values of
                        Object obj ->
                          pure $
                            fromMaybe (Boolean False) $
                              HashMap.lookup "isDeprecated" obj
                        _ -> pure $ Boolean False
                  )
                ,
                  ( "deprecationReason"
                  , ValueResolver nullableString $ do
                      context <- ask
                      case context & Out.values of
                        Object obj ->
                          pure $
                            fromMaybe Null $
                              HashMap.lookup "deprecationReason" obj
                        _ -> pure Null
                  )
                ]
          }


-- | @fields@ field of @__Type@: lists @__Field@ values and takes the
-- standard @includeDeprecated@ argument
-- (definition continues past this chunk boundary).
fieldsField :: Field IO
fieldsField =
  outFieldToField $
    OutField
      { descriptionMb = Just "The fields type"
      , fieldType = fieldsTypeOutput
      , arguments =
          HashMap.fromList
            [
              ( "includeDeprecated"
              , inArgumentToArgument $
                  InArgument
                    { argDescMb = Just "includeDeprecated description"
                    , argType = In.NamedScalarType boolean
                    ,
                      valueMb = Just $ Boolean True
                    }
              )
            ]
      }


-- | @directives@ field of @__Schema@: a list of @__Directive@ objects
-- (name, description, locations, args).
directivesType :: Field IO
directivesType =
  let
    directivesTypeOutput :: Out.Type IO
    directivesTypeOutput =
      Out.ListType $
        Out.NonNullObjectType $
          outObjectTypeToObjectType $
            OutObjectType
              { name = "__Directive"
              , descriptionMb = Just "__Directive description"
              , interfaceTypes = []
              , fields =
                  HashMap.fromList
                    [
                      ( "__typename"
                      , -- NOTE(review): sibling types resolve __typename with
                        -- 'nonNullString'; this one uses 'nullableString' —
                        -- confirm whether that is intentional.
                        ValueResolver nullableString $ pure "__Directive"
                      )
                    ,
                      ( "name"
                      , ValueResolver nonNullString $ do
                          context <- ask
                          case context & Out.values of
                            Object obj ->
                              pure $
                                fromMaybe "ERROR: name" $
                                  HashMap.lookup "name" obj
                            _ -> pure "ERROR: name"
                      )
                    ,
                      ( "description"
                      , ValueResolver nullableString $ do
                          context <- ask
                          case context & Out.values of
                            Object obj ->
                              pure $
                                fromMaybe Null $
                                  HashMap.lookup "description" obj
                            _ -> pure Null
                      )
                    ,
                      ( "locations"
                      , let
                          -- Ad-hoc field describing @locations@ as a list
                          -- of non-null strings
                          locationsTypeName :: Field m
                          locationsTypeName =
                            Out.Field
                              (Just "locationsTypeName name")
                              (Out.ListType $ Out.NonNullScalarType string)
                              HashMap.empty
                        in
                          ValueResolver locationsTypeName $ do
                            context <- ask
                            case context & Out.values of
                              Object obj ->
                                pure $
                                  fromMaybe (List []) $
                                    HashMap.lookup "locations" obj
                              _ -> pure $ List []
                      )
                    ,
                      ( "args"
                      , ValueResolver argsField $ do
                          context <- ask
                          case context & Out.values of
                            Object obj ->
                              pure $
                                fromMaybe (List []) $
                                  HashMap.lookup "args" obj
                            _ -> pure $ List []
                      )
                    ]
              }
  in
    outFieldToField $
      OutField
        { descriptionMb = Just "Provides the directivesType"
        , fieldType = directivesTypeOutput
        , arguments = HashMap.empty
        }


-- | Introspection reference to a table's @<table>_filter@ input object.
-- When @isRequired@ is set the reference is wrapped in NON_NULL.
filterType :: Bool -> Text -> Value
filterType isRequired tableName =
  let
    filterObj =
      Object $
        HashMap.fromList
          [ ("kind", "INPUT_OBJECT")
          , ("name", String $ doubleXEncodeGql tableName <> "_filter")
          ,
            ( "description"
            , "Select rows matching the provided filter object"
            )
          ]
  in
    if isRequired
      then nonNullType filterObj
      else filterObj


-- | Introspection field for querying a table: accepts
-- @filter@ / @order_by@ / @limit@ / @offset@ arguments and returns
-- a non-null list of non-null @<table>_row@ objects.
getFieldsForQuery :: Text -> Value
getFieldsForQuery tableName =
  createType
    (doubleXEncodeGql tableName)
    ("Rows from the table \"" <> tableName <> "\"")
    [ Object $
        HashMap.fromList
          [ ("name", "filter")
          , ("description", "Filter to select specific rows")
          , ("type", filterType False tableName)
          ]
    , Object $
        HashMap.fromList
          [ ("name", "order_by")
          , ("description", "Columns used to sort the data")
          ,
            ( "type"
            , listType $
                Object $
                  HashMap.fromList
                    [ ("kind", "INPUT_OBJECT")
                    ,
                      ( "name"
                      , String $
                          doubleXEncodeGql tableName
                            <> "_order_by"
                      )
                    ]
            )
          ]
    , Object $
        HashMap.fromList
          [ ("name", "limit")
          , ("description", "Limit the number of returned rows")
          , ("type", intType)
          ]
    , Object $
        HashMap.fromList
          [ ("name", "offset")
          , ("description", "The index to start returning rows from")
          , ("type", intType)
          ]
    ]
    -- Type wrapping, outermost first: NON_NULL (LIST (NON_NULL (OBJECT)))
    ["NON_NULL", "LIST", "NON_NULL", "OBJECT"]
    (doubleXEncodeGql tableName <> "_row")


-- | Introspection fields for a table's mutations: @insert_<table>@,
-- @update_<table>@ and @delete_<table>@, each returning
-- @<table>_mutation_response@
-- (definition continues past this chunk boundary).
getFieldsForMutation :: Text -> [Value]
getFieldsForMutation tableName =
  [ Object $
      HashMap.fromList
        [ ("name", String $ "insert_" <> doubleXEncodeGql tableName)
        ,
          ( "description"
          , String $
              "Insert new rows in table \"" <> tableName <> "\""
          )
        ,
          ( "args"
          , List
              [ createField
                  "objects"
                  (Just "Rows to be inserted")
                  $ nonNullType
                  $ listType
                  $ nonNullType
                  $ Object
                  $ HashMap.fromList
                    [ ("kind", "INPUT_OBJECT")
                    ,
                      ( "name"
                      , String $
                          doubleXEncodeGql
                            tableName
                            <> "_insert_input"
                      )
                    ]
              , createField
                  "on_conflict"
                  (Just "Specifies how to handle broken UNIQUE constraints")
                  $ listType
                  $ nonNullType
                  $ Object
                  $ HashMap.fromList
                    [ ("kind", "INPUT_OBJECT")
                    ,
                      ( "name"
                      , String $
                          doubleXEncodeGql
                            tableName
                            <> "_upsert_on_conflict"
                      )
                    ]
              ]
          )
        ,
          ( "type"
          , nonNullType $
              Object $
                HashMap.fromList
                  [ ("kind", "OBJECT")
                  ,
                    ( "name"
                    , String $
                        doubleXEncodeGql tableName
                          <> "_mutation_response"
                    )
                  ]
          )
        ]
  , -- update_<table>: required filter + set input → mutation_response
    Object $
      HashMap.fromList
        [ ("name", String $ "update_" <> doubleXEncodeGql tableName)
        ,
          ( "description"
          , String $
              "Update rows in table \"" <> tableName <> "\""
          )
        ,
          ( "args"
          , List
              [ Object $
                  HashMap.fromList
                    [ ("name", "filter")
                    , ("description", "Filter to select rows to be updated")
                    , ("type", filterType True tableName)
                    ]
              , Object $
                  HashMap.fromList
                    [ ("name", "set")
                    , ("description", "Fields to be updated")
                    ,
                      ( "type"
                      , nonNullType $
                          Object $
                            HashMap.fromList
                              [ ("kind", "INPUT_OBJECT")
                              ,
                                ( "name"
                                , String $
                                    doubleXEncodeGql tableName
                                      <> "_set_input"
                                )
                              ]
                      )
                    ]
              ]
          )
        ,
          ( "type"
          , nonNullType $
              Object $
                HashMap.fromList
                  [ ("kind", "OBJECT")
                  ,
                    ( "name"
                    , String $
                        doubleXEncodeGql tableName
                          <> "_mutation_response"
                    )
                  ]
          )
        ]
  , -- delete_<table>: required filter → mutation_response
    Object $
      HashMap.fromList
        [ ("name", String $ "delete_" <> doubleXEncodeGql tableName)
        ,
          ( "description"
          , String $ "Delete rows in table \"" <> tableName <> "\""
          )
        ,
          ( "args"
          , List
              [ Object $
                  HashMap.fromList
                    [ ("name", "filter")
                    , ("description", "Filter to select rows to be deleted")
                    , ("type", filterType True tableName)
                    ]
              ]
          )
        ,
          ( "type"
          , nonNullType $
              Object $
                HashMap.fromList
                  [ ("kind", "OBJECT")
                  ,
                    ( "name"
                    , String $
                        doubleXEncodeGql tableName
                          <> "_mutation_response"
                    )
                  ]
          )
        ]
  ]


-- | Build the introspection value for a comparison input object with the
-- operators eq, neq, gt, gte, lt, lte, like, ilike, in and nin, where
-- @in@/@nin@ take a list of @type_@
-- (definition continues past this chunk boundary).
makeComparisonType :: Text -> Text -> Value -> Value
makeComparisonType typeName description type_ =
  let field fieldName = createField fieldName Nothing type_
  in  Object $
        HashMap.fromList
          [ ("__typename", "__Type")
          , ("kind", "INPUT_OBJECT")
          , ("name", String typeName)
          ,
            ( "description"
            , String description
            )
          ,
            ( "inputFields"
            , List
                [ field "eq"
                , field "neq"
                , field "gt"
                , field "gte"
                , field "lt"
                , field "lte"
                , field "like"
                , field "ilike"
                , createField
                    "in"
                    Nothing
                    (listType type_)
                , createField
                    "nin"
                    Nothing
                    (listType
                      type_)
                ]
            )
          ]


-- | Comparison input objects exposed in the schema for the built-in
-- scalar types.
--
-- NOTE(review): this returns @[]@ in 'ReadOnly' mode even though filter
-- objects are also referenced by queries — confirm this is intentional.
comparisonTypes :: AccessMode -> [Value]
comparisonTypes accessMode =
  case accessMode of
    ReadOnly -> []
    _ ->
      [ makeComparisonType "IntComparison" "Compare to an Int" intType
      , makeComparisonType "FloatComparison" "Compare to a Float" floatType
      , makeComparisonType "StringComparison" "Compare to a String" stringType
      , makeComparisonType "BooleanComparison" "Compare to a Boolean" booleanType
      ]


-- | Introspection value for the @OrderingTerm@ enum.
-- The lowercase variants (@asc@ / @desc@) are marked deprecated in
-- favor of the uppercase ones.
orderingTermType :: Value
orderingTermType =
  Object $
    HashMap.fromList
      [ ("__typename", "__Type")
      , ("kind", "ENUM")
      , ("name", String "OrderingTerm")
      ,
        ( "description"
        , String "Ordering options when ordering by a column"
        )
      ,
        ( "enumValues"
        , List
            [ Object $
                HashMap.fromList
                  [ ("name", "ASC")
                  , ("description", "In ascending order")
                  ]
            , Object $
                HashMap.fromList
                  [ ("name", "asc")
                  , ("description", "In ascending order")
                  , ("isDeprecated", Boolean True)
                  ,
                    ( "deprecationReason"
                    , String "GraphQL spec recommends all caps for enums"
                    )
                  ]
            , Object $
                HashMap.fromList
                  [ ("name", "DESC")
                  , ("description", "In descending order")
                  ]
            , Object $
                HashMap.fromList
                  [ ("name", "desc")
                  , ("description", "In descending order")
                  , ("isDeprecated", Boolean True)
                  ,
                    ( "deprecationReason"
                    , String "GraphQL spec recommends all caps for enums"
                    )
                  ]
            ]
        )
      ]


-- | GraphQL type name for a column, falling back to @"String"@ when the
-- column has no GraphQL datatype.
getFullDatatype :: ColumnEntry -> Text
getFullDatatype entry = case entry.datatype_gql of
  -- TODO: Should be "Any", but that's not a valid GraphQL type
  Nothing -> "String"
  Just type_ -> type_.full


-- | Build the @__Schema@ introspection output type for the given
-- tables, taking the access mode into account
-- (definition continues past this chunk boundary).
getSchemaFieldOutput
  :: Text
  -> Connection
  -> AccessMode
  -> [TableEntryRaw]
  -> IO (Out.Type IO)
getSchemaFieldOutput dbId conn accessMode tables = do
  typesForTables <- forM tables $ \table -> do
    columns <- getColumns dbId conn table.name
    fields <- forM columns $ \columnEntry -> do
      let colName = columnEntry.column_name_gql
      pure $
        createType
          colName
          "" -- TODO: Reactivate description when
user can specify it + [] -- No arguments + ( if columnEntry.notnull + then ["NON_NULL", "SCALAR"] + else ["SCALAR"] + ) + (getFullDatatype columnEntry) + + fieldsNullable <- forM columns $ \columnEntry -> do + let colName = columnEntry.column_name_gql + pure $ + createType + colName + "" -- TODO: Reactivate description when user can specify it + [] -- No arguments + ["SCALAR"] + (getFullDatatype columnEntry) + + fieldsWithComparisonExp <- forM columns $ \columnEntry -> do + let colName = columnEntry.column_name_gql + pure $ + createType + colName + "" -- TODO: Reactivate description when user can specify it + [] -- No arguments + ["INPUT_OBJECT"] + (getFullDatatype columnEntry <> "Comparison") + + fieldsWithOrderingTerm <- forM columns $ \columnEntry -> do + let colName = columnEntry.column_name_gql + pure $ + createType + colName + "" -- TODO: Reactivate description when user can specify it + [] -- No arguments + ["INPUT_OBJECT"] + "OrderingTerm" + + let + customRowTypes = + columns + >>= \columnEntry -> + case (columnEntry.select_options, columnEntry.datatype_gql) of + (Just _, Just name) -> + let + colName = columnEntry.column_name_gql + typeName = name.full + description = "Data type for column " <> colName + rowType = + Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "SCALAR") + , ("name", String typeName) + , ("description", String description) + ] + + comparisonType = + makeComparisonType + (typeName <> "Comparison") + ("Compare with values for column" <> colName) + ( Object $ + HashMap.fromList + [ ("kind", "SCALAR") + , ("name", String typeName) + , ("description", String description) + ] + ) + in + [rowType, comparisonType] + _ -> [] + + fieldEnumVariants = + columns + <&> \columnEntry -> + Object $ + HashMap.singleton "name" $ + String $ + column_name_gql columnEntry + + fieldEnumDescription = + "This enum contains a variant for each column in the table" :: Value + + fieldEnumType = + Object $ + HashMap.fromList + [ ("__typename", 
"__Type") + , ("kind", "ENUM") + , ("name", String $ doubleXEncodeGql table.name <> "_column") + , + ( "description" + , fieldEnumDescription + ) + , ("enumValues", List fieldEnumVariants) + ] + + fieldEnumTypeReference = + Object $ + HashMap.fromList + [ ("kind", "INPUT_OBJECT") + , ("name", String $ doubleXEncodeGql table.name <> "_column") + , + ( "description" + , fieldEnumDescription + ) + ] + + requiresWrite obj = case accessMode of + ReadOnly -> Null + WriteOnly -> obj + ReadAndWrite -> obj + + pure $ + customRowTypes + <> [ Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "OBJECT") + , ("name", String $ doubleXEncodeGql table.name <> "_row") + , + ( "description" + , String $ + "Available columns for table \"" + <> table.name + <> "\"" + ) + , ("fields", List fields) + ] + , requiresWrite $ + Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "OBJECT") + , + ( "name" + , String $ + doubleXEncodeGql table.name <> "_mutation_response" + ) + , + ( "description" + , String $ "Mutation response for " <> table.name + ) + , + ( "fields" + , List + [ Object $ + HashMap.fromList + [ ("name", "affected_rows") + , + ( "type" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "SCALAR") + , ("name", "Int") + ] + ) + ] + ) + ] + , Object $ + HashMap.fromList + [ ("name", "returning") + , + ( "type" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "LIST") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "OBJECT") + , + ( "name" + , String $ doubleXEncodeGql table.name <> "_row" + ) + ] + ) + ] + ) + ] + ) + ] + ) + ] + ] + ) + ] + , requiresWrite $ + Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "INPUT_OBJECT") + , + ( "name" + , String $ doubleXEncodeGql table.name <> "_insert_input" + ) 
+ , + ( "description" + , String $ "Input object for " <> table.name + ) + , ("inputFields", List fields) + ] + , fieldEnumType + , requiresWrite $ + Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "INPUT_OBJECT") + , + ( "name" + , String $ doubleXEncodeGql table.name <> "_upsert_on_conflict" + ) + , + ( "description" + , String $ "Specifies how broken UNIQUE constraints for " <> table.name <> " should be handled" + ) + , + ( "inputFields" + , List + [ createField + "constraint" + (Just "columns to handle conflicts of") + $ nonNullType + $ listType + $ nonNullType fieldEnumTypeReference + , createField + "update_columns" + (Just "columns to override on conflict") + $ nonNullType + $ listType + $ nonNullType fieldEnumTypeReference + , createField + "where" + (Just "filter specifying which conflicting columns to update") + (filterType False table.name) + ] + ) + ] + , requiresWrite $ + Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "INPUT_OBJECT") + , + ( "name" + , String $ doubleXEncodeGql table.name <> "_set_input" + ) + , + ( "description" + , String $ "Fields to set for " <> table.name + ) + , ("inputFields", List fieldsNullable) + ] + , requiresWrite $ + Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "INPUT_OBJECT") + , + ( "name" + , String $ doubleXEncodeGql table.name <> "_filter" + ) + , + ( "description" + , String "Filter object to select rows" + ) + , ("inputFields", List fieldsWithComparisonExp) + ] + , requiresWrite $ + Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "INPUT_OBJECT") + , + ( "name" + , String $ doubleXEncodeGql table.name <> "_order_by" + ) + , + ( "description" + , String $ + "Ordering options when selecting data from \"" + <> table.name + <> "\"." 
+ ) + , ("inputFields", List fieldsWithOrderingTerm) + ] + ] + + let + queryTypeObj = + Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "OBJECT") + , ("name", "Query") + , + ( "fields" + , List $ + tables + <&> AirGQL.Lib.name + <&> getFieldsForQuery + ) + ] + mutationTypeObj = + Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "OBJECT") + , ("name", "Mutation") + , + ( "fields" + , List $ + tables + <&> AirGQL.Lib.name + <&> getFieldsForMutation + & concat + ) + ] + + pure $ + Out.NonNullObjectType $ + outObjectTypeToObjectType $ + OutObjectType + { name = "__Schema" + , descriptionMb = Just "__Schema description" + , interfaceTypes = [] + , fields = + HashMap.fromList + [ + ( "__typename" + , ValueResolver nonNullString $ pure "__Schema" + ) + , + ( "queryType" + , ValueResolver queryTypeType $ pure queryTypeObj + ) + , + ( "mutationType" + , case accessMode of + ReadOnly -> ValueResolver mutationTypeType $ pure Null + WriteOnly -> + ValueResolver mutationTypeType $ pure mutationTypeObj + ReadAndWrite -> + ValueResolver mutationTypeType $ pure mutationTypeObj + ) + , + ( "subscriptionType" + , -- AirGQL doesn't support Subscriptions yet + ValueResolver subscriptionTypeType $ pure Null + ) + , + ( "types" + , ValueResolver typesField $ + pure $ + List $ + concat typesForTables + <> comparisonTypes accessMode + <> [orderingTermType] + <> [ queryTypeObj + , case accessMode of + ReadOnly -> Null + WriteOnly -> mutationTypeObj + ReadAndWrite -> mutationTypeObj + , booleanType + , intType + , floatType + , stringType + , Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "SCALAR") + , ("name", "ID") + , + ( "description" + , "The `ID` scalar type represents a unique identifier, \ + \often used to refetch an object or as key for a cache. \ + \The ID type appears in a JSON response as a String; \ + \however, it is not intended to be human-readable. 
\ + \When expected as an input type, any string \ + \(such as `\"4\"`) or integer (such as `4`) input value \ + \will be accepted as an ID." + ) + ] + , Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "SCALAR") + , ("name", "Upload") + , + ( "description" + , "The `Upload` scalar type represents a file upload." + ) + ] + , Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "OBJECT") + , ("name", "__Schema") + , + ( "description" + , "A GraphQL Schema defines the capabilities of a GraphQL server. \ + \It exposes all available types and directives on the server, \ + \as well as the entry points for \ + \query, mutation, and subscription operations." + ) + , + ( "fields" + , List + [ Object $ + HashMap.fromList + [ ("name", "types") + , + ( "description" + , "A list of all types supported by this server." + ) + , + ( "type" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "LIST") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "OBJECT") + , ("name", "__Type") + ] + ) + ] + ) + ] + ) + ] + ) + ] + , Object $ + HashMap.fromList + [ ("name", "queryType") + , + ( "description" + , "The type that query operations will be rooted at." + ) + , + ( "type" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "OBJECT") + , ("name", "__Type") + ] + ) + ] + ) + ] + , Object $ + HashMap.fromList + [ ("name", "mutationType") + , + ( "description" + , "If this server supports mutation, the type \ + \that mutation operations will be rooted at." + ) + , getTypeTuple "OBJECT" "__Type" + ] + , Object $ + HashMap.fromList + [ ("name", "subscriptionType") + , + ( "description" + , "If this server support subscription, the type \ + \that subscription operations will be rooted at." 
+ ) + , getTypeTuple "OBJECT" "__Type" + ] + , Object $ + HashMap.fromList + [ ("name", "directives") + , + ( "description" + , "A list of all directives supported by this server." + ) + , + ( "type" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "LIST") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "OBJECT") + , ("name", "__Directive") + ] + ) + ] + ) + ] + ) + ] + ) + ] + ] + ) + ] + , Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "OBJECT") + , ("name", "__Type") + , + ( "description" + , "The fundamental unit of any GraphQL Schema is the type. \ + \There are many kinds of types in GraphQL as represented by the `__TypeKind` enum.\n\n\ + \Depending on the kind of a type, certain fields describe information about that type. \ + \Scalar types provide no information beyond a name and description, while Enum types provide their values. \ + \Object and Interface types provide the fields they describe. \ + \Abstract types, Union and Interface, provide the Object types possible at runtime. \ + \List and NonNull types compose other types." 
+ ) + , + ( "fields" + , List + [ Object $ + HashMap.fromList + [ ("name", "kind") + , + ( "type" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "ENUM") + , ("name", "__TypeKind") + ] + ) + ] + ) + ] + , Object $ + HashMap.fromList + [ ("name", "name") + , -- Don't know why not "NON_NULL" + getTypeTuple "SCALAR" "String" + ] + , descriptionField + , Object $ + HashMap.fromList + [ ("name", "fields") + , + ( "type" + , Object $ + HashMap.fromList + [ ("kind", "LIST") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "OBJECT") + , ("name", "__Field") + ] + ) + ] + ) + ] + ) + , + ( "args" + , List + [ Object $ + HashMap.fromList + [ ("name", "includeDeprecated") + , getTypeTuple "SCALAR" "Boolean" + , -- Don't know why this has to be a string + ("defaultValue", "false") + ] + ] + ) + ] + , Object $ + HashMap.fromList + [ ("name", "interfaces") + , + ( "type" + , Object $ + HashMap.fromList + [ ("kind", "LIST") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "OBJECT") + , ("name", "__Type") + ] + ) + ] + ) + ] + ) + ] + , Object $ + HashMap.fromList + [ ("name", "possibleTypes") + , + ( "type" + , Object $ + HashMap.fromList + [ ("kind", "LIST") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "OBJECT") + , ("name", "__Type") + ] + ) + ] + ) + ] + ) + ] + , Object $ + HashMap.fromList + [ ("name", "enumValues") + , + ( "type" + , Object $ + HashMap.fromList + [ ("kind", "LIST") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "OBJECT") + , ("name", "__EnumValue") + ] + ) + ] + ) + ] + ) + , + ( "args" + , List + [ Object $ + HashMap.fromList + [ ("name", 
"includeDeprecated") + , getTypeTuple "SCALAR" "Boolean" + , -- Don't know why this has to be a string + ("defaultValue", "false") + ] + ] + ) + ] + , Object $ + HashMap.fromList + [ ("name", "inputFields") + , + ( "type" + , Object $ + HashMap.fromList + [ ("kind", "LIST") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "OBJECT") + , ("name", "__InputValue") + ] + ) + ] + ) + ] + ) + ] + , Object $ + HashMap.fromList + [ ("name", "ofType") + , getTypeTuple "OBJECT" "__Type" + ] + ] + ) + ] + , Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "ENUM") + , ("name", "__TypeKind") + , + ( "description" + , "An enum describing what kind of type a given `__Type` is." + ) + , + ( "enumValues" + , List + [ Object $ + HashMap.fromList + [ ("name", "SCALAR") + , + ( "description" + , "Indicates this type is a scalar." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "OBJECT") + , + ( "description" + , "Indicates this type is an object. `fields` and `interfaces` are valid fields." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "INTERFACE") + , + ( "description" + , "Indicates this type is an interface. `fields` and `possibleTypes` are valid fields." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "UNION") + , + ( "description" + , "Indicates this type is a union. `possibleTypes` is a valid field." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "ENUM") + , + ( "description" + , "Indicates this type is an enum. `enumValues` is a valid field." 
+ ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "INPUT_OBJECT") + , + ( "description" + , "Indicates this type is an input object. `inputFields` is a valid field." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "LIST") + , + ( "description" + , "Indicates this type is a list. `ofType` is a valid field." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "NON_NULL") + , + ( "description" + , "Indicates this type is a non-null. `ofType` is a valid field." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + ] + ) + ] + , Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "OBJECT") + , ("name", "__Field") + , + ( "description" + , "Object and Interface types are described by a list of Fields, each of which has a name, potentially a list of arguments, and a return type." + ) + , + ( "fields" + , List + [ nameField + , descriptionField + , argsFieldValue + , typeFieldValue + , isDeprecatedFieldValue + , Object $ + HashMap.fromList + [ ("name", "deprecationReason") + , getTypeTuple "SCALAR" "String" + ] + ] + ) + ] + , Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "OBJECT") + , ("name", "__InputValue") + , + ( "description" + , "Arguments provided to Fields or Directives and the input fields of an InputObject are represented as Input Values which describe their type and optionally a default value." + ) + , + ( "fields" + , List + [ nameField + , descriptionField + , typeFieldValue + , Object $ + HashMap.fromList + [ ("name", "defaultValue") + , + ( "description" + , "A GraphQL-formatted string representing \ + \the default value for this input value." 
+ ) + , getTypeTuple "SCALAR" "String" + ] + ] + ) + ] + , Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "OBJECT") + , ("name", "__EnumValue") + , + ( "description" + , "One possible value for a given Enum. Enum values are unique values, not a placeholder for a string or numeric value. However an Enum value is returned in a JSON response as a string." + ) + , + ( "fields" + , List + [ nameField + , descriptionField + , isDeprecatedFieldValue + , Object $ + HashMap.fromList + [ ("name", "deprecationReason") + , getTypeTuple "SCALAR" "String" + ] + ] + ) + ] + , Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "OBJECT") + , ("name", "__Directive") + , + ( "description" + , "A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.\n\nIn some cases, you need to provide options to alter GraphQL's execution behavior in ways field arguments will not suffice, such as conditionally including or skipping a field. Directives provide this by describing additional information to the executor." + ) + , + ( "fields" + , List + [ nameField + , descriptionField + , locationsFieldValue + , argsFieldValue + ] + ) + ] + , Object $ + HashMap.fromList + [ ("__typename", "__Type") + , ("kind", "ENUM") + , ("name", "__DirectiveLocation") + , + ( "description" + , "A Directive can be adjacent to many parts of the GraphQL language, a __DirectiveLocation describes one such possible adjacencies." + ) + , + ( "enumValues" + , List + [ Object $ + HashMap.fromList + [ ("name", "QUERY") + , + ( "description" + , "Location adjacent to a query operation." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "MUTATION") + , + ( "description" + , "Location adjacent to a mutation operation." 
+ ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "SUBSCRIPTION") + , + ( "description" + , "Location adjacent to a subscription operation." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "FIELD") + , + ( "description" + , "Location adjacent to a field." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "FRAGMENT_DEFINITION") + , + ( "description" + , "Location adjacent to a fragment definition." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "FRAGMENT_SPREAD") + , + ( "description" + , "Location adjacent to a fragment spread." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "INLINE_FRAGMENT") + , + ( "description" + , "Location adjacent to an inline fragment." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "VARIABLE_DEFINITION") + , + ( "description" + , "Location adjacent to a variable definition." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "SCHEMA") + , + ( "description" + , "Location adjacent to a schema definition." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "SCALAR") + , + ( "description" + , "Location adjacent to a scalar definition." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "OBJECT") + , + ( "description" + , "Location adjacent to an object type definition." 
+ ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "FIELD_DEFINITION") + , + ( "description" + , "Location adjacent to a field definition." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "ARGUMENT_DEFINITION") + , + ( "description" + , "Location adjacent to an argument definition." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "INTERFACE") + , + ( "description" + , "Location adjacent to an interface definition." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "UNION") + , + ( "description" + , "Location adjacent to a union definition." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "ENUM") + , + ( "description" + , "Location adjacent to an enum definition." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "ENUM_VALUE") + , + ( "description" + , "Location adjacent to an enum value definition." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "INPUT_OBJECT") + , + ( "description" + , "Location adjacent to an input object \ + \type definition." + ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + , Object $ + HashMap.fromList + [ ("name", "INPUT_FIELD_DEFINITION") + , + ( "description" + , "Location adjacent to an input object \ + \field definition." 
+ ) + , ("isDeprecated", Boolean False) + , ("deprecationReason", Null) + ] + ] + ) + ] + ] + & filter (/= Null) + ) + , + ( "directives" + , ValueResolver directivesType $ + pure $ + List + [ Object $ + HashMap.fromList + [ ("__typename", "__Directive") + , ("name", "skip") + , + ( "description" + , "Directs the executor to skip this field or fragment \ + \when the `if` argument is true." + ) + , + ( "locations" + , List ["INLINE_FRAGMENT", "FRAGMENT_SPREAD", "FIELD"] + ) + , + ( "args" + , List + [ Object $ + HashMap.fromList + [ ("name", "if") + , ("description", "Skipped when true.") + , ("defaultValue", Null) + , + ( "type" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "SCALAR") + , ("name", "Boolean") + ] + ) + ] + ) + ] + ] + ) + ] + , Object $ + HashMap.fromList + [ ("__typename", "__Directive") + , ("name", "include") + , + ( "description" + , "Directs the executor to include this field or fragment \ + \only when the `if` argument is true." + ) + , + ( "locations" + , List ["INLINE_FRAGMENT", "FRAGMENT_SPREAD", "FIELD"] + ) + , + ( "args" + , List + [ Object $ + HashMap.fromList + [ ("name", "if") + , ("description", "Included when true.") + , ("defaultValue", Null) + , + ( "type" + , Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , Object $ + HashMap.fromList + [ ("kind", "SCALAR") + , ("name", "Boolean") + ] + ) + ] + ) + ] + ] + ) + ] + , Object $ + HashMap.fromList + [ ("__typename", "__Directive") + , ("name", "deprecated") + , + ( "description" + , "Marks an element of a GraphQL schema \ + \as no longer supported." + ) + , + ( "locations" + , List ["ENUM_VALUE", "FIELD_DEFINITION"] + ) + , + ( "args" + , List + [ Object $ + HashMap.fromList + [ ("name", "reason") + , + ( "description" + , "Explains why this element was deprecated, \ + \usually also including a suggestion \ + \for how to access supported similar data. 
\ + \Formatted using the Markdown syntax \ + \(as specified by \ + \[CommonMark](https://commonmark.org/)." + ) + , ("defaultValue", "\"No longer supported\"") + , getTypeTuple "SCALAR" "String" + ] + ] + ) + ] + ] + ) + ] + } + + +getSchemaField + :: Text + -> Connection + -> AccessMode + -> [TableEntryRaw] + -> IO (Field IO) +getSchemaField dbId conn accessMode tables = do + schemaFieldOutput <- getSchemaFieldOutput dbId conn accessMode tables + + pure $ + outFieldToField $ + OutField + { descriptionMb = Just "The schema" + , fieldType = schemaFieldOutput + , arguments = HashMap.empty + } + + +getSchemaResolver + :: Text + -> Connection + -> AccessMode + -> [TableEntryRaw] + -> IO (HashMap Text (Resolver IO)) +getSchemaResolver dbId conn accessMode tables = do + schemaField <- getSchemaField dbId conn accessMode tables + + pure $ + HashMap.singleton + "__schema" + (ValueResolver schemaField (pure Null)) + + +typeNameOutField :: Field m +typeNameOutField = + outFieldToField $ + OutField + { descriptionMb = Just "The type name" + , fieldType = Out.NonNullScalarType string + , arguments = HashMap.empty + } + + +typeNameResolver :: HashMap Text (Resolver IO) +typeNameResolver = + HashMap.singleton + "__typename" + (ValueResolver typeNameOutField $ pure "Query") diff --git a/source/AirGQL/Lib.hs b/source/AirGQL/Lib.hs new file mode 100644 index 0000000..01e5a11 --- /dev/null +++ b/source/AirGQL/Lib.hs @@ -0,0 +1,1272 @@ +{-# LANGUAGE QuasiQuotes #-} +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +{-# HLINT ignore "Use list comprehension" #-} +{-# HLINT ignore "Replace case with maybe" #-} +{-# HLINT ignore "Use tuple-section" #-} + +module AirGQL.Lib ( + AccessMode (..), + ColumnEntry (..), + GqlTypeName (..), + getColumns, + getRowidColumnName, + getTables, + getTableNames, + getColumnNames, + getEnrichedTables, + ObjectType (..), + parseSql, + replaceCaseInsensitive, + sanitizeSql, + sqlDataToAesonValue, + sqlDataToText, + SQLPost (..), + sqlTypeNameToGQLTypeName, 
+ TableEntryRaw (..), + TableEntry (..), + UniqueConstraint (..), + ReferencesConstraint (..), + ReferencesConstraintColumns (..), + CheckConstraint (..), + sqlite, -- useful for pretty printing + stringToGqlTypeName, + lintTableCreationCode, + resolveReferencesConstraintColumns, + resolveReferencesConstraint, +) +where + +import Protolude ( + Applicative (pure), + Bool (False, True), + Either (Left, Right), + Eq ((/=), (==)), + Exception (toException), + Generic, + IO, + Int, + Maybe (Just, Nothing), + Semigroup ((<>)), + Show, + Text, + notElem, + otherwise, + show, + ($), + (&), + (&&), + (<$>), + (<&>), + (>>=), + (||), + ) +import Protolude qualified as P + +import AirGQL.Utils (collectAllErrorsAsText, quoteText) +import Control.Monad (MonadFail (fail)) +import Control.Monad.Catch (catchAll) +import Data.Aeson (FromJSON, ToJSON, Value (Bool, Null, Number, String)) +import Data.Scientific qualified as Scientific +import Data.Text (isInfixOf, toUpper) +import Data.Text qualified as T +import Database.SQLite.Simple ( + Connection, + FromRow, + ResultError (ConversionFailed, errHaskellType, errMessage, errSQLType), + SQLData (SQLBlob, SQLFloat, SQLInteger, SQLNull, SQLText), + query_, + ) +import Database.SQLite.Simple qualified as SS +import Database.SQLite.Simple.FromField (FromField (fromField), fieldData) +import Database.SQLite.Simple.Ok (Ok (Errors, Ok)) +import Database.SQLite.Simple.QQ qualified as SS +import DoubleXEncoding (doubleXEncodeGql) +import Language.SQL.SimpleSQL.Dialect ( + Dialect ( + diAppKeywords, + diAutoincrement, + diBackquotedIden, + diKeywords, + diLimit, + diSquareBracketQuotedIden + ), + ansi2011, + ) +import Language.SQL.SimpleSQL.Parse (ParseError, parseStatement) +import Language.SQL.SimpleSQL.Pretty (prettyScalarExpr) +import Language.SQL.SimpleSQL.Syntax ( + ColConstraint (ColCheckConstraint, ColNotNullConstraint), + ColConstraintDef (ColConstraintDef), + ColumnDef (ColumnDef), + InPredValue (InList), + ScalarExpr (In, NumLit, 
StringLit), + Statement (CreateTable), + TableElement (TableColumnDef), + ) +import Language.SQL.SimpleSQL.Syntax qualified as SQL +import Servant.Docs (ToSample (toSamples), singleSample) + + +data AccessMode = ReadOnly | WriteOnly | ReadAndWrite + deriving (Eq, Show) + + +data ObjectType = Table | Index | View | Trigger + deriving (Show, Eq, Generic) + + +instance ToJSON ObjectType + + +instance FromJSON ObjectType + + +instance FromField ObjectType where + fromField fData = case fieldData fData of + SQLText "table" -> Ok Table + SQLText "index" -> Ok Index + SQLText "view" -> Ok View + SQLText "trigger" -> Ok Trigger + sqlData -> + Errors + [ toException $ + ConversionFailed + { errSQLType = "Object Type" + , errHaskellType = "String" + , errMessage = + "\"" <> show sqlData <> "\" is not a vaild object type" + } + ] + + +data TableEntryRaw = TableEntryRaw + { name :: Text + , tbl_name :: Text + , object_type :: ObjectType + , rootpage :: Int + , sql :: Text + } + deriving (Show, Eq, Generic) + + +instance ToJSON TableEntryRaw +instance FromRow TableEntryRaw + + +data UniqueConstraint = UniqueConstraint + { name :: Maybe Text + , columns :: [Text] + } + deriving (Show, Eq, Generic) + + +instance ToJSON UniqueConstraint + + +data ReferencesConstraintColumns + = -- | The "to" column is implicit. 
+ -- Eg: `a TEXT REFERENCES other_table` + ImplicitColumns Text + | -- | Explicit (from, to) pairs + ExplicitColumns [(Text, Text)] + deriving (Show, Eq, Generic) + + +instance ToJSON ReferencesConstraintColumns + + +data ReferencesConstraint = ReferencesConstraint + { name :: Maybe Text + , table :: Text + , columns :: ReferencesConstraintColumns + } + deriving (Show, Eq, Generic) + + +instance ToJSON ReferencesConstraint + + +data CheckConstraint = CheckConstraint + { name :: Maybe Text + , predicate :: Text + , columns :: Maybe [Text] + } + deriving (Show, Eq, Generic) + + +instance ToJSON CheckConstraint + + +data TableEntry = TableEntry + { name :: Text + , tbl_name :: Text + , object_type :: ObjectType + , rootpage :: Int + , sql :: Text + , statement :: Statement + , uniqueConstraints :: [UniqueConstraint] + , referencesConstraints :: [ReferencesConstraint] + , checkConstraints :: [CheckConstraint] + , columns :: [ColumnEntry] + } + deriving (Show, Eq, Generic) + + +-- | As requested from SQLite +data ColumnEntryRaw = ColumnEntryRaw + { cid :: Int + , column_name :: Text + , datatype :: Text + , notnull :: Int -- TODO: Should be boolean + , dflt_value :: Maybe Text + , primary_key :: Int -- TODO: Should be boolean + , -- See the docs for the different meanings: + -- https://www.sqlite.org/pragma.html#pragma_table_xinfo + -- - 0 means normal + -- - 1 means hidden column in a virtual table + -- - 2 and 3 mean generated columns + hidden :: Int + } + deriving (Show, Eq, Generic) + + +instance FromRow ColumnEntryRaw + + +data GqlTypeName = GqlTypeName + { root :: Text + , full :: Text + } + deriving (Show, Eq, Generic) + + +instance ToJSON GqlTypeName + + +-- | Enhanced with generated information from SQL query "CREATE TABLE" +data ColumnEntry = ColumnEntry + { column_name :: Text + , column_name_gql :: Text + , datatype :: Text + -- ^ double-X-encoded GQL identifiers + , datatype_gql :: Maybe GqlTypeName + , select_options :: Maybe [Text] + , notnull :: Bool + , 
isGenerated :: Bool + , isUnique :: Bool + , isOmittable :: Bool + -- ^ If column is NON NULL, but will be set automatically + , dflt_value :: Maybe Text + , primary_key :: Bool + } + deriving (Show, Eq, Generic) + + +instance ToJSON ColumnEntry + + +data ParsedTable = ParsedTable + { uniqueConstraints :: [UniqueConstraint] + , referencesConstraints :: [ReferencesConstraint] + , checkConstraints :: [CheckConstraint] + , statement :: Statement + } + deriving (Show, Eq, Generic) + + +getTables :: Connection -> IO [TableEntryRaw] +getTables connection = do + query_ + connection + [SS.sql| + SELECT name, tbl_name, type, rootpage, sql + FROM sqlite_master + WHERE + type == 'table' OR + type == 'view' + |] + :: IO [TableEntryRaw] + + +getTableNames :: Connection -> IO [Text] +getTableNames connection = do + results :: [SS.Only Text] <- + query_ + connection + [SS.sql| + SELECT tbl_name + FROM sqlite_master + WHERE type='table' or type='view' + |] + + pure (SS.fromOnly <$> results) + + +getColumnNames :: Connection -> Text -> IO [Text] +getColumnNames connection tableName = do + results :: [SS.Only Text] <- + query_ + connection + $ SS.Query + $ "SELECT name FROM pragma_table_xinfo(" <> quoteText tableName <> ")" + pure (SS.fromOnly <$> results) + + +-- TODO: investigate whether we ever want to quote the result +nameAsText :: SQL.Name -> Text +nameAsText = \case + SQL.Name _ name -> T.pack name + + +getFirstName :: Maybe [SQL.Name] -> Maybe Text +getFirstName namesMb = do + names <- namesMb + first <- P.head names + pure (nameAsText first) + + +getColumnUniqueConstraint + :: Text + -> SQL.ColConstraintDef + -> Maybe UniqueConstraint +getColumnUniqueConstraint col_name = \case + SQL.ColConstraintDef names SQL.ColUniqueConstraint -> + Just $ + UniqueConstraint + { name = getFirstName names + , columns = [col_name] + } + _ -> Nothing + + +tableUniqueConstraints :: SQL.TableElement -> [UniqueConstraint] +tableUniqueConstraints = \case + SQL.TableConstraintDef names 
(SQL.TableUniqueConstraint columns) -> + [ UniqueConstraint + { name = getFirstName names + , columns = P.fmap nameAsText columns + } + ] + SQL.TableColumnDef (SQL.ColumnDef col_name _ _ constraints) -> + P.mapMaybe (getColumnUniqueConstraint (nameAsText col_name)) constraints + _ -> [] + + +getColumnCheckConstraint + :: Text + -> SQL.ColConstraintDef + -> Maybe CheckConstraint +getColumnCheckConstraint col_name = \case + SQL.ColConstraintDef names (SQL.ColCheckConstraint expr) -> + Just $ + CheckConstraint + { name = getFirstName names + , columns = Just [col_name] + , predicate = T.pack $ prettyScalarExpr sqlite expr + } + _ -> Nothing + + +tableCheckConstraints :: SQL.TableElement -> [CheckConstraint] +tableCheckConstraints = \case + SQL.TableConstraintDef names (SQL.TableCheckConstraint expr) -> + [ CheckConstraint + { name = getFirstName names + , predicate = T.pack $ prettyScalarExpr sqlite expr + , -- not sure how to do this properly + columns = Nothing + } + ] + SQL.TableColumnDef (SQL.ColumnDef col_name _ _ constraints) -> + P.mapMaybe (getColumnCheckConstraint (nameAsText col_name)) constraints + _ -> [] + + +getColumnReferencesConstraint + :: Text + -> SQL.ColConstraintDef + -> P.Either Text (Maybe ReferencesConstraint) +getColumnReferencesConstraint col_name = \case + SQL.ColConstraintDef + names + (SQL.ColReferencesConstraint table_names foreign_col_name _ _ _) -> do + table_name <- + P.note "Column references constraint has no table name" $ + P.head table_names + + pure $ + Just $ + ReferencesConstraint + { name = getFirstName names + , table = nameAsText table_name + , columns = case foreign_col_name of + Just explicit_col_name -> + ExplicitColumns [(col_name, nameAsText explicit_col_name)] + Nothing -> + ImplicitColumns col_name + } + _ -> pure Nothing + + +tableReferencesConstraints + :: SQL.TableElement + -> P.Either Text [ReferencesConstraint] +tableReferencesConstraints = \case + SQL.TableConstraintDef + names + ( SQL.TableReferencesConstraint + 
self_columns + table_names + foreign_columns + _ + _ + _ + ) -> do + table_name <- + P.note "Table references constraint has no table name" $ + P.head table_names + + columns <- case (self_columns, foreign_columns) of + ([column], Nothing) -> + pure $ ImplicitColumns (nameAsText column) + (_, Nothing) -> + P.throwError + "References constraints where more than one column is \ + \implicit are not supported" + (columns, Just many_foreign_columns) -> do + P.when (P.length columns /= P.length many_foreign_columns) $ do + P.throwError + "Number of columns in references constraint \ + \must be equal" + + pure $ + ExplicitColumns $ + P.zip + (P.fmap nameAsText columns) + (P.fmap nameAsText many_foreign_columns) + + pure + [ ReferencesConstraint + { name = getFirstName names + , table = nameAsText table_name + , columns = columns + } + ] + SQL.TableColumnDef (SQL.ColumnDef col_name _ _ constraints) -> + -- => [ColumnConstraint] + constraints + -- => [Either Text (Maybe ColumnConstraint)] + <&> getColumnReferencesConstraint (nameAsText col_name) + -- => Either Text [Maybe ColumnConstraint] + & collectAllErrorsAsText + -- => Either Text [ColumnConstraint] + <&> P.catMaybes + _ -> pure [] + + +getTableUniqueIndexConstraints :: SS.Connection -> Text -> IO [UniqueConstraint] +getTableUniqueIndexConstraints connection tableName = do + indices :: [[SQLData]] <- + catchAll + ( SS.query + connection + [SS.sql| + SELECT sql + FROM sqlite_master + WHERE tbl_name = ? 
AND type = 'index' + |] + [tableName] + ) + (\_ -> pure []) + + indices + <&> \case + [SQLText sqlTxt] + -- Get column name from SQL query + | P.Right (SQL.CreateIndex True indexNames _ columns) <- + parseSql sqlTxt -> do + Just $ + UniqueConstraint + { name = nameAsText <$> P.head indexNames + , columns = nameAsText <$> columns + } + _ -> Nothing + & P.catMaybes + & pure + + +getSqlObjectName :: Statement -> Maybe Text +getSqlObjectName = \case + SQL.CreateTable names _ -> + names + & P.head + <&> nameAsText + SQL.CreateView _ _ names _ _ -> + names + >>= P.head + <&> nameAsText + _ -> Nothing + + +{-| Collects the different kinds of constraints found in a sql statement. + +An optional connection can be used to read existing indices for unique +constraints of columns added after table creation. +-} +collectTableConstraints + :: Maybe SS.Connection + -> Statement + -> IO (P.Either Text ParsedTable) +collectTableConstraints connectionMb statement = do + uniqueIndices <- case (connectionMb, getSqlObjectName statement) of + (Just conn, Just name) -> getTableUniqueIndexConstraints conn name + _ -> pure [] + case statement of + CreateTable _ elements -> do + let referencesConstraintsEither = + -- => [TableElemenet] + elements + -- => [Either Text TableElemenet] + & P.fmap tableReferencesConstraints + -- => Either Text [[TableElemenet]] + & collectAllErrorsAsText + -- => Either Text [TableElemenet] + & P.fmap P.join + + P.for referencesConstraintsEither $ \referencesConstraints -> do + pure $ + ParsedTable + { uniqueConstraints = + uniqueIndices + <> (elements >>= tableUniqueConstraints) + , referencesConstraints = referencesConstraints + , checkConstraints = elements >>= tableCheckConstraints + , statement = statement + } + _ -> + pure $ + P.Right $ + ParsedTable + { uniqueConstraints = uniqueIndices + , referencesConstraints = [] + , checkConstraints = [] + , statement = statement + } + + +enrichTableEntry + :: SS.Connection + -> TableEntryRaw + -> IO (P.Either Text 
TableEntry) +enrichTableEntry connection tableEntry@(TableEntryRaw{..}) = + case parseSql tableEntry.sql of + P.Left err -> pure $ P.Left (show err) + P.Right sqlStatement -> + collectTableConstraints (Just connection) sqlStatement + <&> P.fmap + ( \(ParsedTable{..}) -> + TableEntry{columns = [], ..} + ) + + +getEnrichedTables :: Connection -> IO (P.Either Text [TableEntry]) +getEnrichedTables connection = do + tables <- getTables connection + enriched <- P.for tables $ \table -> do + enrichedEither <- enrichTableEntry connection table + P.for enrichedEither $ \enriched@TableEntry{..} -> do + tableColumns <- + getColumnsFromParsedTableEntry + connection + enriched + pure $ + TableEntry + { columns = tableColumns + , .. + } + pure $ collectAllErrorsAsText enriched + + +{-| SQLite allows references constraints to not specify the exact column they +are referencing. This functions tries to recover that information by +looking for primary keys among the columns of the referenced table. +Note: we currently do not support having composite primary keys +referenced implicitly, as that would lead to multiple complications like: +- figuring out the correct order for the references +- having to perform the "enrichTableEntry" computation in two separate passes +-} +resolveReferencesConstraint :: [TableEntry] -> Text -> Maybe Text +resolveReferencesConstraint tables referencedTable = + -- => [(TableEntry, [ColumnEntry])] + tables + -- => Maybe (TableEntry, [ColumnEntry]) + & P.find (\table -> table.tbl_name == referencedTable) + -- => Maybe [ColumnEntry] + <&> (\table -> table.columns) + -- => Maybe ColumnEntry + >>= P.find (\column -> column.primary_key) + -- => Maybe Text + <&> (.column_name) + + +-- See the docs for `resolveReferencesConstraint` for details +resolveReferencesConstraintColumns + :: [TableEntry] + -> ReferencesConstraint + -> Maybe [(Text, Text)] +resolveReferencesConstraintColumns allEntries constraint = + case constraint.columns of + ExplicitColumns explicit 
-> Just explicit + ImplicitColumns from -> + case resolveReferencesConstraint allEntries constraint.table of + Just to -> Just [(from, to)] + Nothing -> Nothing + + +-- | Returns a set of warnings related to a given table. +lintTable :: [TableEntry] -> ParsedTable -> [Text] +lintTable allEntries parsed = + let + rowidReferenceWarnings = + parsed.referencesConstraints + & P.mapMaybe + ( \constraint -> + resolveReferencesConstraintColumns allEntries constraint + & P.fromMaybe [] + & P.find (\(_, to) -> to == "rowid") + <&> \case + (from, _to) -> + "Column " + <> quoteText from + <> " references the rowid column of table " + <> quoteText constraint.table + <> ".\n" + <> "This is not supported by SQLite:\n" + <> "https://www.sqlite.org/foreignkeys.html" + ) + in + rowidReferenceWarnings + + +{-| Lint the sql code for creating a table + +An optional connection can be used to retrieve the existing db data, which +is used for things like resolving implicit references constraints (where +the primary key is not explicitly given) +-} +lintTableCreationCode :: Maybe SS.Connection -> Statement -> IO [Text] +lintTableCreationCode connectionMb statement = do + constraintsEither <- collectTableConstraints connectionMb statement + allEntriesEither <- case connectionMb of + Just connection -> getEnrichedTables connection + Nothing -> pure $ Right [] + pure $ case (constraintsEither, allEntriesEither) of + (Right _, Left err) -> [err] + (Left err, Right _) -> [err] + (Left errL, Left errR) -> [errL, errR] + (Right parsed, Right allEntries) -> + lintTable allEntries parsed + + +getRowidColumnName :: [Text] -> Text +getRowidColumnName colNames + | "rowid" `notElem` colNames = "rowid" + | "_rowid_" `notElem` colNames = "_rowid_" + | "oid" `notElem` colNames = "oid" + | otherwise = "rowid" -- TODO: Return error to user + + +columnDefName :: ColumnDef -> Text +columnDefName (ColumnDef name _ _ _) = nameAsText name + + +-- Computes whether a column is NOT NULL +columnIsNonNull :: 
SQL.ColumnDef -> Bool +columnIsNonNull (ColumnDef _ _ _ constraints) = + let isNotNullConstraint = \case + ColConstraintDef _ ColNotNullConstraint -> True + _ -> False + in P.any isNotNullConstraint constraints + + +-- For a single column, returns selectable values +-- E.g. ("color", (SelectOptions ["red", "green", "blue"])) +columnSelectOptions :: SQL.ColumnDef -> Maybe SelectOptions +columnSelectOptions (ColumnDef _ _ _ colConstraints) = + let + getSelectOptions + :: ColConstraintDef + -> Maybe SelectOptions + getSelectOptions = \case + ColConstraintDef + _ + (ColCheckConstraint (In _ _ (InList options))) -> + let + textOnlyOptions = + options + <&> \case + StringLit _ _ value -> + T.pack value + NumLit value -> + T.pack value + _ -> "UNSUPPORTED" + in + Just (SelectOptions textOnlyOptions) + _ -> Nothing + in + colConstraints + & P.mapMaybe getSelectOptions + & P.head + + +getColumnsFromParsedTableEntry + :: Connection + -> TableEntry + -> IO [ColumnEntry] +getColumnsFromParsedTableEntry connection tableEntry = do + keyColumns :: [[SQLData]] <- + query_ connection $ + SS.Query $ + "SELECT * FROM pragma_index_info(" + <> quoteText tableEntry.tbl_name + <> ")" + + -- TODO: Catch only SQL specific exceptions + colEntriesRaw :: [ColumnEntryRaw] <- + catchAll + ( query_ connection $ + SS.Query $ + "SELECT * FROM pragma_table_xinfo(" + <> quoteText tableEntry.tbl_name + <> ")" + ) + ( \exception -> do + P.putErrText $ show exception + pure [] + ) + + let + tableElementsMb = case tableEntry.statement of + SQL.CreateTable _ tableElements -> + Just tableElements + _ -> Nothing + + columnDefs = case tableElementsMb of + Just tableElements -> + tableElements + <&> \case + TableColumnDef columnDef -> Just columnDef + _ -> Nothing + & P.catMaybes + Nothing -> [] + + -- As described here: https://www.sqlite.org/withoutrowid.html (Point 5) + hasRowId :: Bool + hasRowId = P.null keyColumns + + colNames :: [Text] + colNames = colEntriesRaw <&> \c -> c.column_name + + 
rowIdColName :: Text + rowIdColName = getRowidColumnName colNames + + rowIdColumnEntry :: ColumnEntry + rowIdColumnEntry = + ColumnEntry + { column_name = rowIdColName + , column_name_gql = rowIdColName + , datatype = "INTEGER" + , datatype_gql = Just $ stringToGqlTypeName "Int" + , select_options = P.Nothing + , -- While the rowid is actually NOT NULL, + -- it must be set to false here + -- to show in the GraphQL docs that it can be omitted + -- since it will be set automatically. + notnull = False + , isUnique = False + , isOmittable = True + , isGenerated = False + , dflt_value = P.Nothing + , primary_key = True + } + + let + entries = + colEntriesRaw <&> \(ColumnEntryRaw{..}) -> do + let + columnDefMb = P.find (\d -> columnDefName d == column_name) columnDefs + selectOpts = columnDefMb >>= columnSelectOptions + + ColumnEntry + { column_name_gql = doubleXEncodeGql column_name + , datatype_gql = + sqlTypeNameToGQLTypeName + datatype + ( P.const + (tableEntry.tbl_name <> "_" <> column_name) + <$> selectOpts + ) + , select_options = selectOpts <&> unSelectOptions + , isUnique = + P.any + (\constraint -> column_name `P.elem` constraint.columns) + tableEntry.uniqueConstraints + , primary_key = primary_key == 1 + , isOmittable = + (primary_key == 1 && T.isPrefixOf "int" (T.toLower datatype)) + || P.isJust dflt_value + , notnull = + notnull == 1 || case columnDefMb of + Just columnDef -> columnIsNonNull columnDef + Nothing -> False + , -- See the comment on the `hidden` property of + -- the `ColumnEntryRaw` type for an explanation. + isGenerated = hidden == 2 || hidden == 3 + , .. 
+ } + -- Views don't have a rowid column + -- (https://stackoverflow.com/q/38519169) + rowidColumns = + if hasRowId && tableEntry.object_type /= View + then [rowIdColumnEntry] + else [] + + pure $ rowidColumns <> entries + + +getColumns :: Text -> Connection -> Text -> IO [ColumnEntry] +getColumns dbId connection tableName = + let + columns = do + tables :: [TableEntryRaw] <- + SS.query + connection + [SS.sql| + SELECT name, tbl_name, type, rootpage, sql + FROM sqlite_master + WHERE name == ? + |] + [tableName] + + table <- case P.head tables of + Just table -> pure table + Nothing -> + fail $ + P.fold + [ "Could not find table info for table " + , T.unpack tableName + , " of db " + , T.unpack dbId + ] + + enrichmentResultEither <- enrichTableEntry connection table + enrichingResult <- case enrichmentResultEither of + Right result -> pure result + Left err -> + fail $ + P.fold + [ "An error occurred while parsing table " + , T.unpack tableName + , " of db " + , T.unpack dbId + , ": " + , T.unpack err + ] + getColumnsFromParsedTableEntry connection enrichingResult + in + catchAll + columns + $ \err -> do + P.putErrText $ P.show err + pure [] + + +newtype SelectOptions = SelectOptions {unSelectOptions :: [Text]} + deriving (Show, Eq, Generic) + + +stringToGqlTypeName :: Text -> GqlTypeName +stringToGqlTypeName name = GqlTypeName{full = name, root = name} + + +{-| Computes storage class through type affinity + as described in https://www.sqlite.org/datatype3.html#affname + with an extension for boolean (Order is important) + TODO: Add Support for GraphQL's type "ID" +-} +sqlTypeNameToGQLTypeName :: Text -> Maybe Text -> Maybe GqlTypeName +sqlTypeNameToGQLTypeName sqliteType typeNameMb = + let + containsText text = + isInfixOf text $ toUpper sqliteType + + rootType + -- If it is a view, column might not have a type + | sqliteType == "" = Nothing + | containsText "INT" = Just "Int" + | containsText "CHAR" || containsText "CLOB" || containsText "TEXT" = + Just "String" + 
| containsText "BLOB" = Just "String" + | containsText "REAL" || containsText "FLOA" || containsText "DOUB" = + Just "Float" + | containsText "BOOL" = Just "Boolean" + | otherwise = Just "Int" + in + rootType <&> \root -> + GqlTypeName + { root = root + , full = case typeNameMb of + P.Just typeName -> doubleXEncodeGql (typeName <> "_" <> root) + P.Nothing -> root + } + + +sqlDataToText :: SQLData -> Text +sqlDataToText = \case + SQLInteger int64 -> show int64 + SQLFloat double -> show double + SQLText text -> text + SQLBlob _ -> "BLOB" + SQLNull -> "NULL" + + +-- | WARNING: Also change duplicate `sqlDataToGQLValue` +sqlDataToAesonValue :: Text -> SQLData -> Value +sqlDataToAesonValue datatype sqlData = case sqlData of + SQLInteger int64 -> + if isInfixOf "BOOL" $ toUpper datatype + then case int64 of + 0 -> Bool False + _ -> Bool True + else Number $ P.fromIntegral int64 -- Int32 + SQLFloat double -> Number $ Scientific.fromFloatDigits double + SQLText text -> String text + SQLBlob byteString -> String $ show byteString + SQLNull -> Null + + +{-| Case-insensitively replaces all occurrences of a substring within a string + with a replacement string. 
+ + Examples: + + >>> replaceCaseInsensitive "hello" "hi" "Hello World" + "hi World" + + >>> replaceCaseInsensitive "l" "L" "Hello World" + "HeLLo WorLd" +-} +replaceCaseInsensitive :: Text -> Text -> Text -> Text +replaceCaseInsensitive removable replacement txt = + let + len = T.length removable + process remaining result + | T.null remaining = result + | (remaining & T.take len & T.toLower) == (removable & T.toLower) = + process (remaining & T.drop len) (result <> replacement) + | otherwise = + process (remaining & T.drop 1) (result <> T.take 1 remaining) + in + process txt "" + + +{-| Replace rem(movable) with rep(lacement) +| and make sure its surrounded by spaces +-} +replaceWithSpace :: Text -> Text -> Text -> Text +replaceWithSpace rem rep txt = + txt + & replaceCaseInsensitive (" " <> rem <> " ") (" " <> rep <> " ") + & replaceCaseInsensitive (" " <> rem <> "\n") (" " <> rep <> "\n") + & replaceCaseInsensitive ("\n" <> rem <> " ") ("\n" <> rep <> " ") + & replaceCaseInsensitive ("\n" <> rem <> "\n") ("\n" <> rep <> "\n") + + +sanitizeSql :: Text -> Text +sanitizeSql sql = + sql + -- TODO: Remove after + -- https://github.com/JakeWheat/simple-sql-parser/issues/27 + & replaceWithSpace "if not exists" "" + -- TOOD: Remove after + -- https://github.com/JakeWheat/simple-sql-parser/issues/37 + & replaceCaseInsensitive "insert or abort " "insert " + & replaceCaseInsensitive "insert or fail " "insert " + & replaceCaseInsensitive "insert or ignore " "insert " + & replaceCaseInsensitive "insert or replace " "insert " + & replaceCaseInsensitive "insert or rollback " "insert " + -- Removing the JSON arrow operator seems to be enough + -- to make the parser accept all queries containing JSON operators + & T.replace "->" "" + -- https://www.sqlite.org/stricttables.html + & replaceCaseInsensitive ")strict" ")" + & replaceCaseInsensitive ") strict" ")" + & replaceCaseInsensitive ")\nstrict" ")" + & replaceCaseInsensitive ") \nstrict" ")" + -- TODO: Remove after + -- 
https://github.com/JakeWheat/simple-sql-parser/issues/20 + & ( \sqlQuery -> + if P.all + (\word -> word `P.elem` T.words (T.toLower sqlQuery)) + ["alter", "table", "rename"] + then "SELECT 0" -- Dummy statement to accept the query + else sqlQuery + ) + -- TODO: Remove after + -- https://github.com/JakeWheat/simple-sql-parser/issues/41 + & ( \sqlQuery -> + if P.all + (\word -> word `P.elem` T.words (T.toLower sqlQuery)) + ["create", "trigger", "on", "begin", "end"] + then "SELECT 0" -- Dummy statement to accept the query + else sqlQuery + ) + & replaceCaseInsensitive "drop trigger" "drop table" + & replaceCaseInsensitive "drop index" "drop table" + -- Uncomment unsupported "RETURNING" clause + -- TODO: Add support for DELETE and UPDATE with RETURNING + -- TODO: Remove after + -- https://github.com/JakeWheat/simple-sql-parser/issues/42 + & replaceCaseInsensitive ")returning " ") -- returning " + & replaceCaseInsensitive ") returning " ") -- returning " + & replaceCaseInsensitive ")\nreturning " ")\n-- returning " + & replaceCaseInsensitive ") \nreturning " ")\n-- returning " + -- TODO: Remove after + -- https://github.com/JakeWheat/simple-sql-parser/issues/43 + & replaceWithSpace "==" "=" + & replaceWithSpace "is not" "%$@_TEMP_@$%" + & replaceWithSpace "is" "=" + & replaceWithSpace "%$@_TEMP_@$%" "is not" + -- The internal table is created without column types + -- TODO: Remove after + -- https://github.com/JakeWheat/simple-sql-parser/issues/38#issuecomment-1413340116 + & replaceCaseInsensitive + "sqlite_sequence(name,seq)" + "sqlite_sequence(name TEXT,seq INT)" + -- TODO: Remove after + -- https://github.com/JakeWheat/simple-sql-parser/issues/40 + & replaceWithSpace "NOT NULL DEFAULT" "DEFAULT" + -- TODO: Remove after + -- https://github.com/JakeWheat/simple-sql-parser/issues/46 + & replaceCaseInsensitive "STORED" "" + & replaceCaseInsensitive "VIRTUAL" "" + & replaceWithSpace "GLOB" "LIKE" + + +-- | SQLite dialect +sqlite :: Dialect +sqlite = + ansi2011 + { 
diLimit = True + , diAutoincrement = True + , diAppKeywords = + ansi2011.diAppKeywords + <> [ "abs" + , -- https://www.sqlite.org/lang_mathfunc.html + "acos" + , "acosh" + , "asin" + , "asinh" + , "atan" + , "atan2" + , "atanh" + , "ceil" + , "ceiling" + , "cos" + , "cosh" + , "degrees" + , "exp" + , "floor" + , "ln" + , "log" + , "log" + , "log10" + , "log2" + , "mod" + , "pi" + , "pow" + , "power" + , "radians" + , "sin" + , "sinh" + , "sqrt" + , "tan" + , "tanh" + , "trunc" + ] + , diKeywords = + [ "abort" + , "action" + , "add" + , "after" + , "all" + , "alter" + , "always" + , "analyze" + , "and" + , "as" + , "asc" + , "attach" + , "autoincrement" + , "before" + , "begin" + , "between" + , "by" + , "cascade" + , "case" + , "cast" + , "check" + , "collate" + , "column" + , "commit" + , "conflict" + , "constraint" + , "create" + , "cross" + , "current" + , "current_date" + , "current_time" + , "current_timestamp" + , "database" + , "default" + , "deferrable" + , "deferred" + , "delete" + , "desc" + , "detach" + , "distinct" + , "do" + , "drop" + , "each" + , "else" + , "end" + , "escape" + , "except" + , "exclude" + , "exclusive" + , "exists" + , "explain" + , "fail" + , "filter" + , "first" + , "following" + , "for" + , "foreign" + , "from" + , "full" + , "generated" + , "glob" + , "group" + , "groups" + , "having" + , "if" + , "ignore" + , "immediate" + , "in" + , "index" + , "indexed" + , "initially" + , "inner" + , "insert" + , "instead" + , "intersect" + , "into" + , "is" + , "isnull" + , "join" + , "key" + , "last" + , "left" + , "like" + , "limit" + , "match" + , "materialized" + , "natural" + , "no" + , "not" + , "nothing" + , "notnull" + , -- although "null" is on the official list of keywords, adding it here + -- seems to break "select NULL as ..." 
statements
StringL + , quotePat = \_ -> + error + "Illegal raw string QuasiQuote \ + \(allowed as expression only, used as a pattern)" + , quoteType = \_ -> + error + "Illegal raw string QuasiQuote \ + \(allowed as expression only, used as a type)" + , quoteDec = \_ -> + error + "Illegal raw string QuasiQuote \ + \(allowed as expression only, used as a declaration)" + } diff --git a/source/AirGQL/Servant/Database.hs b/source/AirGQL/Servant/Database.hs new file mode 100644 index 0000000..0519721 --- /dev/null +++ b/source/AirGQL/Servant/Database.hs @@ -0,0 +1,46 @@ +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +{-# HLINT ignore "Use maybe" #-} +{-# HLINT ignore "Avoid lambda" #-} +{-# HLINT ignore "Replace case with maybe" #-} + +module AirGQL.Servant.Database ( + apiDatabaseSchemaGetHandler, + apiDatabaseVacuumPostHandler, +) where + +import Protolude ( + Applicative (pure), + MonadIO (liftIO), + Monoid (mempty), + ($), + ) + +import Data.Aeson (Object) +import Data.Text (Text) +import Database.SQLite.Simple qualified as SS +import Servant.Server qualified as Servant + +import AirGQL.ExternalAppContext (ExternalAppContext) +import AirGQL.Utils ( + getMainDbPath, + runSqliteCommand, + withRetryConn, + ) + + +apiDatabaseSchemaGetHandler + :: ExternalAppContext + -> Text + -> Servant.Handler Text +apiDatabaseSchemaGetHandler ctx dbId = do + runSqliteCommand ctx (getMainDbPath dbId) ".schema" + + +apiDatabaseVacuumPostHandler + :: Text + -> Servant.Handler Object +apiDatabaseVacuumPostHandler dbId = do + liftIO $ withRetryConn (getMainDbPath dbId) $ \conn -> + SS.execute_ conn "VACUUM" + pure mempty diff --git a/source/AirGQL/Servant/GraphQL.hs b/source/AirGQL/Servant/GraphQL.hs new file mode 100644 index 0000000..89bdd59 --- /dev/null +++ b/source/AirGQL/Servant/GraphQL.hs @@ -0,0 +1,149 @@ +module AirGQL.Servant.GraphQL ( + gqlQueryGetHandler, + gqlQueryPostHandler, + playgroundDefaultQueryHandler, + readOnlyGqlPostHandler, + writeOnlyGqlPostHandler, +) where + +import 
Protolude ( + Applicative (pure), + MonadIO (liftIO), + Monoid (mempty), + Semigroup ((<>)), + ($), + (&), + ) +import Protolude qualified as P + +import Control.Monad.Catch (catchAll) +import Data.Aeson (Object) +import Data.Text (Text) +import Data.Text qualified as T +import DoubleXEncoding (doubleXEncodeGql) +import Servant (NoContent, err303, errHeaders) +import Servant.Server qualified as Servant +import System.Directory (makeAbsolute) + +import AirGQL.Lib ( + AccessMode (ReadOnly, WriteOnly), + column_name, + getColumns, + getTableNames, + ) +import AirGQL.ServerUtils (executeQuery) +import AirGQL.Types.SchemaConf (SchemaConf (accessMode), defaultSchemaConf) +import AirGQL.Types.Types (GQLPost (operationName, query, variables)) +import AirGQL.Utils ( + getDbDir, + getMainDbPath, + getReadOnlyFilePath, + throwErr400WithMsg, + throwErr404WithMsg, + withRetryConn, + ) +import System.FilePath (pathSeparator, takeDirectory) + + +gqlQueryGetHandler :: Text -> Servant.Handler NoContent +gqlQueryGetHandler dbId = + P.throwError + err303 + { errHeaders = + [("Location", P.encodeUtf8 $ "/dbs/" <> dbId <> "/graphiql")] + } + + +gqlQueryPostHandler + :: SchemaConf + -> Text + -> GQLPost + -> Servant.Handler Object +gqlQueryPostHandler schemaConf dbIdOrPath gqlPost = do + let + handleNoDbError :: P.SomeException -> Servant.Handler a + handleNoDbError excpetion = do + let errMsg = P.show excpetion + + if "unable to open database file" `T.isInfixOf` errMsg + then + throwErr404WithMsg $ + "Database \"" <> dbIdOrPath <> "\" does not exist" + else do + P.putErrLn $ + "Error during execution of GraphQL query: " <> errMsg + throwErr400WithMsg errMsg + + catchAll + ( liftIO $ do + reqDir <- + if pathSeparator `T.elem` dbIdOrPath + then pure $ takeDirectory $ T.unpack dbIdOrPath + else makeAbsolute $ getDbDir dbIdOrPath + + executeQuery + schemaConf + dbIdOrPath + reqDir + gqlPost.query + (gqlPost.variables & P.fromMaybe mempty) + gqlPost.operationName + ) + handleNoDbError + + 
+readOnlyGqlPostHandler :: Text -> GQLPost -> Servant.Handler Object +readOnlyGqlPostHandler dbIdOrPath gqlPost = + liftIO $ do + reqDir <- makeAbsolute $ getReadOnlyFilePath dbIdOrPath + + executeQuery + defaultSchemaConf{accessMode = ReadOnly} + dbIdOrPath + reqDir + gqlPost.query + (gqlPost.variables & P.fromMaybe mempty) + gqlPost.operationName + + +writeOnlyGqlPostHandler :: Text -> GQLPost -> Servant.Handler Object +writeOnlyGqlPostHandler dbPath gqlPost = + liftIO $ do + reqDir <- makeAbsolute $ getReadOnlyFilePath dbPath + + executeQuery + defaultSchemaConf{accessMode = WriteOnly} + dbPath + reqDir + gqlPost.query + (gqlPost.variables & P.fromMaybe mempty) + gqlPost.operationName + + +playgroundDefaultQueryHandler + :: Text + -> Servant.Handler Text +playgroundDefaultQueryHandler dbId = do + liftIO $ withRetryConn (getMainDbPath dbId) $ \mainConn -> do + tableEntries <- getTableNames mainConn + + case tableEntries of + (headTable : _) -> do + cols <- getColumns dbId mainConn headTable + pure $ + P.fold + [ "query " + , doubleXEncodeGql headTable + , "Query {\n" + , " " + , doubleXEncodeGql headTable + , "( limit: 100 ) {\n" + , cols + & P.foldMap + ( \col -> + " " <> doubleXEncodeGql col.column_name <> "\n" + ) + , " }\n" + , "}" + ] + _ -> pure "" diff --git a/source/AirGQL/Servant/SqlQuery.hs b/source/AirGQL/Servant/SqlQuery.hs new file mode 100644 index 0000000..e128258 --- /dev/null +++ b/source/AirGQL/Servant/SqlQuery.hs @@ -0,0 +1,198 @@ +module AirGQL.Servant.SqlQuery ( + getAffectedTables, + sqlQueryPostHandler, +) +where + +import Protolude ( + Applicative (pure), + Either (Left, Right), + Maybe (Just, Nothing), + MonadIO (liftIO), + Semigroup ((<>)), + otherwise, + show, + when, + ($), + (&), + (*), + (-), + (/=), + (<&>), + (>), + ) +import Protolude qualified as P + +import Data.Aeson.Key qualified as Key +import Data.Aeson.KeyMap qualified as KeyMap +import Data.Text (Text) +import Data.Text qualified as T +import Data.Time (diffUTCTime, 
getCurrentTime, nominalDiffTimeToSeconds) +import Database.SQLite.Simple qualified as SS +import Language.SQL.SimpleSQL.Parse (ParseError (peFormattedError)) +import Language.SQL.SimpleSQL.Syntax (Statement (CreateTable)) +import Servant.Server qualified as Servant +import System.Timeout (timeout) + +import AirGQL.Config (defaultConfig, sqlTimeoutTime) +import AirGQL.Lib ( + SQLPost (query), + TableEntryRaw (sql, tbl_name), + getTables, + lintTableCreationCode, + parseSql, + sqlDataToAesonValue, + ) +import AirGQL.Types.PragmaConf (PragmaConf, getSQLitePragmas) +import AirGQL.Types.SqlQueryPostResult ( + SqlQueryPostResult ( + SqlQueryPostResult, + affectedTables, + columns, + errors, + rows, + runtimeSeconds + ), + resultWithErrors, + ) +import AirGQL.Utils ( + getMainDbPath, + throwErr400WithMsg, + withRetryConn, + ) + + +getAffectedTables :: [TableEntryRaw] -> [TableEntryRaw] -> [Text] +getAffectedTables pre post = + let + loop left right = do + case (left, right) of + ([], _) -> right <&> tbl_name + (_, []) -> left <&> tbl_name + (headLeft : tailLeft, headRight : tailRight) -> + case P.compare headLeft.tbl_name headRight.tbl_name of + P.LT -> headLeft.tbl_name : loop tailLeft right + P.GT -> headRight.tbl_name : loop left tailRight + P.EQ + | headLeft.sql /= headRight.sql -> + headLeft.tbl_name : loop tailLeft tailRight + | otherwise -> + loop tailLeft tailRight + in + loop + (P.sortOn tbl_name pre) + (P.sortOn tbl_name post) + + +sqlQueryPostHandler + :: PragmaConf + -> Text + -> SQLPost + -> Servant.Handler SqlQueryPostResult +sqlQueryPostHandler pragmaConf dbId sqlPost = do + let maxSqlQueryLength :: P.Int = 100_000 + + when (T.length sqlPost.query > maxSqlQueryLength) $ do + throwErr400WithMsg $ + "SQL query is too long (" + <> show (T.length sqlPost.query) + <> " characters, maximum is " + <> show maxSqlQueryLength + <> ")" + + validationErrors <- liftIO $ case parseSql sqlPost.query of + Left error -> pure [T.pack error.peFormattedError] + Right 
statement@(CreateTable _ _) -> + SS.withConnection (getMainDbPath dbId) $ \conn -> + lintTableCreationCode (Just conn) statement + _ -> pure [] + + case validationErrors of + [] -> do + let + dbFilePath = getMainDbPath dbId + microsecondsPerSecond = 1000000 :: P.Int + + timeoutTimeMicroseconds = + defaultConfig.sqlTimeoutTime + * microsecondsPerSecond + + sqlitePragmas <- liftIO $ getSQLitePragmas pragmaConf + + let + performSqlOperations = + withRetryConn dbFilePath $ \conn -> do + preTables <- getTables conn + + P.for_ sqlitePragmas $ SS.execute_ conn + SS.execute_ conn "PRAGMA foreign_keys = True" + + let query = SS.Query sqlPost.query + + columnNames <- SS.withStatement conn query $ \statement -> do + numCols <- SS.columnCount statement + P.for [0 .. (numCols - 1)] $ SS.columnName statement + + tableRowsMb :: Maybe [[SS.SQLData]] <- + timeout timeoutTimeMicroseconds $ SS.query_ conn query + changes <- SS.changes conn + + postTables <- getTables conn + + pure $ case tableRowsMb of + Just tableRows -> + Right (columnNames, tableRows, changes, preTables, postTables) + Nothing -> Left "Sql query execution timed out" + + startTime <- liftIO getCurrentTime + sqlResults <- + liftIO $ + P.catches + performSqlOperations + [ P.Handler $ + \(error :: SS.SQLError) -> pure $ Left $ show error + , P.Handler $ + \(error :: SS.ResultError) -> pure $ Left $ show error + , P.Handler $ + \(error :: SS.FormatError) -> pure $ Left $ show error + ] + endTime <- liftIO getCurrentTime + + let measuredTime = + nominalDiffTimeToSeconds + (diffUTCTime endTime startTime) + + case sqlResults of + Left error -> + pure $ resultWithErrors measuredTime [error] + Right (columnNames, tableRows, changes, preTables, postTables) -> do + -- TODO: Use GQL error format {"message": "…", "code": …, …} instead + let + keys = columnNames <&> Key.fromText + + rowList = + tableRows + <&> \row -> + row + <&> sqlDataToAesonValue "" + & P.zip keys + & KeyMap.fromList + + affectedTables = + if changes > 0 + 
then postTables <&> tbl_name + else getAffectedTables preTables postTables + + pure $ + SqlQueryPostResult + { rows = rowList + , columns = columnNames + , runtimeSeconds = measuredTime + , affectedTables = affectedTables + , errors = [] + } + _ -> + pure $ + resultWithErrors + 0 + validationErrors diff --git a/source/AirGQL/ServerUtils.hs b/source/AirGQL/ServerUtils.hs new file mode 100644 index 0000000..4cfbb4e --- /dev/null +++ b/source/AirGQL/ServerUtils.hs @@ -0,0 +1,71 @@ +module AirGQL.ServerUtils ( + executeQuery, +) where + +import Protolude ( + Applicative (pure), + Either (Left, Right), + FilePath, + IO, + Maybe (Just, Nothing), + toList, + ($), + (&), + (<&>), + ) +import Protolude qualified as P + +import Conduit (sourceToList) +import Control.Arrow ((>>>)) +import Data.Aeson (Object, Value (String)) +import Data.Text (Text) +import Data.Text qualified as T +import Database.SQLite.Simple qualified as SS +import Language.GraphQL.Error (Error (Error), Response (Response)) +import Language.GraphQL.JSON (graphql) +import System.FilePath (pathSeparator, (</>)) + +import AirGQL.GraphQL (getDerivedSchema) +import AirGQL.Lib (getTables) +import AirGQL.Types.SchemaConf (SchemaConf) +import AirGQL.Types.Types ( + GQLResponse (GQLResponse, data_, errors), + gqlResponseToObject, + ) + + +executeQuery + :: SchemaConf + -> Text + -> FilePath + -> Text + -> Object + -> Maybe Text + -> IO Object +executeQuery schemaConf dbIdOrPath reqDir query vars opNameMb = do + let dbFilePath = + if pathSeparator `T.elem` dbIdOrPath + then T.unpack dbIdOrPath + else reqDir </> "main.sqlite" + + theConn <- SS.open dbFilePath + tables <- getTables theConn + schema <- getDerivedSchema schemaConf theConn dbIdOrPath tables + result <- graphql schema opNameMb vars query + SS.close theConn + + case result of + Left errMsg -> do + errors <- sourceToList errMsg + pure $ + gqlResponseToObject $ + GQLResponse + { data_ = Nothing + , errors = + Just $ + errors + <&> ((\(Response _ errs) -> 
errs) >>> toList) + & P.concat + <&> (\(Error msg _ _) -> String msg) + } + Right response -> pure response diff --git a/source/AirGQL/Types/OutObjectType.hs b/source/AirGQL/Types/OutObjectType.hs new file mode 100644 index 0000000..7a43ace --- /dev/null +++ b/source/AirGQL/Types/OutObjectType.hs @@ -0,0 +1,28 @@ +module AirGQL.Types.OutObjectType ( + OutObjectType (OutObjectType, name, descriptionMb, interfaceTypes, fields), + outObjectTypeToObjectType, +) +where + +import Protolude (Maybe, Text, (&)) + +import Data.HashMap.Strict (HashMap) +import Language.GraphQL.Type (InterfaceType) +import Language.GraphQL.Type.Out qualified as Out + + +data OutObjectType m = OutObjectType + { name :: Text + , descriptionMb :: Maybe Text + , interfaceTypes :: [InterfaceType m] + , fields :: HashMap Text (Out.Resolver m) + } + + +outObjectTypeToObjectType :: OutObjectType m -> Out.ObjectType m +outObjectTypeToObjectType objectType = + Out.ObjectType + (objectType & name) + (objectType & (descriptionMb :: OutObjectType m -> Maybe Text)) + (objectType & interfaceTypes) + (objectType & fields) diff --git a/source/AirGQL/Types/PragmaConf.hs b/source/AirGQL/Types/PragmaConf.hs new file mode 100644 index 0000000..c027d20 --- /dev/null +++ b/source/AirGQL/Types/PragmaConf.hs @@ -0,0 +1,56 @@ +module AirGQL.Types.PragmaConf ( + PragmaConf (..), + getSQLitePragmas, + defaultConf, +) +where + +import Protolude ( + Bool (True), + IO, + Int, + Integer, + pure, + show, + ($), + (<>), + ) + +import Database.SQLite.Simple qualified as SS + + +data PragmaConf = PragmaConf + { maxPageCount :: Int + , hardHeapLimit :: Integer + , allowRecursTrig :: Bool + } + + +defaultConf :: PragmaConf +defaultConf = + PragmaConf + { maxPageCount = 4096 + , hardHeapLimit = 500_000_000 -- Bytes + , allowRecursTrig = True + } + + +-- | Get the SQLite pragmas to use for a database +getSQLitePragmas :: PragmaConf -> IO [SS.Query] +getSQLitePragmas pragConf = do + let + getPrag key value = + SS.Query $ "PRAGMA " <> 
key <> " = " <> value + + pure + [ getPrag "case_sensitive_like" "True" + , getPrag "foreign_keys" "True" + , -- TODO: Check if this really works + getPrag "hard_heap_limit" $ show @Integer pragConf.hardHeapLimit + , getPrag "max_page_count" $ show @Int pragConf.maxPageCount + , getPrag "recursive_triggers" $ show @Bool pragConf.allowRecursTrig + , -- TODO: Reactivate after https://sqlite.org/forum/forumpost/d7b9a365e0 + -- (Also activate in SqlQuery.hs) + -- , getPrag "trusted_schema" "False" + getPrag "writable_schema" "False" + ] diff --git a/source/AirGQL/Types/SchemaConf.hs b/source/AirGQL/Types/SchemaConf.hs new file mode 100644 index 0000000..19466e7 --- /dev/null +++ b/source/AirGQL/Types/SchemaConf.hs @@ -0,0 +1,26 @@ +module AirGQL.Types.SchemaConf ( + SchemaConf (..), + defaultSchemaConf, +) where + +import Protolude (Integer) + +import AirGQL.Lib (AccessMode (ReadAndWrite)) +import AirGQL.Types.PragmaConf (PragmaConf, defaultConf) + + +data SchemaConf = SchemaConf + { accessMode :: AccessMode + , pragmaConf :: PragmaConf + , maxRowsPerTable :: Integer + } + + +-- | Default schema configuration +defaultSchemaConf :: SchemaConf +defaultSchemaConf = + SchemaConf + { accessMode = ReadAndWrite + , pragmaConf = AirGQL.Types.PragmaConf.defaultConf + , maxRowsPerTable = 100_000 + } diff --git a/source/AirGQL/Types/SqlQueryPostResult.hs b/source/AirGQL/Types/SqlQueryPostResult.hs new file mode 100644 index 0000000..524ee19 --- /dev/null +++ b/source/AirGQL/Types/SqlQueryPostResult.hs @@ -0,0 +1,107 @@ +module AirGQL.Types.SqlQueryPostResult ( + SqlQueryPostResult (..), + resultWithErrors, +) +where + +import Protolude ( + Generic, + Show, + Text, + foldMap, + fromMaybe, + ($), + (&), + (<>), + ) + +import Control.Arrow ((>>>)) +import Data.Aeson ( + FromJSON, + Object, + ToJSON, + Value (Null, Number), + ) +import Data.Aeson.Encoding (list, pair, pairs) +import Data.Aeson.Key qualified as Key +import Data.Aeson.KeyMap qualified as KeyMap +import Data.Aeson.Types 
(toEncoding, (.=)) +import Data.Fixed (Pico) +import Servant.Docs (ToSample (toSamples), singleSample) + + +data SqlQueryPostResult = SqlQueryPostResult + { affectedTables :: [Text] + , rows :: [Object] + , columns :: [Text] -- Only necessary for order of columns in the result + , runtimeSeconds :: Pico -- Precision contained by `NominalDiffTime` + , errors :: [Text] + } + deriving (Show, Generic) + + +instance FromJSON SqlQueryPostResult + + +{-| Even though JSON objects are unordered by definition, +the fields (columns) must be returned in the requested order +as Elm relies on it for decoding. +-} +instance ToJSON SqlQueryPostResult where + toEncoding sqlQueryPostResult = + pairs $ + "affectedTables" .= sqlQueryPostResult.affectedTables + <> "rows" + `pair` ( sqlQueryPostResult.rows + & list + ( \(row :: Object) -> + -- Apply order of columns + sqlQueryPostResult.columns + & foldMap + ( Key.fromText + >>> ( \col -> + col + .= ( row + & KeyMap.lookup col + & fromMaybe Null + ) + ) + ) + & pairs + ) + ) + <> "runtimeSeconds" .= sqlQueryPostResult.runtimeSeconds + <> "errors" .= sqlQueryPostResult.errors + + +instance ToSample SqlQueryPostResult where + toSamples _ = + singleSample $ + SqlQueryPostResult + { affectedTables = ["users"] + , rows = + [ KeyMap.fromList + [ (Key.fromText "id", Number 1) + , (Key.fromText "name", "John") + ] + , KeyMap.fromList + [ (Key.fromText "id", Number 2) + , (Key.fromText "name", "Jane") + ] + ] + , columns = ["id", "name"] + , runtimeSeconds = 0.05 + , errors = [] + } + + +-- | Construct a result for a failed sql query execution. 
+resultWithErrors :: Pico -> [Text] -> SqlQueryPostResult +resultWithErrors runtimeSeconds errors = + SqlQueryPostResult + { affectedTables = [] + , rows = [] + , columns = [] + , runtimeSeconds = runtimeSeconds + , errors = errors + } diff --git a/source/AirGQL/Types/TextNullable.hs b/source/AirGQL/Types/TextNullable.hs new file mode 100644 index 0000000..81e16c4 --- /dev/null +++ b/source/AirGQL/Types/TextNullable.hs @@ -0,0 +1,31 @@ +module AirGQL.Types.TextNullable ( + TextNullable (..), +) +where + +import Protolude ( + Eq, + Generic, + Show, + Text, + pure, + ($), + ) + +import Data.Aeson ( + FromJSON, + ToJSON, + Value (Null, String), + parseJSON, + ) + + +data TextNullable = TextUndefined | TextNull | TextValue Text + deriving (Show, Eq, Generic) + + +instance FromJSON TextNullable where + parseJSON (String str) = pure $ TextValue str + parseJSON Null = pure TextNull + parseJSON _ = pure TextUndefined +instance ToJSON TextNullable diff --git a/source/AirGQL/Types/Types.hs b/source/AirGQL/Types/Types.hs new file mode 100644 index 0000000..71778f9 --- /dev/null +++ b/source/AirGQL/Types/Types.hs @@ -0,0 +1,135 @@ +module AirGQL.Types.Types ( + FileFormat (..), + FilenameField (..), + GQLPost (..), + GQLResponse (..), + gqlResponseToObject, + MetadataPair (..), + RawJsonMime, + Database (..), + UsageError (..), +) +where + +import Protolude ( + Eq, + Generic, + Maybe (Nothing), + Monoid (mempty), + Show, + Text, + ) +import Protolude qualified as P + +import Data.Aeson ( + FromJSON, + KeyValue ((.=)), + Object, + ToJSON (toJSON), + Value (Object), + object, + ) +import Database.SQLite.Simple qualified as SS +import Servant.Docs (ToSample (toSamples), singleSample) + + +-- Necessary to avoid JSON string quoting +data RawJsonMime + + +data FileFormat + = SQLiteFile + | CSVFile + | PlainTextFile + | DisallowedFile Text + deriving (Show, Eq) + + +data GQLPost = GQLPost + { query :: Text + , operationName :: Maybe Text + , variables :: Maybe Object + } + deriving 
(Eq, Show, Generic) + + +instance ToJSON GQLPost +instance FromJSON GQLPost + + +instance ToSample GQLPost where + toSamples _ = + singleSample + GQLPost + { query = "{ users { name, email } }" + , variables = Nothing + , operationName = Nothing + } + + +data GQLResponse = GQLResponse + { data_ :: Maybe Value + , errors :: Maybe [Value] + } + deriving (Eq, Show, Generic) + + +instance ToJSON GQLResponse where + toJSON GQLResponse{data_, errors} = + object + [ "data" .= data_ + , "errors" .= errors + ] + + +-- emptyGQLResponse :: GQLResponse +-- emptyGQLResponse = GQLResponse +-- { data_ = Nothing +-- , errors = Nothing +-- } + +newtype FilenameField = FilenameField Text + deriving (Generic, Show) + + +instance SS.FromRow FilenameField + + +data MetadataPair = MetadataPair + { attribute :: Text + , value :: Text + } + deriving (Eq, Show, Generic) + + +instance SS.FromRow MetadataPair + + +gqlResponseToObject :: GQLResponse -> Object +gqlResponseToObject gqlRes = + case toJSON gqlRes of + Object obj -> obj + _ -> mempty + + +data Database = Database + { id :: Text + , name :: Text + , environment :: Maybe Text + , ownership_utc :: Text + } + deriving (Generic, Show) + + +instance FromJSON Database +instance ToJSON Database +instance SS.FromRow Database + + +-- Errors + +newtype UsageError = UsageError Text + deriving (Eq, Show) + + +instance P.Exception UsageError diff --git a/source/AirGQL/Types/Utils.hs b/source/AirGQL/Types/Utils.hs new file mode 100644 index 0000000..8007e26 --- /dev/null +++ b/source/AirGQL/Types/Utils.hs @@ -0,0 +1,14 @@ +module AirGQL.Types.Utils ( + encodeToText, +) +where + +import Data.Aeson (ToJSON, encode) +import Data.ByteString.Lazy (toStrict) +import Data.Text.Encoding (decodeUtf8) +import Protolude (Text, (.)) + + +encodeToText :: (ToJSON a) => a -> Text +encodeToText = + decodeUtf8 . toStrict . 
encode diff --git a/source/AirGQL/Utils.hs b/source/AirGQL/Utils.hs new file mode 100644 index 0000000..68aca22 --- /dev/null +++ b/source/AirGQL/Utils.hs @@ -0,0 +1,361 @@ +-- For embedded SQL queries +{-# LANGUAGE QuasiQuotes #-} +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +{-# HLINT ignore "Use maybe" #-} +-- HLint can't figure out where TemplateHaskell is used, +-- even though it throws an error without the pragma. +{-# HLINT ignore "Unused LANGUAGE pragma" #-} + +module AirGQL.Utils ( + collectAllErrorsAsText, + collectErrorList, + colToFileUrl, + escDoubleQuotes, + escSingleQuotes, + getDbDir, + getGraphiQLVersion, + getMainDbPath, + getOrderOfLinkedList, + getReadOnlyFilePath, + getDbIdFromReadOnlyId, + getSqliteBinaryVersion, + getSqliteEmbeddedVersion, + headerJsonContent, + quoteKeyword, + quoteText, + removeIfExists, + runSqliteCommand, + throwErr400WithMsg, + throwErr404WithMsg, + throwErr500WithMsg, + withRetryConn, + DiffKind (..), +) where + +import Protolude ( + Applicative (pure), + ExitCode (ExitFailure, ExitSuccess), + FilePath, + IO, + Maybe (Just, Nothing), + Monoid (mempty), + Semigroup ((<>)), + Text, + catch, + liftIO, + not, + show, + throwError, + throwIO, + when, + ($), + (&), + (.), + (/=), + (<&>), + ) +import Protolude qualified as P + +import Control.Monad.Catch (catchAll) +import Data.Aeson (KeyValue ((.=)), Value (String), encode, object) +import Data.ByteString qualified as BS +import Data.ByteString.Builder (toLazyByteString) +import Data.ByteString.Lazy qualified as BL +import Data.Either.Extra (mapLeft) +import Data.List qualified as List +import Data.Text qualified as T +import Data.Text.Encoding qualified as T +import Database.SQLite.Simple (Connection) +import Database.SQLite.Simple qualified as SS +import Network.HTTP.Types (HeaderName, encodePathSegments) +import Servant.Server ( + ServerError (errBody, errHeaders), + err400, + err404, + err500, + ) +import Servant.Server qualified as Servant +import System.Directory 
(removeFile) +import System.FilePath (takeFileName, (</>)) +import System.IO.Error (IOError, isDoesNotExistError) +import System.Posix.Files (readSymbolicLink) +import System.Process (readProcess) +import System.Process.Typed ( + byteStringInput, + createPipe, + proc, + readProcessInterleaved, + setStderr, + setStdin, + setStdout, + ) + +import AirGQL.ExternalAppContext (ExternalAppContext (sqlite)) + + +getDbDir :: Text -> FilePath +getDbDir dbId = + "data" + </> "databases" + </> T.unpack dbId + + +getMainDbPath :: Text -> FilePath +getMainDbPath dbId = + getDbDir dbId + </> "main.sqlite" + + +getReadOnlyFilePath :: Text -> FilePath +getReadOnlyFilePath readonlyId = + "data" </> "readonly" </> T.unpack readonlyId + + +getSqliteEmbeddedVersion :: Connection -> IO Text +getSqliteEmbeddedVersion conn = do + sqliteEmbeddedVersion <- + SS.query_ + conn + "select sqlite_version()" + :: IO [[SS.SQLData]] + case sqliteEmbeddedVersion of + [[SS.SQLText verTxt]] -> pure verTxt + _ -> pure mempty + + +getSqliteBinaryVersion :: ExternalAppContext -> IO Text +getSqliteBinaryVersion ctx = do + P.fmap (T.strip . 
T.pack) $ + readProcess + ctx.sqlite + ["--safe", ":memory:"] + (T.unpack "select sqlite_version()") + + +getGraphiQLVersion :: IO Text +getGraphiQLVersion = do + -- let packageJson :: BL.ByteString = + -- $( "package.json" + -- & makeRelativeToProject + -- P.>>= embedStringFile + -- ) + -- + -- pure $ + -- (Aeson.decode packageJson :: Maybe Object) + -- P.>>= KeyMap.lookup "dependencies" + -- P.>>= ( \case + -- Aeson.Object o -> KeyMap.lookup "graphiql" o + -- _ -> Nothing + -- ) + -- P.>>= ( \case + -- Aeson.String s -> Just s + -- _ -> Nothing + -- ) + -- & fromMaybe "" + pure "TODO" + + +-- | Escape double quotes in SQL strings +escDoubleQuotes :: Text -> Text +escDoubleQuotes = + T.replace "\"" "\"\"" + + +-- | Quote a keyword in an SQL query +quoteKeyword :: Text -> Text +quoteKeyword keyword = + keyword + & escDoubleQuotes + & (\word -> "\"" <> word <> "\"") + + +-- | Escape single quotes in SQL strings +escSingleQuotes :: Text -> Text +escSingleQuotes = + T.replace "'" "''" + + +-- | Quote literal text in an SQL query +quoteText :: Text -> Text +quoteText keyword = + keyword + & escSingleQuotes + & (\word -> "'" <> word <> "'") + + +headerJsonContent :: [(HeaderName, BS.ByteString)] +headerJsonContent = + [("Content-Type", "application/json;charset=utf-8")] + + +-- | Throw the specified server error with a message +throwServerErrorWithMsg :: ServerError -> Text -> Servant.Handler a +throwServerErrorWithMsg serverError errorMsg = + throwError $ + serverError + { errHeaders = headerJsonContent + , errBody = + encode $ + object + ["errors" .= [String errorMsg]] + } + + +-- | Throw an "400 Bad Request" error with a message +throwErr400WithMsg :: Text -> Servant.Handler a +throwErr400WithMsg = throwServerErrorWithMsg err400 + + +-- | Throw an "404 Not Found" error with a message +throwErr404WithMsg :: Text -> Servant.Handler a +throwErr404WithMsg = throwServerErrorWithMsg err404 + + +-- | Throw an "500 Internal Server Error" error with a message 
+throwErr500WithMsg :: Text -> Servant.Handler a +throwErr500WithMsg = throwServerErrorWithMsg err500 + + +{-| Get the order of a linked list. + | Each tuple is `(name, previous name in list)`. + | The first's element previous name is `Nothing`. + | Tries to find the longest chain of elements if no start element is found. + | It's quite complicated to also handle incomplete orderings correctly. +-} +getOrderOfLinkedList :: [(Text, Maybe Text)] -> [Text] +getOrderOfLinkedList tables = + let + findAfter :: [(Text, Maybe Text)] -> (Text, Maybe Text) -> [Text] + findAfter remaining (tableName, previousTableMb) = + P.maybeToList previousTableMb + <> case P.find ((P.== Just tableName) P.. P.snd) remaining of + Just found@(name, _) -> + let remaining' = List.filter (/= found) remaining + in tableName : findAfter remaining' (name, Nothing) + Nothing -> [tableName] + in + if P.null tables + then [] + else + let + sortByLength :: [[Text]] -> [[Text]] + sortByLength = + P.sortBy (\x y -> P.compare (P.length y) (P.length x)) + + chainsByLength = + tables + <&> findAfter tables + & sortByLength + + -- First table ist always the (x, Nothing) table entry + firstElement = + case P.find ((P.== Nothing) P.. 
P.snd) tables of + Just tableEntry -> [P.fst tableEntry] + Nothing -> [] + in + -- Sort them by length, combine them, and remove duplicates + ([firstElement] <> chainsByLength) + & P.concat + & List.nub + + +getDbIdFromReadOnlyId :: Text -> IO (Maybe Text) +getDbIdFromReadOnlyId readOnlyId = do + catchAll + ( do + dbId <- liftIO $ readSymbolicLink $ getReadOnlyFilePath readOnlyId + pure $ Just $ T.pack $ takeFileName dbId + ) + ( \err -> do + when (not $ "does not exist" `P.isInfixOf` show err) $ do + P.putErrText $ "Error while reading readonly symlink:\n" <> show err + pure Nothing + ) + + +colToFileUrl :: Text -> Text -> Text -> Text -> Text +colToFileUrl readonlyId tableName colName rowid = + T.decodeUtf8 $ + BL.toStrict $ + toLazyByteString $ + encodePathSegments + [ "readonly" + , readonlyId + , "tables" + , tableName + , "columns" + , colName + , "files" + , "rowid" + , rowid + ] + + +removeIfExists :: FilePath -> IO () +removeIfExists fileName = + let + handleExists :: IOError -> IO () + handleExists e + | isDoesNotExistError e = pure () + | P.otherwise = throwIO e + in + removeFile fileName `catch` handleExists + + +runSqliteCommand :: ExternalAppContext -> FilePath -> BL.ByteString -> Servant.Handler Text +runSqliteCommand ctx dbPath command = do + let + processConfig = + setStdin + (byteStringInput command) + $ setStdout createPipe + $ setStderr createPipe + $ proc ctx.sqlite [dbPath] + + (exitCode, output) <- readProcessInterleaved processConfig + + let outputText = P.decodeUtf8 $ BS.toStrict output + + case exitCode of + ExitSuccess -> + pure outputText + ExitFailure _ -> + throwErr500WithMsg outputText + + +-- | Similar to `sequence`, except it doesn't stop on the first error. +collectErrorList :: [P.Either e b] -> P.Either [e] [b] +collectErrorList results = + case P.lefts results of + [] -> P.Right (P.rights results) + lefts -> P.Left lefts + + +{-| +Similar to `sequence`, except it doesn't stop on the first error. 
+What differentiates this from `collectErrorList` is +that it also merges the errors into a single error message. +-} +collectAllErrorsAsText :: [P.Either Text b] -> P.Either Text [b] +collectAllErrorsAsText results = + collectErrorList results + & mapLeft + ( \lefts -> + "Multiple errors occurred:\n" <> P.unlines lefts + ) + + +data DiffKind = Added | Removed | Kept + deriving (P.Eq, P.Ord, P.Show) + + +{-| Run an action with a connection, retrying if the database is busy. +| Necessary because of WAL mode: +| https://sqlite.org/wal.html#sometimes_queries_return_sqlite_busy_in_wal_mode +-} +withRetryConn :: FilePath -> (Connection -> IO a) -> IO a +withRetryConn filePath action = do + SS.withConnection filePath $ \conn -> do + SS.execute_ conn "PRAGMA busy_timeout = 5000;" -- 5 seconds + action conn diff --git a/source/Server/Server.hs b/source/Server/Server.hs new file mode 100644 index 0000000..d53927b --- /dev/null +++ b/source/Server/Server.hs @@ -0,0 +1,194 @@ +-- Necessary for servant-docs instances +{-# OPTIONS_GHC -Wno-orphans #-} +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +module Server.Server (platformAPI, platformApp) +where + +import Protolude ( + Int, + Monoid (mempty), + Proxy (Proxy), + ($), + ) +import Protolude qualified as P + +import Data.Aeson (Object, Value, object) +import Data.Aeson.KeyMap qualified as KeyMap +import Data.ByteString.Lazy qualified as BL +import Data.Text (Text) +import Data.Text qualified as T +import Network.Wai (Application) +import Network.Wai.Parse ( + defaultParseRequestBodyOptions, + setMaxRequestFilesSize, + setMaxRequestNumFiles, + ) +import Servant (Context (EmptyContext, (:.)), NoContent) +import Servant.API ( + Capture, + Get, + JSON, + PlainText, + Post, + ReqBody, + (:<|>) ((:<|>)), + (:>), + ) +import Servant.Docs ( + DocCapture (DocCapture), + ToCapture (toCapture), + ToSample, + singleSample, + toSamples, + ) +import Servant.HTML.Blaze (HTML) +import Servant.Multipart ( + MultipartData (MultipartData), + 
MultipartOptions (generalOptions), + Tmp, + ToMultipartSample (toMultipartSamples), + defaultMultipartOptions, + ) +import Servant.Server (Server) +import Servant.Server qualified as Servant +import Text.Blaze.Internal (MarkupM) + +import AirGQL.Config (Config (maxDbSize), defaultConfig) +import AirGQL.ExternalAppContext (ExternalAppContext) +import AirGQL.Lib (SQLPost) +import AirGQL.Servant.Database ( + apiDatabaseSchemaGetHandler, + apiDatabaseVacuumPostHandler, + ) +import AirGQL.Servant.GraphQL ( + gqlQueryGetHandler, + gqlQueryPostHandler, + playgroundDefaultQueryHandler, + readOnlyGqlPostHandler, + writeOnlyGqlPostHandler, + ) +import AirGQL.Servant.SqlQuery (sqlQueryPostHandler) +import AirGQL.Types.SchemaConf (SchemaConf (pragmaConf), defaultSchemaConf) +import AirGQL.Types.SqlQueryPostResult (SqlQueryPostResult) +import AirGQL.Types.Types (GQLPost) + + +{- FOURMOLU_DISABLE -} +-- ATTENTION: Order of handlers matters! +type PlatformAPI = + -- gqlQueryGetHandler + -- Redirect to GraphiQL playground + "graphql" :> Get '[HTML] NoContent + + -- gqlQueryPostHandler + :<|> "graphql" + :> ReqBody '[JSON] GQLPost + :> Post '[JSON] Object + + -- readOnlyGqlPostHandler + :<|> "readonly" :> "graphql" + :> ReqBody '[JSON] GQLPost + :> Post '[JSON] Object + + -- writeOnlyGqlPostHandler + :<|> "writeonly" :> "graphql" + :> ReqBody '[JSON] GQLPost + :> Post '[JSON] Object + + -- playgroundDefaultQueryHandler + :<|> "playground" :> "default-query" + :> Get '[PlainText] Text + + -- apiDatabaseSchemaGetHandler + :<|> "schema" :> Get '[PlainText] Text + + -- apiDatabaseVacuumPostHandler + :<|> "vacuum" :> Post '[JSON] Object + + -- sqlQueryPostHandler + :<|> "sql" + :> ReqBody '[JSON] SQLPost + :> Post '[JSON] SqlQueryPostResult + +{- FOURMOLU_ENABLE -} + + +-- | Instances for automatic documentation generation via servant-docs +instance ToSample (MultipartData Tmp) where + toSamples _ = singleSample $ MultipartData mempty mempty + + +instance ToMultipartSample Tmp 
(MultipartData Tmp) where + toMultipartSamples _ = [] + + +instance ToSample Value where + toSamples _ = singleSample $ object [] + + +instance ToSample (KeyMap.KeyMap Value) where + toSamples _ = singleSample $ KeyMap.fromList [] + + +instance ToSample (MarkupM ()) where + toSamples _ = singleSample mempty + + +instance ToSample BL.ByteString where + toSamples _ = singleSample mempty + + +instance ToSample Text where + toSamples _ = singleSample mempty + + +instance ToSample P.ByteString where + toSamples _ = singleSample mempty + + +instance ToCapture (Capture "readonlyId" Text) where + toCapture _ = DocCapture "readonlyId" "Read-only ID of the database" + + +instance ToCapture (Capture "dbId" Text) where + toCapture _ = DocCapture "dbId" "ID of the database to be served" + + +platformAPI :: Proxy PlatformAPI +platformAPI = Proxy + + +platformServer :: ExternalAppContext -> P.FilePath -> Server PlatformAPI +platformServer ctx filePath = do + let dbPath = T.pack filePath + gqlQueryGetHandler dbPath + :<|> gqlQueryPostHandler defaultSchemaConf dbPath + :<|> readOnlyGqlPostHandler dbPath + :<|> writeOnlyGqlPostHandler dbPath + :<|> playgroundDefaultQueryHandler dbPath + :<|> apiDatabaseSchemaGetHandler ctx dbPath + :<|> apiDatabaseVacuumPostHandler dbPath + :<|> sqlQueryPostHandler defaultSchemaConf.pragmaConf dbPath + + +platformApp :: ExternalAppContext -> P.FilePath -> Application +platformApp ctx filePath = do + let + maxFileSizeInByte :: Int = defaultConfig.maxDbSize + + multipartOpts :: MultipartOptions Tmp + multipartOpts = + (defaultMultipartOptions (Proxy :: Proxy Tmp)) + { generalOptions = + setMaxRequestNumFiles 1 $ + setMaxRequestFilesSize + (P.fromIntegral maxFileSizeInByte) + defaultParseRequestBodyOptions + } + + context :: Context '[MultipartOptions Tmp] + context = + multipartOpts :. 
EmptyContext + + Servant.serveWithContext platformAPI context $ platformServer ctx filePath diff --git a/stack-standalone.yaml b/stack-standalone.yaml new file mode 100644 index 0000000..fb6ecc2 --- /dev/null +++ b/stack-standalone.yaml @@ -0,0 +1,18 @@ +resolver: lts-22.19 + +extra-deps: + - double-x-encoding-1.1.1 + + - graphql-spice-1.0.2.0 + + - github: Airsequel/simple-sql-parser + commit: 680f2b77c53fcc086dc7d5f498f764ad2235b828 + +allow-newer: true + +flags: + aeson-pretty: + lib-only: true + + direct-sqlite: + mathfunctions: true diff --git a/stack-standalone.yaml.lock b/stack-standalone.yaml.lock new file mode 100644 index 0000000..257929a --- /dev/null +++ b/stack-standalone.yaml.lock @@ -0,0 +1,37 @@ +# This file was autogenerated by Stack. +# You should not edit this file by hand. +# For more information, please see the documentation at: +# https://docs.haskellstack.org/en/stable/lock_files + +packages: +- completed: + hackage: double-x-encoding-1.1.1@sha256:f3b99d41172e51451532391ceefcd3cddcd6c4e494c6f744c3e91b2a31cc452d,1781 + pantry-tree: + sha256: 383992adc327adfc2b5ecfc7e026105f8f162dccf76bb2b157c642e41ca8e28d + size: 188 + original: + hackage: double-x-encoding-1.1.1 +- completed: + hackage: graphql-spice-1.0.2.0@sha256:97d22da8ec38342408bcd237e91e76ffcdad4f14df666f62b0b1ccba6bf39b87,2000 + pantry-tree: + sha256: d60774d462c7c0af08c82b3f754e0ac49e598640e382c417e1809880192747cb + size: 937 + original: + hackage: graphql-spice-1.0.2.0 +- completed: + name: simple-sql-parser + pantry-tree: + sha256: a7f399e93b6cb3056e43702b57ecda1a6a86dfdbeca4361ae3d2d27518ba4fe7 + size: 3846 + sha256: 5731c4471e011dede78b8f1d8812dd5eeb1c79024307f0b03f1855f9028e43e0 + size: 137333 + url: https://github.com/Airsequel/simple-sql-parser/archive/680f2b77c53fcc086dc7d5f498f764ad2235b828.tar.gz + version: 0.6.1 + original: + url: https://github.com/Airsequel/simple-sql-parser/archive/680f2b77c53fcc086dc7d5f498f764ad2235b828.tar.gz +snapshots: +- completed: + sha256: 
e5cac927cf7ccbd52aa41476baa68b88c564ee6ddc3bc573dbf4210069287fe7 + size: 713340 + url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/22/19.yaml + original: lts-22.19 diff --git a/tests/Spec.hs b/tests/Spec.hs new file mode 100644 index 0000000..9eb98c3 --- /dev/null +++ b/tests/Spec.hs @@ -0,0 +1,3395 @@ +-- Unit and integration tests +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE NoMonomorphismRestriction #-} +{-# OPTIONS_GHC -Wno-missing-local-signatures #-} +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +{-# HLINT ignore "Replace case with maybe" #-} + +import Protolude ( + Applicative (pure), + Bool (False, True), + Either (Right), + FilePath, + IO, + Maybe (Just, Nothing), + Monoid (mempty), + Text, + fromMaybe, + readFile, + show, + ($), + (&), + (.), + (<$), + (<>), + ) +import Protolude qualified as P + +import Control.Monad.Catch (catchAll) +import Data.Aeson (Value (Number)) +import Data.Aeson qualified as Ae +import Data.Aeson.KeyMap qualified as KeyMap +import Data.Aeson.Types (Object) +import Data.ByteString.Lazy qualified as BL +import Data.HashMap.Internal.Strict qualified as HashMap +import Data.List qualified as List +import Data.Text (pack, unpack) +import Data.Text qualified as T +import Data.Text.Encoding as T (encodeUtf8) +import Database.SQLite.Simple ( + Query, + SQLData (SQLFloat, SQLInteger, SQLNull, SQLText), + execute_, + open, + query_, + ) +import Database.SQLite.Simple qualified as SS +import Database.SQLite.Simple.QQ (sql) +import Language.GraphQL.JSON (graphql) +import Language.GraphQL.TH (gql) +import Language.GraphQL.Type as GQL (Value (Object, String)) +import Servant.Server (runHandler) +import System.Directory (createDirectoryIfMissing, makeAbsolute) +import System.FilePath ((</>)) +import System.Posix (changeWorkingDirectory, getWorkingDirectory) +import Test.Hspec ( + SpecWith, + before_, + describe, + it, + pendingWith, + shouldBe, + shouldContain, + 
shouldMatchList, + ) +import Test.Hspec qualified as Hspec + +import AirGQL.GraphQL (getDerivedSchema) +import AirGQL.Introspection (createType) +import AirGQL.Lib ( + AccessMode (WriteOnly), + ColumnEntry ( + ColumnEntry, + column_name, + column_name_gql, + datatype, + datatype_gql, + dflt_value, + isGenerated, + isOmittable, + isUnique, + notnull, + primary_key, + select_options + ), + GqlTypeName (GqlTypeName, full, root), + ObjectType (Table), + SQLPost (SQLPost), + TableEntryRaw ( + TableEntryRaw, + name, + object_type, + rootpage, + tbl_name + ), + getColumns, + getTables, + parseSql, + replaceCaseInsensitive, + stringToGqlTypeName, + ) +import AirGQL.Lib qualified +import AirGQL.Raw (raw) +import AirGQL.Servant.SqlQuery (sqlQueryPostHandler) +import AirGQL.Types.PragmaConf qualified as PragmaConf +import AirGQL.Types.SchemaConf (SchemaConf (accessMode), defaultSchemaConf) +import AirGQL.Types.SqlQueryPostResult ( + SqlQueryPostResult ( + affectedTables, + errors, + rows + ), + ) +import AirGQL.Types.Utils (encodeToText) +import AirGQL.Utils ( + getOrderOfLinkedList, + removeIfExists, + withRetryConn, + ) +import Tests.Utils (testRoot, withDataDbConn, withTestDbConn) + + +-- | Save test databases after running tests for later inspection +shouldSaveDbs :: Bool +shouldSaveDbs = True + + +dbPath :: Text +dbPath = T.pack (testRoot </> "fixture.db") + + +rmSpaces :: Text -> BL.ByteString +rmSpaces text = + let + value :: Maybe Ae.Value = + text + & T.encodeUtf8 + & pure + & BL.fromChunks + & Ae.decode + in + case value of + Just val -> Ae.encode val + Nothing -> "ERROR: Failed to decode JSON" + + +createUsersTableQuery :: Query +createUsersTableQuery = + [sql| + CREATE TABLE IF NOT EXISTS "users" ( + "name" TEXT, + "email" TEXT NOT NULL UNIQUE, + "created_utc" TEXT NOT NULL, + "number_of_logins" INTEGER, + "progress" REAL, + PRIMARY KEY("email") + ); + |] + + +createUsersTable :: SS.Connection -> IO () +createUsersTable conn = + execute_ conn createUsersTableQuery 
+ + +createSongsTableQuery :: Query +createSongsTableQuery = + [sql| + CREATE TABLE IF NOT EXISTS "songs" ( + "name" TEXT NOT NULL, + "duration_seconds" INTEGER NOT NULL + ); + |] + + +createSongsTable :: SS.Connection -> IO () +createSongsTable conn = + execute_ conn createSongsTableQuery + + +-- prettyPrintSchema :: Schema m -> Text +-- prettyPrintSchema schema = +-- let +-- ObjectType name queryDesc interfaceTypes resolvers = query schema +-- in "" +-- <> "Description:\n" <> show (description schema) <> "\n\n" +-- <> "Query:\n\n" +-- <> " Name:\n" <> name <> "\n\n" +-- <> " Description:\n" <> show queryDesc <> "\n\n" +-- <> " InterfaceTypes:\n" <> show interfaceTypes <> "\n\n" +-- <> " Resolvers:\n" <> show resolvers <> "\n\n" +-- <> "Mutation:\n\n" <> show (mutation schema) <> "\n\n" +-- <> "Subscriptions:\n" <> show (subscription schema) <> "\n\n" +-- -- "Directives:\n" <> show (directives schema) <> "\n\n" <> +-- -- "Types:\n" <> show (types schema) <> "\n\n" <> +-- -- "Interface Implementations:\n" <> show (implementations schema) <> "\n\n" +-- <> "" + +testSuite :: SpecWith () +testSuite = do + describe "Utils" $ do + -- Hspec.fit "pretty prints the schema" $ do + -- conn <- open $ unpack dbPath + -- tables <- getTables conn + -- schema <- + -- getDerivedSchema defaultSchemaConf conn dbPath tables + + -- putText $ prettyPrintSchema schema + + -- True `shouldBe` True + + it "replaces strings case insensitively" $ do + let + results = + [ replaceCaseInsensitive "hello" "hi" "hello world" + , replaceCaseInsensitive "hello" "hi" "Hello World" + , replaceCaseInsensitive "l" "L" "Hello World" + ] + + results `shouldBe` ["hi world", "hi World", "HeLLo WorLd"] + + it "loads all tables from database" $ do + tables <- do + conn <- open $ unpack dbPath + getTables conn + + shouldBe + tables + [ TableEntryRaw + { name = "users" + , tbl_name = "users" + , object_type = Table + , rootpage = 2 + , AirGQL.Lib.sql = + "\ + \CREATE TABLE \"users\" (\n\ + \ \"name\" TEXT,\n\ 
+ \ \"email\" TEXT NOT NULL UNIQUE,\n\ + \ \"created_utc\" TEXT NOT NULL,\n\ + \ \"number_of_logins\" INTEGER,\n\ + \ \"progress\" REAL,\n\ + \ PRIMARY KEY(\"email\")\n\ + \ )" + } + , TableEntryRaw + { name = "songs" + , tbl_name = "songs" + , object_type = Table + , rootpage = 4 + , AirGQL.Lib.sql = + "\ + \CREATE TABLE \"songs\" (\n\ + \ \"name\" TEXT NOT NULL,\n\ + \ \"duration_seconds\" INTEGER NOT NULL\n\ + \ )" + } + ] + + describe "getColumns" $ do + it "loads all columns from users table" $ do + tableColumns <- do + conn <- open $ unpack dbPath + getColumns dbPath conn "users" + + let + columnsExpected = + [ ColumnEntry + { column_name = "rowid" + , column_name_gql = "rowid" + , datatype = "INTEGER" + , datatype_gql = Just $ stringToGqlTypeName "Int" + , select_options = Nothing + , notnull = False + , isUnique = False + , isOmittable = True + , isGenerated = False + , dflt_value = Nothing + , primary_key = True + } + , ColumnEntry + { column_name = "name" + , column_name_gql = "name" + , datatype = "TEXT" + , datatype_gql = Just $ stringToGqlTypeName "String" + , select_options = Nothing + , notnull = False + , isUnique = False + , isOmittable = False + , isGenerated = False + , dflt_value = Nothing + , primary_key = False + } + , ColumnEntry + { column_name = "email" + , column_name_gql = "email" + , datatype = "TEXT" + , datatype_gql = Just $ stringToGqlTypeName "String" + , select_options = Nothing + , notnull = True + , isUnique = True + , isOmittable = False + , isGenerated = False + , dflt_value = Nothing + , primary_key = True + } + , ColumnEntry + { column_name = "created_utc" + , column_name_gql = "created_utc" + , datatype = "TEXT" + , datatype_gql = Just $ stringToGqlTypeName "String" + , select_options = Nothing + , notnull = True + , isUnique = False + , isOmittable = False + , isGenerated = False + , dflt_value = Nothing + , primary_key = False + } + , ColumnEntry + { column_name = "number_of_logins" + , column_name_gql = "number_of_logins" 
+ , datatype = "INTEGER" + , datatype_gql = Just $ stringToGqlTypeName "Int" + , select_options = Nothing + , notnull = False + , isUnique = False + , isOmittable = False + , isGenerated = False + , dflt_value = Nothing + , primary_key = False + } + , ColumnEntry + { column_name = "progress" + , column_name_gql = "progress" + , datatype = "REAL" + , datatype_gql = Just $ stringToGqlTypeName "Float" + , select_options = Nothing + , notnull = False + , isUnique = False + , isOmittable = False + , isGenerated = False + , dflt_value = Nothing + , primary_key = False + } + ] + + tableColumns `shouldBe` columnsExpected + + it "loads a nullable single-select column" $ do + let dbName = "creates_nullable_single-select.db" + withTestDbConn shouldSaveDbs dbName $ \conn -> do + execute_ + conn + [sql| + CREATE TABLE checks ( + color TEXT CHECK (color IN ('red', 'green', 'blue')) + ) + |] + + tableColumns <- getColumns dbName conn "checks" + + let + columnExpected = + ColumnEntry + { column_name = "color" + , column_name_gql = "color" + , datatype = "TEXT" + , datatype_gql = + Just $ + GqlTypeName + { full = "checks_color_String" + , root = "String" + } + , select_options = Just ["red", "green", "blue"] + , notnull = False + , isUnique = False + , isOmittable = False + , isGenerated = False + , dflt_value = Nothing + , primary_key = False + } + + P.lastMay tableColumns `shouldBe` Just columnExpected + + it "loads a non-null single-select column" $ do + let dbName = "creates_non-null_single-select.db" + withTestDbConn shouldSaveDbs dbName $ \conn -> do + execute_ + conn + [sql| + CREATE TABLE checks ( + color TEXT CHECK (color IN ('red', 'green', 'blue')) NOT NULL + ) + |] + + tableColumns <- getColumns dbName conn "checks" + + let + columnExpected = + ColumnEntry + { column_name = "color" + , column_name_gql = "color" + , datatype = "TEXT" + , datatype_gql = + Just $ + GqlTypeName + { full = "checks_color_String" + , root = "String" + } + , select_options = Just ["red", 
"green", "blue"] + , notnull = True + , isUnique = False + , isOmittable = False + , isGenerated = False + , dflt_value = Nothing + , primary_key = False + } + + P.lastMay tableColumns `shouldBe` Just columnExpected + + it "coerces a multi-type single-select column to text" $ do + let dbName = "multi-type_single-select.db" + withTestDbConn shouldSaveDbs dbName $ \conn -> do + execute_ + conn + [sql| + CREATE TABLE checks ( + value TEXT CHECK (value IN (1, 2.2, 'three')) + ) + |] + + tableColumns <- getColumns dbName conn "checks" + + let + columnExpected = + ColumnEntry + { column_name = "value" + , column_name_gql = "value" + , datatype = "TEXT" + , datatype_gql = + Just $ + GqlTypeName + { full = "checks_value_String" + , root = "String" + } + , select_options = Just ["1", "2.2", "three"] + , notnull = False + , isUnique = False + , isOmittable = False + , isGenerated = False + , dflt_value = Nothing + , primary_key = False + } + + P.lastMay tableColumns `shouldBe` Just columnExpected + + it "marks integer primary keys as omittable" $ do + let dbName = "integer-omittable-primary-key.db" + withTestDbConn shouldSaveDbs dbName $ \conn -> do + execute_ + conn + [sql| + CREATE TABLE items ( + id INTEGER PRIMARY KEY + ) + |] + + tableColumns <- getColumns dbName conn "items" + + let + columnExpected = + ColumnEntry + { column_name = "id" + , column_name_gql = "id" + , datatype = "INTEGER" + , datatype_gql = Just $ stringToGqlTypeName "Int" + , select_options = Nothing + , notnull = False + , isUnique = False + , isOmittable = True + , isGenerated = False + , dflt_value = Nothing + , primary_key = True + } + + P.lastMay tableColumns `shouldBe` Just columnExpected + + it "correctly parses generated columns" $ do + let dbName = "generated-columns.db" + withTestDbConn shouldSaveDbs dbName $ \conn -> do + execute_ + conn + [sql| + CREATE TABLE "generated_columns" ( + "start" REAL NOT NULL, + "end" REAL NOT NULL, + "distance_km" REAL GENERATED ALWAYS AS ("end" - "start") 
STORED, + "distance_miles" REAL GENERATED ALWAYS AS (distance_km * 1.6) VIRTUAL + ) + |] + + tableColumns <- getColumns dbName conn "generated_columns" + + let (generated, non_generated) = + -- => [ColumnDef] + tableColumns + -- => ([ColumnDef], [ColumnDef]) + & List.partition (\c -> c.isGenerated) + -- => (Text, Text) + & P.bimap + (P.fmap column_name) + (P.fmap column_name) + + -- We use `shouldMatchList` because the order does not matter + non_generated `shouldMatchList` ["start", "end", "rowid"] + generated `shouldMatchList` ["distance_km", "distance_miles"] + + it "sorts the tables from the metadata table" $ do + getOrderOfLinkedList [("a", Nothing), ("b", Just "a"), ("c", Just "b")] + `shouldBe` ["a", "b", "c"] + + getOrderOfLinkedList [("a", Nothing), ("c", Just "b"), ("b", Just "a")] + `shouldBe` ["a", "b", "c"] + + getOrderOfLinkedList [("b", Just "a"), ("c", Just "b"), ("a", Nothing)] + `shouldBe` ["a", "b", "c"] + + getOrderOfLinkedList [("a", Just "x"), ("c", Just "b"), ("b", Just "a")] + `shouldBe` ["x", "a", "b", "c"] + + getOrderOfLinkedList [("c", Just "b"), ("b", Just "a"), ("a", Just "x")] + `shouldBe` ["x", "a", "b", "c"] + + getOrderOfLinkedList [("a", Nothing), ("b", Just "a"), ("c", Nothing)] + `shouldBe` ["a", "b", "c"] + + getOrderOfLinkedList [("a", Nothing), ("b", Nothing), ("c", Nothing)] + `shouldBe` ["a", "b", "c"] + + getOrderOfLinkedList [("a", Nothing), ("b", Just "a"), ("d", Just "c")] + `shouldBe` ["a", "b", "c", "d"] + + -- Nothing must always be first + getOrderOfLinkedList [("c", Just "b"), ("d", Just "c"), ("a", Nothing)] + `shouldBe` ["a", "b", "c", "d"] + + -- Should *not* error out on cycles + getOrderOfLinkedList [("c", Just "a"), ("b", Just "a"), ("a", Just "b")] + `shouldBe` ["a", "b", "c"] + + describe "REST API" $ do + describe "SQL" $ do + it "supports retrieving the schema" $ do + -- let query = "CREATE TABLE users (name TEXT, email TEXT);" + -- let dbPost = DatabasePost "test-db" (Just "test-team") "" (Just query) + 
+ -- Right schemaResult <- + -- runHandler $ + -- apiDatabaseSchemaGetHandler ctx "TODO" + + -- T.strip schemaResult `shouldBe` query + pendingWith "Create database first" + + it "supports executing an SQL query" $ do + let dbId = "api-sql-query" + withDataDbConn dbId $ \_ -> do + Right result <- + runHandler $ + sqlQueryPostHandler + PragmaConf.defaultConf + ("_TEST_" <> dbId) + SQLPost{query = "SELECT TRUE, 123, 'test'"} + + result.rows + `shouldBe` [ KeyMap.fromList + [ ("TRUE", Number 1) + , ("123", Number 123) + , ("'test'", "test") + ] + ] + + result.affectedTables `shouldMatchList` [] + + it "return columns in the requested order" $ do + -- Even though JSON objects are unordered by definition, + -- the fields (columns) must be returned in the requested order + -- as Elm relies on it for decoding. + + let dbId = "api-sql-col-order" + withDataDbConn dbId $ \_ -> do + Right result <- + runHandler $ + sqlQueryPostHandler + PragmaConf.defaultConf + ("_TEST_" <> dbId) + SQLPost{query = "SELECT 0 AS b, 0 AS a"} + + (result & encodeToText & T.unpack) + `shouldContain` "[{\"b\":0,\"a\":0}]" + + result.affectedTables `shouldMatchList` [] + + it "supports using math functions" $ do + let dbId = "api-sql-math-query" + withDataDbConn dbId $ \_ -> do + Right result <- + runHandler $ + sqlQueryPostHandler + PragmaConf.defaultConf + ("_TEST_" <> dbId) + SQLPost{query = "SELECT abs(floor(cos(2 * pi() / 3)))"} + + result.rows + `shouldBe` [ KeyMap.singleton + "abs(floor(cos(2 * pi() / 3)))" + (Number 1.0) + ] + + result.affectedTables `shouldMatchList` [] + + -- The following few tests verify that different operations return + -- the proper set of affected tables. 
This query creates a completely + -- unrelated table just to ensure it is not getting returned by the + -- methods tested below + let createUnrelatedTable = do + [sql| + CREATE TABLE IF NOT EXISTS todos ( + title TEXT, + content TEXT, + done INTEGER + ) + |] + + -- This query creates a table commonly used by the following few tests. + let createNotesTable = do + [sql| + CREATE TABLE IF NOT EXISTS notes ( + content TEXT + ) + |] + + it "should not allow rowid references" $ do + let dbId = "api-sql-rowid-references" + let query = "CREATE TABLE foo ( bar TEXT references goo(rowid) )" + withDataDbConn dbId $ \_ -> do + Right result <- + runHandler $ + sqlQueryPostHandler + PragmaConf.defaultConf + ("_TEST_" <> dbId) + SQLPost{query = query} + + let + expectedMessage = + "Column 'bar' references the rowid column of table 'goo'.\n" + <> "This is not supported by SQLite:\n" + <> "https://www.sqlite.org/foreignkeys.html" + + result.errors `shouldBe` [expectedMessage] + + it "should return no affected tables on a simple select" $ do + let dbId = "api-sql-simple-select" + withDataDbConn dbId $ \conn -> do + execute_ conn createUnrelatedTable + execute_ conn createNotesTable + + execute_ + conn + [sql| + INSERT INTO notes VALUES ("hello") + |] + + Right result <- + runHandler $ + sqlQueryPostHandler + PragmaConf.defaultConf + ("_TEST_" <> dbId) + SQLPost{query = "SELECT * from notes"} + + result.rows + `shouldBe` [KeyMap.singleton "content" "hello"] + + result.affectedTables `shouldMatchList` [] + + it "should return the correct affected tables when creating tables" $ do + let dbId = "api-sql-simple-create" + withDataDbConn dbId $ \conn -> do + execute_ conn createUnrelatedTable + Right result <- + runHandler $ + sqlQueryPostHandler + PragmaConf.defaultConf + ("_TEST_" <> dbId) + SQLPost + { query = SS.fromQuery createNotesTable + } + + result.affectedTables + `shouldMatchList` ["notes"] + + it "should return the correct affected tables when dropping tables" $ do + let dbId = 
"api-sql-simple-drop" + withDataDbConn dbId $ \conn -> do + execute_ conn createUnrelatedTable + execute_ conn createNotesTable + + Right result <- + runHandler $ + sqlQueryPostHandler + PragmaConf.defaultConf + ("_TEST_" <> dbId) + SQLPost{query = "DROP TABLE notes"} + + result.affectedTables `shouldMatchList` ["notes"] + + it "should return the correct affected tables when editing tables" $ do + let dbId = "api-sql-add-column" + withDataDbConn dbId $ \conn -> do + execute_ conn createUnrelatedTable + execute_ conn createNotesTable + + Right result <- + runHandler $ + sqlQueryPostHandler + PragmaConf.defaultConf + ("_TEST_" <> dbId) + SQLPost + { query = + "ALTER TABLE notes ADD COLUMN title TEXT" + } + + result.affectedTables `shouldMatchList` ["notes"] + + it "should mark every table as affected when changes are detected" $ do + let dbId = "api-sql-insert-values" + withDataDbConn dbId $ \conn -> do + execute_ conn createUnrelatedTable + execute_ conn createNotesTable + + Right result <- + runHandler $ + sqlQueryPostHandler + PragmaConf.defaultConf + ("_TEST_" <> dbId) + SQLPost + { query = + [raw| INSERT INTO "notes" VALUES ("hello") |] + } + + result.affectedTables `shouldMatchList` ["notes", "todos"] + + describe "Queries" $ do + it "supports retrieving data" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values ('Adrian', 'adrian@example.com', '2021-01-01T00:00Z') + |] + + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + dbPath + tables + + Right result <- graphql schema Nothing mempty "{ users { name } }" + Ae.encode result + `shouldBe` [gql| + {"data":{"users":[{"name":"Adrian"}]}} + |] + + it "supports retrieving data from tables with special names" $ do + let testDbPath = testRoot </> "special_table_name.db" + removeIfExists testDbPath + let dbPathNorm = if shouldSaveDbs then testDbPath else ":memory:" + + withRetryConn dbPathNorm $ \conn -> do + execute_ + 
conn + [sql| + CREATE TABLE "name with-special$chars" ( + name TEXT + ) + |] + execute_ + conn + [sql| + INSERT INTO "name with-special$chars" (name) + VALUES ('John') + |] + + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + dbPath + tables + + Right result <- + graphql + schema + Nothing + mempty + "{ nameXX0withXXDspecialXX4chars { name } }" + Ae.encode result + `shouldBe` [gql| + {"data":{"nameXX0withXXDspecialXX4chars":[{"name":"John"}]}} + |] + + describe "column names with special characters" $ do + let + dbPathSpaces :: FilePath = testRoot </> "spaces-test.db" + setupDatabaseSpaces = do + removeIfExists dbPathSpaces + conn <- open dbPathSpaces + execute_ + conn + [sql| + CREATE TABLE IF NOT EXISTS test_entries ( + id INTEGER PRIMARY KEY, + `column with spaces` TEXT + ) + + |] + execute_ + conn + [sql| + INSERT INTO test_entries (id, `column with spaces`) + VALUES (0, 'Just a test') + |] + + before_ setupDatabaseSpaces $ it "supports column names with spaces" $ do + conn <- open dbPathSpaces + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + (pack dbPathSpaces) + tables + Right result <- + graphql + schema + Nothing + mempty + "{ test_entries { columnXX0withXX0spaces } }" + Ae.encode result + `shouldBe` rmSpaces + [raw| + { "data": { + "test_entries": [ + { "columnXX0withXX0spaces": "Just a test" } + ] + }} + |] + + before_ setupDatabaseSpaces $ + it "generates introspection schema for column names with spaces" $ + do + conn <- open dbPathSpaces + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + (pack dbPathSpaces) + tables + Right result <- + graphql + schema + Nothing + mempty + "{ __schema{ types { name fields { name } } } } }" + show (Ae.encode result) `shouldContain` "columnXX0withXX0spaces" + + it "avoids column name remapping collisions" $ do + let dbPathSpaces = testRoot </> "spaces-collision-test.db" + removeIfExists dbPathSpaces + conn <- 
open dbPathSpaces + execute_ + conn + [sql| + CREATE TABLE IF NOT EXISTS test_entries ( + id INTEGER PRIMARY KEY, + `the column` TEXT, + `the_column` TEXT, + `the_column_1` TEXT, + `the_column_2` TEXT + ) + |] + execute_ + conn + [sql| + INSERT INTO test_entries + (id, `the column`, the_column, the_column_1, the_column_2) + VALUES + (0, 'with spaces', 'no spaces', 'no spaces 1', 'no spaces 2') + |] + + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + (pack dbPathSpaces) + tables + + Right result <- + graphql + schema + Nothing + mempty + [gql| + { test_entries { + theXX0column + the_column + the_column_1 + the_column_2 + } + } + |] + Ae.encode result + `shouldBe` rmSpaces + [raw| + { "data": { + "test_entries": [ + { "theXX0column": "with spaces", + "the_column": "no spaces", + "the_column_1": "no spaces 1", + "the_column_2": "no spaces 2" + } + ] + }} + |] + + it "includes rowid and sorts by rowid" $ do + let dbPathSpaces = testRoot </> "rowid_test.db" + removeIfExists dbPathSpaces + conn <- open dbPathSpaces + + execute_ + conn + [sql| + CREATE TABLE IF NOT EXISTS "users" ( + "email" TEXT NOT NULL UNIQUE PRIMARY KEY, + "number_of_logins" INTEGER + ); + |] + execute_ + conn + [sql| + INSERT INTO users (email, number_of_logins) + VALUES ('john@example.com', 0), ('eve@example.com', 4); + |] + + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + (pack dbPathSpaces) + tables + + Right result <- + graphql + schema + Nothing + mempty + [gql| + { + users { + rowid + } + } + |] + Ae.encode result + `shouldBe` rmSpaces + [raw| + { + "data": { + "users": [ + { "rowid": 1 }, + { "rowid": 2 } + ] + } + } + |] + + it "supports aliases" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values ('Adrian', 'adrian@example.com', '2021-01-01T00:00Z') + |] + + let + query = + [gql| + { + userA: users { name } + userB: users { email } + } + |] + expected = 
+ rmSpaces + [raw| + {"data":{ + "userB":[{"email":"adrian@example.com"}], + "userA":[{"name":"Adrian"}] + }} + |] + + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + dbPath + tables + + Right result <- graphql schema Nothing mempty query + Ae.encode result `shouldBe` expected + + it "supports fragments" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values ('Adrian', 'adrian@example.com', '2021-01-01T00:00Z') + |] + + let + query = + [gql| + { + userA: users { ...basicFields } + userB: users { ...basicFields } + } + + fragment basicFields on users { + name + email + } + |] + expected = + rmSpaces + [raw| + { "data": { + "userB":[{"email":"adrian@example.com","name":"Adrian"}], + "userA":[{"email":"adrian@example.com","name":"Adrian"}] + }} + |] + + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + dbPath + tables + + Right result <- graphql schema Nothing mempty query + Ae.encode result `shouldBe` expected + + it "supports directives" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values + ('John', 'john@example.com', '2021-01-01T00:00Z'), + ('Eve', 'eve@example.com', '2019-01-01T00:00Z') + |] + + let + query = + [gql| + query DirectiveTest ($withName: Boolean!) 
{ + users { + name @include(if: $withName) + } + } + |] + + variables :: Object + variables = + fromMaybe mempty $ Ae.decode "{ \"withName\": true }" + + expected = + rmSpaces + [raw| + { "data": { + "users": [ + { "name": "John" }, + { "name": "Eve" } + ] + }} + |] + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + dbPath + tables + + Right result <- graphql schema Nothing variables query + + Ae.encode result `shouldBe` expected + + it "supports retrieving records with a filter" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values + ('John', 'john@example.com', '2021-01-01T00:00Z'), + ('Eve', 'eve@example.com', '2019-01-01T00:00Z') + |] + + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + dbPath + tables + + Right result <- + graphql + schema + Nothing + mempty + [gql| + { + users (filter: {email: {eq: "eve@example.com"}}) { + name + } + } + |] + + Ae.encode result + `shouldBe` "{\"data\":{\"users\":[{\"name\":\"Eve\"}]}}" + + it "supports retrieving records with a filter over int and float" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc, progress) + values + ('John', 'john@example.com', '2021-01-01T00:00Z', 0.7), + ('Eve', 'eve@example.com', '2019-01-01T00:00Z', 0.4) + |] + + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + dbPath + tables + + Right result1 <- + graphql + schema + Nothing + mempty + [gql|{ + users (filter: {rowid: {eq: 2}}) { + name + } + }|] + + Ae.encode result1 + `shouldBe` "{\"data\":{\"users\":[{\"name\":\"Eve\"}]}}" + + Right result2 <- + graphql + schema + Nothing + mempty + [gql|{ + users (filter: {progress: {eq: 0.4}}) { + name + } + }|] + + Ae.encode result2 + `shouldBe` "{\"data\":{\"users\":[{\"name\":\"Eve\"}]}}" + + it "supports retrieving records with a filter over boolean and null" $ do + let 
dbPathFilter = testRoot </> "filter_eq_boolean.db" + removeIfExists dbPathFilter + conn <- open dbPathFilter + execute_ + conn + [sql| + CREATE TABLE IF NOT EXISTS "users" ( + "name" TEXT, + "is_admin" BOOLEAN + ); + |] + execute_ + conn + [sql| + INSERT INTO USERS (name, is_admin) + VALUES + ('John', TRUE), + ('Eve', FALSE), + ('Anna', NULL) + |] + + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + + Right result1 <- + graphql + schema + Nothing + mempty + [gql|{ + users (filter: {is_admin: {eq: false}}) { + name + } + }|] + + Ae.encode result1 + `shouldBe` "{\"data\":{\"users\":[{\"name\":\"Eve\"}]}}" + + Right result2 <- + graphql + schema + Nothing + mempty + [gql|{ + users (filter: {is_admin: {eq: null}}) { + name + } + }|] + + Ae.encode result2 + `shouldBe` "{\"data\":{\"users\":[{\"name\":\"Anna\"}]}}" + + it "supports retrieving records with like and ilike filter" $ do + withRetryConn (unpack dbPath) $ \conn -> do + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values + ('John', 'john@example.com', '2021-01-01T00:00Z'), + ('Anna', 'anna@EXAMPLE.COM', '2019-01-01T00:00Z'), + ('Eve', 'eve@evil.com', '2019-01-01T00:00Z') + |] + + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + dbPath + tables + + Right likeResult <- + graphql + schema + Nothing + mempty + [gql| + { + users (filter: {email: {like: "%example%"}}) { + name + } + } + |] + + Ae.encode likeResult + `shouldBe` "{\"data\":{\"users\":[{\"name\":\"John\"}]}}" + + Right ilikeResult <- + graphql + schema + Nothing + mempty + [gql| + { + users (filter: {email: {ilike: "%example%"}}) { + name + } + } + |] + + Ae.encode ilikeResult + `shouldBe` "{\"data\":{\"users\":\ + \[{\"name\":\"John\"},{\"name\":\"Anna\"}]}}" + + it "supports retrieving records with several filters" $ do + withRetryConn (unpack dbPath) $ \conn -> do + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values 
+ ('John', 'john@example.com', '2021-01-01T00:00Z'), + ('Anna', 'anna@example.com', '2019-01-01T00:00Z'), + ('Eve', 'eve@evil.com', '2019-01-01T00:00Z') + |] + + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + dbPath + tables + + Right likeResult <- + graphql + schema + Nothing + mempty + [gql| + { + users ( + filter: { + email: { like: "%example%" }, + name: { gt: "B" } + } + ) { + name + } + } + |] + + Ae.encode likeResult + `shouldBe` "{\"data\":{\"users\":[{\"name\":\"John\"}]}}" + + it "supports mutating records with several filters" $ do + withRetryConn (unpack dbPath) $ \conn -> do + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values + ('John', 'john@example.com', '2021-01-01T00:00Z'), + ('Anna', 'anna@example.com', '2019-01-01T00:00Z'), + ('Eve', 'eve@evil.com', '2019-01-01T00:00Z') + |] + + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + dbPath + tables + + Right likeResult <- + graphql + schema + Nothing + mempty + [gql| + mutation UpdateEmailAddress { + update_users ( + filter: { + email: { like: "%example%" } + name: { gt: "B" } + } + set: { email: "john@new.com" } + ) { + affected_rows + returning { + email + } + } + } + |] + + Ae.encode likeResult + `shouldBe` "{\"data\":{\"update_users\":\ + \{\"affected_rows\":1,\ + \\"returning\":[{\"email\":\"john@new.com\"}]}}}" + + it "supports retrieving multi-type columns" $ do + withTestDbConn shouldSaveDbs "multi-type_columns.db" $ \conn -> do + execute_ + conn + [sql| + CREATE VIEW "multi_type_column" AS + SELECT 1 AS col UNION + SELECT 2.2 AS col UNION + SELECT 'three' AS col UNION + SELECT NULL AS col + |] + + tables <- getTables conn + schema <- + getDerivedSchema defaultSchemaConf conn dbPath tables + + Right result1 <- + graphql + schema + Nothing + mempty + [gql|{ + multi_type_column { col } + }|] + + let expected = + rmSpaces + [raw| + { + "data": { + "multi_type_column": [ + { "col": null }, + { 
"col": "1" }, + { "col": "2.2" }, + { "col": "three" } + ] + } + } + |] + + Ae.encode result1 `shouldBe` expected + + it "supports querying a single entry" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values + ('John', 'john@example.com', '2021-01-01T00:00Z'), + ('Eve', 'eve@example.com', '2019-01-01T00:00Z') + |] + + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + + Right result <- + graphql + schema + Nothing + mempty + [gql| + { + users_by_pk (email: "eve@example.com") { + name + } + } + |] + + pendingWith "Not implemented yet" + + Ae.encode result + `shouldBe` "{\"data\":{\"users\":[{\"name\":\"Eve\"}]}}" + + it "errors out on integer overflows" $ do + withTestDbConn shouldSaveDbs "integer-overflows.db" $ \conn -> do + execute_ + conn + [sql| + CREATE TABLE test ( + big INTEGER, + alsobig INTEGER + ) + |] + + execute_ + conn + [sql| + INSERT INTO test(big, alsobig) + VALUES (8000000000, 9000000000) + |] + + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + Right result <- + graphql + schema + Nothing + mempty + [gql| + { + test { + big, + alsobig + } + } + |] + + let expected = + rmSpaces + [raw| + { + "data": { + "test": null + }, + "errors": [{ + "locations": [{ + "column": 3, + "line": 2 + }], + "message": + "user error (Multiple errors occurred:\nOn column \"big\": Integer 8000000000 would overflow. This happens because SQLite uses 64-bit ints, but GraphQL uses 32-bit ints. Use a Number (64-bit float) or Text column instead.\nOn column \"alsobig\": Integer 9000000000 would overflow. This happens because SQLite uses 64-bit ints, but GraphQL uses 32-bit ints. 
Use a Number (64-bit float) or Text column instead.\n)", + "path": ["test"] + }] + } + |] + + Ae.encode result `shouldBe` expected + + describe "Mutations" $ do + it "supports inserting data" $ do + let + query = + [gql| + mutation InsertUser { + insert_users(objects: [ + { + name: "John", + email: "john@example.com", + created_utc: "2021-11-21T09:51Z" + }, + { + email: "eve@example.com", + created_utc: "2021-11-21T09:51Z" + } + ]) { + affected_rows + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "insert_users": { + "affected_rows": 2 + } + } + } + |] + + conn <- open $ unpack dbPath + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + Right result <- graphql schema Nothing mempty query + + Ae.encode result `shouldBe` expected + + allUsers <- query_ conn "select * from users" + + allUsers + `shouldBe` [ + [ SQLText "John" + , SQLText "john@example.com" + , SQLText "2021-11-21T09:51Z" + , SQLNull + , SQLNull + ] + , + [ SQLNull + , SQLText "eve@example.com" + , SQLText "2021-11-21T09:51Z" + , SQLNull + , SQLNull + ] + ] + + it "supports inserting and retrieving booleans" $ do + let testDbPath = testRoot </> "boolean-test.db" + conn <- open testDbPath + execute_ + conn + [sql| + CREATE TABLE IF NOT EXISTS checks ( + id INTEGER PRIMARY KEY, + completed BOOLEAN DEFAULT (FALSE) NOT NULL + ) + |] + execute_ conn "DELETE FROM checks" + execute_ + conn + [sql| + INSERT INTO checks (id, completed) + VALUES (1, 0), (2, 1), (3, FALSE), (4, TRUE) + |] + + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + (T.pack testDbPath) + tables + + let + mutation = + [gql| + mutation InsertChecks { + insert_checks(objects: [ + { id: 5, completed: true }, + ]) { + affected_rows + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "insert_checks": { + "affected_rows": 1 + } + } + } + |] + + Right result <- graphql schema Nothing mempty mutation + + Ae.encode result `shouldBe` expected + + 
allUsers <- query_ conn "select * from checks" + + allUsers + `shouldBe` [ [SQLInteger 1, SQLInteger 0] + , [SQLInteger 2, SQLInteger 1] + , [SQLInteger 3, SQLInteger 0] + , [SQLInteger 4, SQLInteger 1] + , [SQLInteger 5, SQLInteger 1] + ] + + let + query = + [gql| + query GetChecks { + checks { id, completed } + } + |] + expectedResult = + rmSpaces + [raw| + {"data":{"checks":[ + {"id":1,"completed":false}, + {"id":2,"completed":true}, + {"id":3,"completed":false}, + {"id":4,"completed":true}, + {"id":5,"completed":true} + ]}} + |] + + Right queryResult <- graphql schema Nothing mempty query + + Ae.encode queryResult `shouldBe` expectedResult + + it "supports inserting empty records" $ do + let testDbPath = testRoot </> "empty-record-insertion.db" + SS.withConnection testDbPath $ \conn -> do + execute_ + conn + [sql| + CREATE TABLE IF NOT EXISTS items ( + id INTEGER PRIMARY KEY NOT NULL + ) + |] + execute_ conn "DELETE FROM items" + + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + (T.pack testDbPath) + tables + + let + mutation = + [gql| + mutation InsertItems { + insert_items(objects: [{}]) { + affected_rows + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "insert_items": { + "affected_rows": 1 + } + } + } + |] + + Right result <- graphql schema Nothing mempty mutation + + Ae.encode result `shouldBe` expected + + allItems <- query_ conn "select * from items" + allItems `shouldBe` [[SQLInteger 1]] + + it "treats NULL as a value when using 'neq' filters" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values + (NULL, 'john@example.com', '2021-01-01T00:00Z'), + ('Eve', 'eve@example.com', '2021-01-02T00:00Z') + |] + + let + query = + [gql| + query { + users(filter: { name: { neq: "Eve" }}) { + name, email + } + } + |] + + expected = + rmSpaces + [raw|{ + "data": { + "users": [{ + "name": null, + "email": "john@example.com" + }] + } + }|] + + tables <- 
getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + Right result <- graphql schema Nothing mempty query + + Ae.encode result `shouldBe` expected + + it "treats NULL as a value when using 'nin' filters" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values + ('Eve', 'eve@example.com', '2021-01-01T00:00Z'), + ('Jon', 'jon@example.com', '2021-01-02T00:00Z'), + ('Arbuckle', 'arbuckle@example.com', '2021-01-03T00:00Z'), + (NULL, 'adam@example.com', '2021-01-04T00:00Z') + |] + + let + query = + [gql| + query { + users(filter: { name: { nin: ["Eve", "Arbuckle"]}}) { + name, email + } + } + |] + + expected = + rmSpaces + [raw|{ + "data": { + "users": [{ + "name": "Jon", + "email": "jon@example.com" + }, { + "name": null, + "email": "adam@example.com" + }] + } + }|] + + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + Right result <- graphql schema Nothing mempty query + + Ae.encode result `shouldBe` expected + + it "supports 'in' filters" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values + ('Eve', 'eve@example.com', '2021-01-01T00:00Z'), + ('Jon', 'jon@example.com', '2021-01-02T00:00Z'), + ('Arbuckle', 'arbuckle@example.com', '2021-01-03T00:00Z'), + (NULL, 'adam@example.com', '2021-01-04T00:00Z') + |] + + let + query = + [gql| + query { + users(filter: { name: { in: ["Eve", "Arbuckle"]}}) { + name, email + } + } + |] + + expected = + rmSpaces + [raw|{ + "data": { + "users": [{ + "name": "Eve", + "email": "eve@example.com" + }, { + "name": "Arbuckle", + "email": "arbuckle@example.com" + }] + } + }|] + + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + Right result <- graphql schema Nothing mempty query + + Ae.encode result `shouldBe` expected + + it "supports inserting data and returning the created data" $ do + let + query = + 
[gql| + mutation InsertUsers { + insert_users(objects: [ + { + name: "John", + email: "john@example.com", + created_utc: "2021-11-21T09:51Z" + } + ]) { + affected_rows + returning { + rowid, + name + } + } + } + |] + expected = + rmSpaces + [raw|{ + "data": { + "insert_users": { + "affected_rows": 1, + "returning": [ + { + "rowid": 1, + "name": "John" + } + ] + } + } + }|] + + conn <- open $ unpack dbPath + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + Right result <- graphql schema Nothing mempty query + + Ae.encode result `shouldBe` expected + + it "supports updating data and returning the updated data" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values + ('John', 'john@update-test.com', '2021-01-01T00:00Z'), + ('Eve', 'eve@update-test.com', '2021-01-02T00:00Z') + |] + + let + query = + [gql| + mutation UpdateUsers { + update_users( + filter: { email: { eq: "eve@update-test.com" } }, + set: { name: "New Name" } + ) { + affected_rows + returning { + rowid + name + } + } + } + |] + expected = + rmSpaces + [raw|{ + "data": { + "update_users": { + "affected_rows": 1, + "returning": [ + { + "rowid": 2, + "name": "New Name" + } + ] + } + } + }|] + + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + Right result <- graphql schema Nothing mempty query + + Ae.encode result `shouldBe` expected + + it "supports upserting data" $ do + let + firstQuery = + [gql| + mutation { + insert_users(objects: [ + { + email: "eve@example.com", + name: "Eve", + created_utc: "2021-11-21T09:51Z" + } + ]) { + affected_rows + } + } + |] + + -- the `where` clause is not required here, but is there + -- to make sure filters work properly. 
+ secondQuery = + [gql| + mutation { + insert_users( + objects: { + email: "eve@example.com", + name: "Eveline", + created_utc: "2022-11-21T09:51Z" + }, + on_conflict: { + constraint: email, + update_columns: [name, created_utc], + where: { created_utc: { eq: "2021-11-21T09:51Z" }} + }) { + returning { name } + } + } + |] + + expected = + rmSpaces + [raw| + { + "data": { + "insert_users": { + "returning": [{ + "name": "Eveline" + }] + } + } + } + |] + + conn <- open $ unpack dbPath + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + Right _ <- graphql schema Nothing mempty firstQuery + Right result <- graphql schema Nothing mempty secondQuery + + Ae.encode result `shouldBe` expected + + allUsers <- query_ conn "select * from users" + + let user = + [ SQLText "Eveline" + , SQLText "eve@example.com" + , SQLText "2022-11-21T09:51Z" + , SQLNull + , SQLNull + ] + + allUsers `shouldBe` [user] + + it "fails on invalid upserts" $ do + let + firstQuery = + [gql| + mutation { + insert_users(objects: [ + { + email: "eve@example.com", + name: "Eve", + created_utc: "2021-11-21T09:51Z" + } + ]) { + affected_rows + } + } + |] + + -- We specify the `progress` column on `update_columns`, + -- but do not provide an explicit value. + secondQuery = + [gql| + mutation { + insert_users( + objects: { + email: "eve@example.com", + name: "Eveline", + created_utc: "2022-11-21T09:51Z" + }, + on_conflict: { + constraint: email, + update_columns: [name, created_utc, progress] + }) { + returning { name } + } + } + |] + + expected :: Text + expected = + "user error (Column progress cannot be set on conflicts without being explicitly provided)" + + conn <- open $ unpack dbPath + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + Right _ <- graphql schema Nothing mempty firstQuery + Just err <- + catchAll + (Nothing <$ graphql schema Nothing mempty secondQuery) + (pure . Just . 
show) + + err `shouldBe` expected + + it "supports deleting data and returning the deleted data" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values + ('John', 'john@del-test.com', '2021-01-01T00:00Z'), + ('Eve', 'eve@del-test.com', '2021-01-02T00:00Z') + |] + + let + query = + [gql| + mutation DeleteUsers { + delete_users( + filter: { email: { eq: "eve@del-test.com" } } + ) { + affected_rows + returning { + rowid + name + } + } + } + |] + expected = + rmSpaces + [raw|{ + "data": { + "delete_users": { + "affected_rows": 1, + "returning": [ + { + "rowid": 2, + "name": "Eve" + } + ] + } + } + }|] + + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + Right result <- graphql schema Nothing mempty query + + Ae.encode result `shouldBe` expected + + it "supports parsing SQL queries" $ do + let + sqlQuery = + [raw| + CREATE TABLE IF NOT EXISTS checks ( + color TEXT CHECK ( color IN ('red', 'green', 'blue') ) NOT NULL + ) + |] + sqlQueryParsed :: Text = + [raw| + Right ( + CreateTable + [Name Nothing "checks"] + [TableColumnDef + (ColumnDef + (Name Nothing "color") + (TypeName [Name Nothing "TEXT"]) + Nothing + [ ColConstraintDef Nothing + (ColCheckConstraint + (In True + (Iden [Name Nothing "color"]) + (InList + [ StringLit "'" "'" "red" + , StringLit "'" "'" "green" + , StringLit "'" "'" "blue" + ] + ) + ) + ) + , ColConstraintDef Nothing ColNotNullConstraint + ] + ) + ] + ) + |] + + rmSpacesText txt = + txt + & T.replace " " "" + & T.replace "\n" "" + + rmSpacesText (show $ parseSql sqlQuery) + `shouldBe` rmSpacesText sqlQueryParsed + + it "supports inserting and retrieving single select fields" $ do + let testDbPath = testRoot </> "single-select-test.db" + conn <- open testDbPath + let + sqlQuery = + [raw| + CREATE TABLE IF NOT EXISTS checks ( + color TEXT CHECK ( color IN ('red', 'green', 'blue') ) + ) + |] + execute_ conn $ SS.Query sqlQuery + execute_ conn 
"DELETE FROM checks" + execute_ + conn + [sql| + INSERT INTO checks (color) + VALUES ('red') + |] + + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + (T.pack testDbPath) + tables + + -- tableColumns <- getColumns testDbPath conn "checks" + + let + mutation = + [gql| + mutation { + insert_checks(objects: [ + { color: "green" }, + ]) { + affected_rows + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "insert_checks": { + "affected_rows": 1 + } + } + } + |] + + Right result <- graphql schema Nothing mempty mutation + + Ae.encode result `shouldBe` expected + + allColors <- query_ conn "select * from checks" + + allColors `shouldBe` [[SQLText "red"], [SQLText "green"]] + + let + query = + [gql| + query { + checks { color } + } + |] + expectedResult = + rmSpaces + [raw| + { "data": { + "checks": [ + {"color": "red"}, + {"color": "green"} + ] + } } + |] + + Right queryResult <- graphql schema Nothing mempty query + + Ae.encode queryResult `shouldBe` expectedResult + + it "supports simultaneous inserts" $ do + conn <- open $ unpack dbPath + + let + query = + [gql| + mutation InsertObjects { + insert_users(objects: [ + { + email: "john@example.com", + created_utc: "2021-11-21T09:51Z", + } + ]) { + affected_rows + } + insert_songs(objects: [ + { + name: "Best Song", + duration_seconds: 125, + } + ]) { + affected_rows + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "insert_users": { + "affected_rows": 1 + }, + "insert_songs": { + "affected_rows": 1 + } + } + } + |] + + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + + Right result <- graphql schema Nothing mempty query + Ae.encode result `shouldBe` expected + + allUsers <- query_ conn "select * from songs" + + allUsers `shouldBe` [[SQLText "Best Song", SQLInteger 125]] + + it "supports simultaneous updates" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + 
values + ('John', 'john@update-test.com', '2021-01-01T00:00Z'), + ('Eve', 'eve@update-test.com', '2021-01-02T00:00Z') + |] + + let + query = + [gql| + mutation UpdateUsers { + john_update: update_users( + filter: { email: { eq: "john@update-test.com" } } + set: { name: "John New" } + ) { + affected_rows + } + eve_update: update_users( + filter: { email: { eq: "eve@update-test.com" } } + set: { name: "Eve New" } + ) { + affected_rows + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "john_update": { + "affected_rows": 1 + }, + "eve_update": { + "affected_rows": 1 + } + } + } + |] + + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + + Right result <- graphql schema Nothing mempty query + Ae.encode result `shouldBe` expected + + allUsers <- query_ conn "select name from users" + + allUsers + `shouldBe` [ [SQLText "John New"] + , [SQLText "Eve New"] + ] + + it "supports variables" $ do + let + query = + [gql| + mutation InsertUsers ($objects: [users_insert_input]) { + insert_users(objects: $objects) { + affected_rows + } + } + |] + variables :: Object + variables = + fromMaybe mempty $ + Ae.decode + [raw| + { + "objects": [ + { + "email": "new.user@example.org", + "created_utc" : "2022-01-29T14:22Z" + } + ] + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "insert_users": { + "affected_rows": 1 + } + } + } + |] + + conn <- open $ unpack dbPath + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + + Right result <- graphql schema Nothing variables query + + Ae.encode result `shouldBe` expected + + it "correctly converts between GraphQL and SQLite floats" $ do + conn <- open (testRoot </> "float-test.db") + execute_ + conn + [sql| + CREATE TABLE IF NOT EXISTS loaders ( + id INTEGER PRIMARY KEY, + progress REAL NOT NULL + ) + |] + execute_ conn "DELETE FROM loaders" + execute_ + conn + [sql| + INSERT INTO loaders (id, progress) + VALUES (1, 0), (2, 0.1), (3, -0.1), 
(4, 123.456) + |] + + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + + let + mutation = + [gql| + mutation InsertLoaders { + insert_loaders(objects: [ + { id: 5, progress: 1.23 }, + ]) { + affected_rows + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "insert_loaders": { + "affected_rows": 1 + } + } + } + |] + + Right result <- graphql schema Nothing mempty mutation + + Ae.encode result `shouldBe` expected + + allUsers <- query_ conn "select * from loaders" + + allUsers + `shouldBe` [ [SQLInteger 1, SQLFloat 0.0] + , [SQLInteger 2, SQLFloat 0.1] + , [SQLInteger 3, SQLFloat (-0.1)] + , [SQLInteger 4, SQLFloat 123.456] + , [SQLInteger 5, SQLFloat 1.23] + ] + + let + query = + [gql| + query GetLoaders { + loaders { id, progress } + } + |] + expectedResult :: Object + expectedResult = + fromMaybe mempty $ + Ae.decode + [raw| + { "data": { "loaders": [ + { "id": 1, "progress": 0 }, + { "id": 2, "progress": 0.1 }, + { "id": 3, "progress": -0.1 }, + { "id": 4, "progress": 123.456 }, + { "id": 5, "progress": 1.23 } + ]}} + |] + + Right queryResult <- graphql schema Nothing mempty query + + queryResult `shouldBe` expectedResult + + -- This particular case used to fail in a previous ticket + it "correctly roundtrips floats inserted using graphql and retrieved using REST" $ do + let dbId = "float-roundtrip-test" + withDataDbConn dbId $ \conn -> do + SS.execute_ + conn + [sql| + CREATE TABLE IF NOT EXISTS loaders ( + progress REAL NOT NULL + ) + |] + + tables <- getTables conn + schema <- + getDerivedSchema + defaultSchemaConf + conn + ("_TEST_" <> dbId) + tables + + let + mutation = + [gql| + mutation InsertLoaders { + insert_loaders(objects: [ + { progress: 1.23000 }, + ]) { + affected_rows + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "insert_loaders": { + "affected_rows": 1 + } + } + } + |] + + Right result <- graphql schema Nothing mempty mutation + Ae.encode result `shouldBe` expected + + Right 
restResult <- + runHandler $ + sqlQueryPostHandler + PragmaConf.defaultConf + ("_TEST_" <> dbId) + SQLPost{query = "SELECT * from loaders"} + + restResult.rows + `shouldBe` [ KeyMap.singleton + "progress" + (Number 1.23) + ] + + it "supports updating data" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values + ('John', 'john@example.com', '2021-01-01T00:00Z'), + ('Eve', 'eve@example.com', '2018-01-01T00:00Z') + |] + + let + query = + [gql| + mutation UpdateUser { + update_users( + filter: {email: {eq: "eve@example.com"}}, + set: { + name: "Liz" + } + ) { + affected_rows + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "update_users": { + "affected_rows": 1 + } + } + } + |] + + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + Right result <- graphql schema Nothing mempty query + + Ae.encode result `shouldBe` expected + + remainingUsers <- + query_ + conn + "select * from users where email = 'eve@example.com'" + + remainingUsers + `shouldBe` [ + [ SQLText "Liz" + , SQLText "eve@example.com" + , SQLText "2018-01-01T00:00Z" + , SQLNull + , SQLNull + ] + ] + + it "supports deleting data by text id" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values + ('John', 'john@example.com', '2021-01-01T00:00Z'), + ('Eve', 'eve@example.com', '2019-01-01T00:00Z') + |] + + let + query = + [gql| + mutation DeleteUsers { + delete_users(filter: {email: {eq: "eve@example.com"}}) { + affected_rows + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "delete_users": { + "affected_rows": 1 + } + } + } + |] + + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + Right result <- graphql schema Nothing mempty query + + Ae.encode result `shouldBe` expected + + remainingUsers <- query_ conn "select * from users" + + remainingUsers + `shouldBe` [ + [ SQLText 
"John" + , SQLText "john@example.com" + , SQLText "2021-01-01T00:00Z" + , SQLNull + , SQLNull + ] + ] + + it "supports deleting data by integer id" $ do + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values + ('John', 'john@example.com', '2021-01-01T00:00Z'), + ('Eve', 'eve@example.com', '2019-01-01T00:00Z') + |] + + let + query = + [gql| + mutation DeleteUsers { + delete_users(filter: {rowid: {eq: 2}}) { + affected_rows + } + } + |] + expected = + rmSpaces + [raw|{ + "data": { + "delete_users": { + "affected_rows": 1 + } + } + }|] + + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + Right result <- graphql schema Nothing mempty query + + Ae.encode result `shouldBe` expected + + remainingUsers <- query_ conn "select * from users" + + remainingUsers + `shouldBe` [ + [ SQLText "John" + , SQLText "john@example.com" + , SQLText "2021-01-01T00:00Z" + , SQLNull + , SQLNull + ] + ] + + it "returns error on foreign key constraint violation" $ do + withTestDbConn shouldSaveDbs (testRoot </> "foreign_key_constraint.db") $ + \conn -> do + execute_ + conn + [sql| + CREATE TABLE artist( + id INTEGER PRIMARY KEY, + name TEXT + ) + |] + execute_ + conn + [sql| + INSERT INTO artist (id, name) + VALUES (1, 'Artist 1') + |] + execute_ + conn + [sql| + CREATE TABLE track( + id INTEGER, + name TEXT, + artistId INTEGER, -- Must map to an artist.id + FOREIGN KEY(artistId) REFERENCES artist(id) + ) + |] + + tables <- getTables conn + schema <- + getDerivedSchema defaultSchemaConf conn dbPath tables + + let invalidQuery = + [gql| + mutation InsertTrack { + insert_track(objects: [ + { + id: 1, + name: "Track 1", + artistId: 2 + } + ]) { + affected_rows + } + } + |] + + Right result <- graphql schema Nothing mempty invalidQuery + + let expected = + rmSpaces + [raw|{ + "data": { "insert_track": null }, + "errors": [ + { + "locations": [ { "column": 3, "line": 2 } ], + "message": + "SQLite3 
returned ErrorConstraint while attempting to perform step: FOREIGN KEY constraint failed", + "path": [ "insert_track" ] + } + ] + }|] + + Ae.encode result `shouldBe` expected + + describe "Introspection" $ do + it "creates JSON object for GQL types" $ do + let + tableName = "example_table" + + actual = + createType + tableName + "Description of field" + [] -- No arguments + ["NON_NULL", "LIST", "NON_NULL", "OBJECT"] + tableName + + expected = + GQL.Object $ + HashMap.fromList + [ ("name", GQL.String tableName) + , ("description", "Description of field") + , + ( "type" + , GQL.Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , GQL.Object $ + HashMap.fromList + [ ("kind", "LIST") + , + ( "ofType" + , GQL.Object $ + HashMap.fromList + [ ("kind", "NON_NULL") + , + ( "ofType" + , GQL.Object $ + HashMap.fromList + [ ("kind", "OBJECT") + , ("name", GQL.String tableName) + ] + ) + ] + ) + ] + ) + ] + ) + ] + + actual `shouldBe` expected + + describe "Query" $ do + it "supports a minimal introspection query" $ do + let + introspectionQuery :: Text + introspectionQuery = + [gql| + query IntrospectionQuery { + __schema { + queryType { name } + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "__schema": { + "queryType": { + "name": "Query" + } + } + } + } + |] + + conn <- open $ unpack dbPath + tables <- getTables conn + schema <- + getDerivedSchema defaultSchemaConf conn dbPath tables + + Right result <- graphql schema Nothing mempty introspectionQuery + + Ae.encode result `shouldBe` expected + + it "supports an optional filter argument" $ do + let + introspectionQuery :: Text + introspectionQuery = + [gql| + query IntrospectionQuery { + __schema { + queryType { + name + fields { + name + args { + name + type { + name + } + } + } + } + } + } + |] + expected = + rmSpaces + [raw|{ + "data": { + "__schema": { + "queryType": { + "name": "Query", + "fields": [ + { + "name": "users", + "args": [ + { "name": "filter", + "type": { "name": 
"users_filter" } + }, + { "name": "order_by", + "type": { "name": null } + }, + { "name": "limit", + "type": { "name": "Int" } + }, + { "name": "offset", + "type": { "name": "Int" } + } + ] + }, + { + "name": "songs", + "args": [ + { "name": "filter", + "type": { "name": "songs_filter" } + }, + { "name": "order_by", + "type": { "name": null } + }, + { "name": "limit", + "type": { "name": "Int" } + }, + { "name": "offset", + "type": { "name": "Int" } + } + ] + } + ] + } + } + } + }|] + + conn <- open $ unpack dbPath + tables <- getTables conn + schema <- + getDerivedSchema defaultSchemaConf conn dbPath tables + + Right result <- graphql schema Nothing mempty introspectionQuery + + Ae.encode result `shouldBe` expected + + it "doesn't allow writeonly tokens to read data" $ do + let + introspectionQuery :: Text + introspectionQuery = + [gql| + query IntrospectionQuery { + __schema { + queryType { name } + } + } + |] + + expected = + rmSpaces + [raw| + { + "data": null, + "errors": [{ + "locations": [{ "column":3, "line":2 }], + "message": "user error (Cannot read field using writeonly access code)", + "path": ["__schema"] + }] + } + |] + + schema <- withRetryConn (unpack dbPath) $ \conn -> do + tables <- getTables conn + getDerivedSchema + defaultSchemaConf{accessMode = WriteOnly} + conn + dbPath + tables + + Right response <- + graphql schema Nothing mempty introspectionQuery + + Ae.encode response `shouldBe` expected + + describe "Mutation" $ do + it "supports introspection queries" $ do + let + introspectionQuery :: Text + introspectionQuery = + [gql| + query IntrospectionQuery { + __schema { + mutationType { + name + fields { + name + args { + name + } + } + } + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "__schema": { + "mutationType": { + "name": "Mutation", + "fields": [ + { + "name": "insert_users", + "args": [ { "name": "objects" }, { "name": "on_conflict" } ] + }, + { + "name": "update_users", + "args": [ + { "name": "filter" }, + { "name": 
"set" } + ] + }, + { + "name": "delete_users", + "args": [ { "name": "filter" } ] + }, + { + "name": "insert_songs", + "args": [ { "name": "objects" }, { "name": "on_conflict" } ] + }, + { + "name": "update_songs", + "args": [ + { "name": "filter" }, + { "name": "set" } + ] + }, + { + "name": "delete_songs", + "args": [ { "name": "filter" } ] + } + ] + } + } + } + } + |] + + conn <- open $ unpack dbPath + tables <- getTables conn + schema <- + getDerivedSchema defaultSchemaConf conn dbPath tables + + Right result <- graphql schema Nothing mempty introspectionQuery + + Ae.encode result `shouldBe` expected + + it "supports __typename on root query" $ do + let + introspectionQuery = + [gql| + query TypeName { + __typename + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "__typename" : "Query" + } + } + |] + + conn <- open $ unpack dbPath + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + + Right result <- graphql schema Nothing mempty introspectionQuery + + Ae.encode result `shouldBe` expected + + it "returns fields for {query,mutation,subscription}Type" $ do + let + introspectionQuery = + [gql| + query { + __schema { + queryType { fields { name } } + mutationType { fields { name } } + subscriptionType { fields { name } } + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "__schema": { + "queryType": { + "fields": [ + { "name": "users" }, + { "name": "songs" } + ] + }, + "subscriptionType": null, + "mutationType": { + "fields": [ + { "name": "insert_users" }, + { "name": "update_users" }, + { "name": "delete_users" }, + { "name": "insert_songs" }, + { "name": "update_songs" }, + { "name": "delete_songs" } + ] + } + } + } + } + |] + + conn <- open $ unpack dbPath + tables <- getTables conn + schema <- + getDerivedSchema defaultSchemaConf conn dbPath tables + + Right result <- graphql schema Nothing mempty introspectionQuery + + Ae.encode result `shouldBe` expected + + it "supports __typename fields" $ do 
+ let + introspectionQuery = + [gql| + query UsersTypeName { + users { + __typename + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "users": [ + { "__typename" : "users_row" } + ] + } + } + |] + + conn <- open $ unpack dbPath + execute_ + conn + [sql| + insert into users (name, email, created_utc) + values ('John', 'john@example.com', '2022-01-01T00:00Z') + |] + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + + Right result <- graphql schema Nothing mempty introspectionQuery + + Ae.encode result `shouldBe` expected + + it "returns types" $ do + let + introspectionQuery = + [gql| + query { + __schema { + types { + kind + name + } + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "__schema": { + "types": [ + { "kind": "OBJECT", "name": "users_row" }, + { "kind": "OBJECT", "name": "users_mutation_response" }, + { "kind": "INPUT_OBJECT", "name": "users_insert_input" }, + { "kind": "ENUM", "name": "users_column" }, + { "kind": "INPUT_OBJECT", "name": "users_upsert_on_conflict" }, + { "kind": "INPUT_OBJECT", "name": "users_set_input" }, + { "kind": "INPUT_OBJECT", "name": "users_filter" }, + { "kind": "INPUT_OBJECT", "name": "users_order_by" }, + + { "kind": "OBJECT", "name": "songs_row" }, + { "kind": "OBJECT", "name": "songs_mutation_response" }, + { "kind": "INPUT_OBJECT", "name": "songs_insert_input" }, + { "kind": "ENUM", "name": "songs_column" }, + { "kind": "INPUT_OBJECT", "name": "songs_upsert_on_conflict" }, + { "kind": "INPUT_OBJECT", "name": "songs_set_input" }, + { "kind": "INPUT_OBJECT", "name": "songs_filter" }, + { "kind": "INPUT_OBJECT", "name": "songs_order_by" }, + + { "kind": "INPUT_OBJECT", "name": "IntComparison" }, + { "kind": "INPUT_OBJECT", "name": "FloatComparison" }, + { "kind": "INPUT_OBJECT", "name": "StringComparison" }, + { "kind": "INPUT_OBJECT", "name": "BooleanComparison" }, + + { "kind": "ENUM", "name": "OrderingTerm" }, + + { "kind": "OBJECT", "name": "Query" }, + 
{ "kind": "OBJECT", "name": "Mutation" }, + { "kind": "SCALAR", "name": "Boolean" }, + { "kind": "SCALAR", "name": "Int" }, + { "kind": "SCALAR", "name": "Float" }, + { "kind": "SCALAR", "name": "String" }, + { "kind": "SCALAR", "name": "ID" }, + { "kind": "SCALAR", "name": "Upload" }, + { "kind": "OBJECT", "name": "__Schema" }, + { "kind": "OBJECT", "name": "__Type" }, + { "kind": "ENUM", "name": "__TypeKind" }, + { "kind": "OBJECT", "name": "__Field" }, + { "kind": "OBJECT", "name": "__InputValue" }, + { "kind": "OBJECT", "name": "__EnumValue" }, + { "kind": "OBJECT", "name": "__Directive" }, + { "kind": "ENUM", "name": "__DirectiveLocation" } + ] + } + } + } + |] + + conn <- open $ unpack dbPath + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + + Right result <- graphql schema Nothing mempty introspectionQuery + + Ae.encode result `shouldBe` expected + + it "returns directives on __schema" $ do + let + introspectionQuery :: Text = + [gql| + query UsersTypeName { + __schema { + directives { + name + description + locations + args { + name + description + defaultValue + type { ...TypeRef } + } + } + } + } + + fragment TypeRef on __Type { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + } + } + } + } + } + } + } + } + |] + expected = + rmSpaces + [raw| + { + "data": { + "__schema": { + "directives": [ + { + "name": "skip", + "args": [ + { + "name": "if", + "defaultValue": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind":"SCALAR", + "name":"Boolean", + "ofType":null + } + }, + "description": "Skipped when true." + } + ], + "locations": [ + "INLINE_FRAGMENT", + "FRAGMENT_SPREAD", + "FIELD" + ], + "description": "Directs the executor to skip this field or fragment when the `if` argument is true." 
+ }, + { + "name": "include", + "args": [ + { + "name": "if", + "defaultValue": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind":"SCALAR", + "name":"Boolean", + "ofType":null + } + }, + "description": "Included when true." + } + ], + "locations": [ + "INLINE_FRAGMENT", + "FRAGMENT_SPREAD", + "FIELD" + ], + "description": + "Directs the executor to include this field or fragment only when the `if` argument is true." + }, + { + "name": "deprecated", + "args": [ + { + "name": "reason", + "defaultValue": "\"No longer supported\"", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "description": + "Explains why this element was deprecated, usually also including a suggestion for how to access supported similar data. Formatted using the Markdown syntax (as specified by [CommonMark](https://commonmark.org/)." + } + ], + "locations": [ + "ENUM_VALUE", + "FIELD_DEFINITION" + ], + "description": + "Marks an element of a GraphQL schema as no longer supported." 
+ } + ] + } + } + } + |] + + conn <- open $ unpack dbPath + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + + Right result <- graphql schema Nothing mempty introspectionQuery + + Ae.encode result `shouldBe` expected + + it "supports a full introspection query" $ do + gqlFile <- makeAbsolute $ testRoot </> "introspection_query.gql" + introspectionQuery <- readFile gqlFile + + jsonFile <- makeAbsolute $ testRoot </> "introspection_result.json" + expected <- readFile jsonFile + + conn <- open $ unpack dbPath + tables <- getTables conn + schema <- getDerivedSchema defaultSchemaConf conn dbPath tables + + Right result <- graphql schema Nothing mempty introspectionQuery + + Ae.encode result `shouldBe` rmSpaces expected + + +deleteDbEntries :: Text -> IO () +deleteDbEntries databasePath = do + conn <- open $ unpack databasePath + execute_ conn "delete from users" + execute_ conn "delete from songs" + + +main :: IO () +main = do + cwd <- getWorkingDirectory + createDirectoryIfMissing True (".." </> "data" </> "TEST" </> "data") + changeWorkingDirectory (cwd </> ".." 
</> "data" </> "TEST") + + removeIfExists $ unpack dbPath + + withRetryConn (unpack dbPath) $ \conn -> do + createUsersTable conn + createSongsTable conn + + Hspec.hspec $ before_ (deleteDbEntries dbPath) testSuite diff --git a/tests/Tests/Utils.hs b/tests/Tests/Utils.hs new file mode 100644 index 0000000..df812ab --- /dev/null +++ b/tests/Tests/Utils.hs @@ -0,0 +1,41 @@ +module Tests.Utils ( + testRoot, + withDataDbConn, + withTestDbConn, +) where + +import Protolude ( + Bool (True), + FilePath, + IO, + ($), + (<>), + ) + +import Database.SQLite.Simple qualified as SS +import System.Directory (createDirectoryIfMissing, removePathForcibly) +import System.FilePath ((</>)) + +import AirGQL.Utils (removeIfExists, withRetryConn) + + +testRoot :: FilePath +testRoot = "../../airgql/tests" + + +-- | Get a connection to a database in the test database directory +withTestDbConn :: Bool -> FilePath -> (SS.Connection -> IO a) -> IO a +withTestDbConn shouldSaveDbs testDbPath callback = do + removeIfExists $ testRoot </> testDbPath + withRetryConn + (if shouldSaveDbs then testRoot </> testDbPath else ":memory:") + callback + + +-- | Get a connection to a test database in the main data directory +withDataDbConn :: FilePath -> (SS.Connection -> IO a) -> IO a +withDataDbConn testDbDir callback = do + let fullPath = "data" </> "databases" </> "_TEST_" <> testDbDir + removePathForcibly fullPath + createDirectoryIfMissing True fullPath + withRetryConn (fullPath </> "main.sqlite") callback diff --git a/tests/example.sqlite b/tests/example.sqlite new file mode 100644 index 0000000..7f87a69 Binary files /dev/null and b/tests/example.sqlite differ diff --git a/tests/introspection_query.gql b/tests/introspection_query.gql new file mode 100644 index 0000000..fb58c8c --- /dev/null +++ b/tests/introspection_query.gql @@ -0,0 +1,96 @@ +query IntrospectionQuery { + __schema { + + queryType { name } + mutationType { name } + subscriptionType { name } + types { + ...FullType + } + directives { + 
name + description + + locations + args { + ...InputValue + } + } + } +} + +fragment FullType on __Type { + kind + name + description + + fields(includeDeprecated: true) { + name + description + args { + ...InputValue + } + type { + ...TypeRef + } + isDeprecated + deprecationReason + } + inputFields { + ...InputValue + } + interfaces { + ...TypeRef + } + enumValues(includeDeprecated: true) { + name + description + isDeprecated + deprecationReason + } + possibleTypes { + ...TypeRef + } +} + +fragment InputValue on __InputValue { + name + description + type { ...TypeRef } + defaultValue + + +} + +fragment TypeRef on __Type { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + } + } + } + } + } + } + } +} diff --git a/tests/introspection_result.json b/tests/introspection_result.json new file mode 100644 index 0000000..7de2e2b --- /dev/null +++ b/tests/introspection_result.json @@ -0,0 +1,2798 @@ +{ + "data": { + "__schema": { + "directives": [ + { + "args": [ + { + "defaultValue": null, + "description": "Skipped when true.", + "name": "if", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + } + } + ], + "description": "Directs the executor to skip this field or fragment when the `if` argument is true.", + "locations": [ + "INLINE_FRAGMENT", + "FRAGMENT_SPREAD", + "FIELD" + ], + "name": "skip" + }, + { + "args": [ + { + "defaultValue": null, + "description": "Included when true.", + "name": "if", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + } + } + ], + "description": "Directs the executor to include this field or fragment only when the `if` argument is true.", + "locations": [ + "INLINE_FRAGMENT", + "FRAGMENT_SPREAD", + "FIELD" + ], + "name": "include" + }, + { + "args": [ + 
{ + "defaultValue": "\"No longer supported\"", + "description": "Explains why this element was deprecated, usually also including a suggestion for how to access supported similar data. Formatted using the Markdown syntax (as specified by [CommonMark](https://commonmark.org/).", + "name": "reason", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + ], + "description": "Marks an element of a GraphQL schema as no longer supported.", + "locations": [ + "ENUM_VALUE", + "FIELD_DEFINITION" + ], + "name": "deprecated" + } + ], + "mutationType": { + "name": "Mutation" + }, + "queryType": { + "name": "Query" + }, + "subscriptionType": null, + "types": [ + { + "description": "Available columns for table \"users\"", + "enumValues": null, + "fields": [ + { + "args": [], + "deprecationReason": null, + "description": "", + "isDeprecated": false, + "name": "rowid", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "args": [], + "deprecationReason": null, + "description": "", + "isDeprecated": false, + "name": "name", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "args": [], + "deprecationReason": null, + "description": "", + "isDeprecated": false, + "name": "email", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": "", + "isDeprecated": false, + "name": "created_utc", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": "", + "isDeprecated": false, + "name": "number_of_logins", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "args": [], + "deprecationReason": null, + "description": "", + "isDeprecated": false, + "name": "progress", + "type": { + "kind": "SCALAR", + 
"name": "Float", + "ofType": null + } + } + ], + "inputFields": null, + "interfaces": [], + "kind": "OBJECT", + "name": "users_row", + "possibleTypes": null + }, + { + "description": "Mutation response for users", + "enumValues": null, + "fields": [ + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "affected_rows", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "returning", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "users_row", + "ofType": null + } + } + } + } + } + ], + "inputFields": null, + "interfaces": [], + "kind": "OBJECT", + "name": "users_mutation_response", + "possibleTypes": null + }, + { + "description": "Input object for users", + "enumValues": null, + "fields": null, + "inputFields": [ + { + "defaultValue": null, + "description": "", + "name": "rowid", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "name", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "email", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + { + "defaultValue": null, + "description": "", + "name": "created_utc", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + { + "defaultValue": null, + "description": "", + "name": "number_of_logins", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + 
"description": "", + "name": "progress", + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + } + } + ], + "interfaces": [], + "kind": "INPUT_OBJECT", + "name": "users_insert_input", + "possibleTypes": null + }, + { + "description": "This enum contains a variant for each column in the table", + "enumValues": [ + { + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "rowid" + }, + { + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "name" + }, + { + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "email" + }, + { + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "created_utc" + }, + { + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "number_of_logins" + }, + { + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "progress" + } + ], + "fields": null, + "inputFields": null, + "interfaces": [], + "kind": "ENUM", + "name": "users_column", + "possibleTypes": null + }, + { + "description": "Specifies how broken UNIQUE constraints for users should be handled", + "enumValues": null, + "fields": null, + "inputFields": [ + { + "defaultValue": null, + "description": "columns to handle conflicts of", + "name": "constraint", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "users_column", + "ofType": null + } + } + } + } + }, + { + "defaultValue": null, + "description": "columns to override on conflict", + "name": "update_columns", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "users_column", + "ofType": null + } + } + } + } + }, + { + 
"defaultValue": null, + "description": "filter specifying which conflicting columns to update", + "name": "where", + "type": { + "kind": "INPUT_OBJECT", + "name": "users_filter", + "ofType": null + } + } + ], + "interfaces": [], + "kind": "INPUT_OBJECT", + "name": "users_upsert_on_conflict", + "possibleTypes": null + }, + { + "description": "Fields to set for users", + "enumValues": null, + "fields": null, + "inputFields": [ + { + "defaultValue": null, + "description": "", + "name": "rowid", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "name", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "email", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "created_utc", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "number_of_logins", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "progress", + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + } + } + ], + "interfaces": [], + "kind": "INPUT_OBJECT", + "name": "users_set_input", + "possibleTypes": null + }, + { + "description": "Filter object to select rows", + "enumValues": null, + "fields": null, + "inputFields": [ + { + "defaultValue": null, + "description": "", + "name": "rowid", + "type": { + "kind": "INPUT_OBJECT", + "name": "IntComparison", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "name", + "type": { + "kind": "INPUT_OBJECT", + "name": "StringComparison", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "email", + "type": { + "kind": "INPUT_OBJECT", + "name": "StringComparison", + 
"ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "created_utc", + "type": { + "kind": "INPUT_OBJECT", + "name": "StringComparison", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "number_of_logins", + "type": { + "kind": "INPUT_OBJECT", + "name": "IntComparison", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "progress", + "type": { + "kind": "INPUT_OBJECT", + "name": "FloatComparison", + "ofType": null + } + } + ], + "interfaces": [], + "kind": "INPUT_OBJECT", + "name": "users_filter", + "possibleTypes": null + }, + { + "description": "Ordering options when selecting data from \"users\".", + "enumValues": null, + "fields": null, + "inputFields": [ + { + "defaultValue": null, + "description": "", + "name": "rowid", + "type": { + "kind": "INPUT_OBJECT", + "name": "OrderingTerm", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "name", + "type": { + "kind": "INPUT_OBJECT", + "name": "OrderingTerm", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "email", + "type": { + "kind": "INPUT_OBJECT", + "name": "OrderingTerm", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "created_utc", + "type": { + "kind": "INPUT_OBJECT", + "name": "OrderingTerm", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "number_of_logins", + "type": { + "kind": "INPUT_OBJECT", + "name": "OrderingTerm", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "progress", + "type": { + "kind": "INPUT_OBJECT", + "name": "OrderingTerm", + "ofType": null + } + } + ], + "interfaces": [], + "kind": "INPUT_OBJECT", + "name": "users_order_by", + "possibleTypes": null + }, + { + "description": "Available columns for table \"songs\"", + "enumValues": null, + "fields": [ + { + "args": [], + "deprecationReason": null, + 
"description": "", + "isDeprecated": false, + "name": "rowid", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "args": [], + "deprecationReason": null, + "description": "", + "isDeprecated": false, + "name": "name", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": "", + "isDeprecated": false, + "name": "duration_seconds", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + } + } + ], + "inputFields": null, + "interfaces": [], + "kind": "OBJECT", + "name": "songs_row", + "possibleTypes": null + }, + { + "description": "Mutation response for songs", + "enumValues": null, + "fields": [ + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "affected_rows", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "returning", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "songs_row", + "ofType": null + } + } + } + } + } + ], + "inputFields": null, + "interfaces": [], + "kind": "OBJECT", + "name": "songs_mutation_response", + "possibleTypes": null + }, + { + "description": "Input object for songs", + "enumValues": null, + "fields": null, + "inputFields": [ + { + "defaultValue": null, + "description": "", + "name": "rowid", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "name", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + 
"name": "String", + "ofType": null + } + } + }, + { + "defaultValue": null, + "description": "", + "name": "duration_seconds", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + } + } + ], + "interfaces": [], + "kind": "INPUT_OBJECT", + "name": "songs_insert_input", + "possibleTypes": null + }, + { + "description": "This enum contains a variant for each column in the table", + "enumValues": [ + { + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "rowid" + }, + { + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "name" + }, + { + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "duration_seconds" + } + ], + "fields": null, + "inputFields": null, + "interfaces": [], + "kind": "ENUM", + "name": "songs_column", + "possibleTypes": null + }, + { + "description": "Specifies how broken UNIQUE constraints for songs should be handled", + "enumValues": null, + "fields": null, + "inputFields": [ + { + "defaultValue": null, + "description": "columns to handle conflicts of", + "name": "constraint", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "songs_column", + "ofType": null + } + } + } + } + }, + { + "defaultValue": null, + "description": "columns to override on conflict", + "name": "update_columns", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "songs_column", + "ofType": null + } + } + } + } + }, + { + "defaultValue": null, + "description": "filter specifying which conflicting columns to update", + "name": "where", + "type": { + "kind": "INPUT_OBJECT", + "name": "songs_filter", + "ofType": 
null + } + } + ], + "interfaces": [], + "kind": "INPUT_OBJECT", + "name": "songs_upsert_on_conflict", + "possibleTypes": null + }, + { + "description": "Fields to set for songs", + "enumValues": null, + "fields": null, + "inputFields": [ + { + "defaultValue": null, + "description": "", + "name": "rowid", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "name", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "duration_seconds", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + } + ], + "interfaces": [], + "kind": "INPUT_OBJECT", + "name": "songs_set_input", + "possibleTypes": null + }, + { + "description": "Filter object to select rows", + "enumValues": null, + "fields": null, + "inputFields": [ + { + "defaultValue": null, + "description": "", + "name": "rowid", + "type": { + "kind": "INPUT_OBJECT", + "name": "IntComparison", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "name", + "type": { + "kind": "INPUT_OBJECT", + "name": "StringComparison", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "duration_seconds", + "type": { + "kind": "INPUT_OBJECT", + "name": "IntComparison", + "ofType": null + } + } + ], + "interfaces": [], + "kind": "INPUT_OBJECT", + "name": "songs_filter", + "possibleTypes": null + }, + { + "description": "Ordering options when selecting data from \"songs\".", + "enumValues": null, + "fields": null, + "inputFields": [ + { + "defaultValue": null, + "description": "", + "name": "rowid", + "type": { + "kind": "INPUT_OBJECT", + "name": "OrderingTerm", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + "name": "name", + "type": { + "kind": "INPUT_OBJECT", + "name": "OrderingTerm", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "", + 
"name": "duration_seconds", + "type": { + "kind": "INPUT_OBJECT", + "name": "OrderingTerm", + "ofType": null + } + } + ], + "interfaces": [], + "kind": "INPUT_OBJECT", + "name": "songs_order_by", + "possibleTypes": null + }, + { + "description": "Compare to an Int", + "enumValues": null, + "fields": null, + "inputFields": [ + { + "defaultValue": null, + "description": null, + "name": "eq", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "neq", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "gt", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "gte", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "lt", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "lte", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "like", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "ilike", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "in", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + } + }, + { + "defaultValue": null, + "description": null, + "name": "nin", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + } + } + ], + "interfaces": [], + "kind": "INPUT_OBJECT", + "name": "IntComparison", + "possibleTypes": null + }, + { + "description": "Compare to a 
Float", + "enumValues": null, + "fields": null, + "inputFields": [ + { + "defaultValue": null, + "description": null, + "name": "eq", + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "neq", + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "gt", + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "gte", + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "lt", + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "lte", + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "like", + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "ilike", + "type": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "in", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + } + } + }, + { + "defaultValue": null, + "description": null, + "name": "nin", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Float", + "ofType": null + } + } + } + ], + "interfaces": [], + "kind": "INPUT_OBJECT", + "name": "FloatComparison", + "possibleTypes": null + }, + { + "description": "Compare to a String", + "enumValues": null, + "fields": null, + "inputFields": [ + { + "defaultValue": null, + "description": null, + "name": "eq", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "defaultValue": 
null, + "description": null, + "name": "neq", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "gt", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "gte", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "lt", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "lte", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "like", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "ilike", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "in", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + { + "defaultValue": null, + "description": null, + "name": "nin", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + } + ], + "interfaces": [], + "kind": "INPUT_OBJECT", + "name": "StringComparison", + "possibleTypes": null + }, + { + "description": "Compare to a Boolean", + "enumValues": null, + "fields": null, + "inputFields": [ + { + "defaultValue": null, + "description": null, + "name": "eq", + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "neq", + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "gt", + "type": { + "kind": 
"SCALAR", + "name": "Boolean", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "gte", + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "lt", + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "lte", + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "like", + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "ilike", + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + { + "defaultValue": null, + "description": null, + "name": "in", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + } + }, + { + "defaultValue": null, + "description": null, + "name": "nin", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + } + } + ], + "interfaces": [], + "kind": "INPUT_OBJECT", + "name": "BooleanComparison", + "possibleTypes": null + }, + { + "description": "Ordering options when ordering by a column", + "enumValues": [ + { + "deprecationReason": null, + "description": "In ascending order", + "isDeprecated": false, + "name": "ASC" + }, + { + "deprecationReason": "GraphQL spec recommends all caps for enums", + "description": "In ascending order", + "isDeprecated": true, + "name": "asc" + }, + { + "deprecationReason": null, + "description": "In descending order", + "isDeprecated": false, + "name": "DESC" + }, + { + "deprecationReason": "GraphQL spec recommends all caps for enums", + "description": "In descending order", + "isDeprecated": true, + "name": "desc" + } + ], + "fields": null, + "inputFields": null, + 
"interfaces": [], + "kind": "ENUM", + "name": "OrderingTerm", + "possibleTypes": null + }, + { + "description": null, + "enumValues": null, + "fields": [ + { + "args": [ + { + "defaultValue": null, + "description": "Filter to select specific rows", + "name": "filter", + "type": { + "kind": "INPUT_OBJECT", + "name": "users_filter", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "Columns used to sort the data", + "name": "order_by", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "users_order_by", + "ofType": null + } + } + }, + { + "defaultValue": null, + "description": "Limit the number of returned rows", + "name": "limit", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "The index to start returning rows from", + "name": "offset", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + } + ], + "deprecationReason": null, + "description": "Rows from the table \"users\"", + "isDeprecated": false, + "name": "users", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "users_row", + "ofType": null + } + } + } + } + }, + { + "args": [ + { + "defaultValue": null, + "description": "Filter to select specific rows", + "name": "filter", + "type": { + "kind": "INPUT_OBJECT", + "name": "songs_filter", + "ofType": null + } + }, + { + "defaultValue": null, + "description": "Columns used to sort the data", + "name": "order_by", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "songs_order_by", + "ofType": null + } + } + }, + { + "defaultValue": null, + "description": "Limit the number of returned rows", + "name": "limit", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + { + "defaultValue": null, + 
"description": "The index to start returning rows from", + "name": "offset", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + } + ], + "deprecationReason": null, + "description": "Rows from the table \"songs\"", + "isDeprecated": false, + "name": "songs", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "songs_row", + "ofType": null + } + } + } + } + } + ], + "inputFields": null, + "interfaces": [], + "kind": "OBJECT", + "name": "Query", + "possibleTypes": null + }, + { + "description": null, + "enumValues": null, + "fields": [ + { + "args": [ + { + "defaultValue": null, + "description": "Rows to be inserted", + "name": "objects", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "users_insert_input", + "ofType": null + } + } + } + } + }, + { + "defaultValue": null, + "description": "Specifies how to handle broken UNIQUE constraints", + "name": "on_conflict", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "users_upsert_on_conflict", + "ofType": null + } + } + } + } + ], + "deprecationReason": null, + "description": "Insert new rows in table \"users\"", + "isDeprecated": false, + "name": "insert_users", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "users_mutation_response", + "ofType": null + } + } + }, + { + "args": [ + { + "defaultValue": null, + "description": "Filter to select rows to be updated", + "name": "filter", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "users_filter", + "ofType": null + } + } + }, + { + "defaultValue": null, + 
"description": "Fields to be updated", + "name": "set", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "users_set_input", + "ofType": null + } + } + } + ], + "deprecationReason": null, + "description": "Update rows in table \"users\"", + "isDeprecated": false, + "name": "update_users", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "users_mutation_response", + "ofType": null + } + } + }, + { + "args": [ + { + "defaultValue": null, + "description": "Filter to select rows to be deleted", + "name": "filter", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "users_filter", + "ofType": null + } + } + } + ], + "deprecationReason": null, + "description": "Delete rows in table \"users\"", + "isDeprecated": false, + "name": "delete_users", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "users_mutation_response", + "ofType": null + } + } + }, + { + "args": [ + { + "defaultValue": null, + "description": "Rows to be inserted", + "name": "objects", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "songs_insert_input", + "ofType": null + } + } + } + } + }, + { + "defaultValue": null, + "description": "Specifies how to handle broken UNIQUE constraints", + "name": "on_conflict", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "songs_upsert_on_conflict", + "ofType": null + } + } + } + } + ], + "deprecationReason": null, + "description": "Insert new rows in table \"songs\"", + "isDeprecated": false, + "name": "insert_songs", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": 
"songs_mutation_response", + "ofType": null + } + } + }, + { + "args": [ + { + "defaultValue": null, + "description": "Filter to select rows to be updated", + "name": "filter", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "songs_filter", + "ofType": null + } + } + }, + { + "defaultValue": null, + "description": "Fields to be updated", + "name": "set", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "songs_set_input", + "ofType": null + } + } + } + ], + "deprecationReason": null, + "description": "Update rows in table \"songs\"", + "isDeprecated": false, + "name": "update_songs", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "songs_mutation_response", + "ofType": null + } + } + }, + { + "args": [ + { + "defaultValue": null, + "description": "Filter to select rows to be deleted", + "name": "filter", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "songs_filter", + "ofType": null + } + } + } + ], + "deprecationReason": null, + "description": "Delete rows in table \"songs\"", + "isDeprecated": false, + "name": "delete_songs", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "songs_mutation_response", + "ofType": null + } + } + } + ], + "inputFields": null, + "interfaces": [], + "kind": "OBJECT", + "name": "Mutation", + "possibleTypes": null + }, + { + "description": "The `Boolean` scalar type represents `true` or `false`.", + "enumValues": null, + "fields": null, + "inputFields": null, + "interfaces": [], + "kind": "SCALAR", + "name": "Boolean", + "possibleTypes": null + }, + { + "description": "The `Int` scalar type represents non-fractional signed whole numeric values. 
Int can represent values between -(2^31) and 2^31 - 1.", + "enumValues": null, + "fields": null, + "inputFields": null, + "interfaces": [], + "kind": "SCALAR", + "name": "Int", + "possibleTypes": null + }, + { + "description": "Signed double-precision floating-point value.", + "enumValues": null, + "fields": null, + "inputFields": null, + "interfaces": [], + "kind": "SCALAR", + "name": "Float", + "possibleTypes": null + }, + { + "description": "The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.", + "enumValues": null, + "fields": null, + "inputFields": null, + "interfaces": [], + "kind": "SCALAR", + "name": "String", + "possibleTypes": null + }, + { + "description": "The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `\"4\"`) or integer (such as `4`) input value will be accepted as an ID.", + "enumValues": null, + "fields": null, + "inputFields": null, + "interfaces": [], + "kind": "SCALAR", + "name": "ID", + "possibleTypes": null + }, + { + "description": "The `Upload` scalar type represents a file upload.", + "enumValues": null, + "fields": null, + "inputFields": null, + "interfaces": [], + "kind": "SCALAR", + "name": "Upload", + "possibleTypes": null + }, + { + "description": "A GraphQL Schema defines the capabilities of a GraphQL server. 
It exposes all available types and directives on the server, as well as the entry points for query, mutation, and subscription operations.", + "enumValues": null, + "fields": [ + { + "args": [], + "deprecationReason": null, + "description": "A list of all types supported by this server.", + "isDeprecated": false, + "name": "types", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": "The type that query operations will be rooted at.", + "isDeprecated": false, + "name": "queryType", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": "If this server supports mutation, the type that mutation operations will be rooted at.", + "isDeprecated": false, + "name": "mutationType", + "type": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + }, + { + "args": [], + "deprecationReason": null, + "description": "If this server support subscription, the type that subscription operations will be rooted at.", + "isDeprecated": false, + "name": "subscriptionType", + "type": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + }, + { + "args": [], + "deprecationReason": null, + "description": "A list of all directives supported by this server.", + "isDeprecated": false, + "name": "directives", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Directive", + "ofType": null + } + } + } + } + } + ], + "inputFields": null, + "interfaces": [], + "kind": "OBJECT", + "name": "__Schema", + "possibleTypes": null + }, + { + 
"description": "The fundamental unit of any GraphQL Schema is the type. There are many kinds of types in GraphQL as represented by the `__TypeKind` enum.\n\nDepending on the kind of a type, certain fields describe information about that type. Scalar types provide no information beyond a name and description, while Enum types provide their values. Object and Interface types provide the fields they describe. Abstract types, Union and Interface, provide the Object types possible at runtime. List and NonNull types compose other types.", + "enumValues": null, + "fields": [ + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "kind", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "__TypeKind", + "ofType": null + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "name", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "description", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "args": [ + { + "defaultValue": "false", + "description": null, + "name": "includeDeprecated", + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + } + ], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "fields", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Field", + "ofType": null + } + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "interfaces", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + } + }, + { + "args": 
[], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "possibleTypes", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + } + }, + { + "args": [ + { + "defaultValue": "false", + "description": null, + "name": "includeDeprecated", + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + } + ], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "enumValues", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__EnumValue", + "ofType": null + } + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "inputFields", + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__InputValue", + "ofType": null + } + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "ofType", + "type": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + ], + "inputFields": null, + "interfaces": [], + "kind": "OBJECT", + "name": "__Type", + "possibleTypes": null + }, + { + "description": "An enum describing what kind of type a given `__Type` is.", + "enumValues": [ + { + "deprecationReason": null, + "description": "Indicates this type is a scalar.", + "isDeprecated": false, + "name": "SCALAR" + }, + { + "deprecationReason": null, + "description": "Indicates this type is an object. `fields` and `interfaces` are valid fields.", + "isDeprecated": false, + "name": "OBJECT" + }, + { + "deprecationReason": null, + "description": "Indicates this type is an interface. 
`fields` and `possibleTypes` are valid fields.", + "isDeprecated": false, + "name": "INTERFACE" + }, + { + "deprecationReason": null, + "description": "Indicates this type is a union. `possibleTypes` is a valid field.", + "isDeprecated": false, + "name": "UNION" + }, + { + "deprecationReason": null, + "description": "Indicates this type is an enum. `enumValues` is a valid field.", + "isDeprecated": false, + "name": "ENUM" + }, + { + "deprecationReason": null, + "description": "Indicates this type is an input object. `inputFields` is a valid field.", + "isDeprecated": false, + "name": "INPUT_OBJECT" + }, + { + "deprecationReason": null, + "description": "Indicates this type is a list. `ofType` is a valid field.", + "isDeprecated": false, + "name": "LIST" + }, + { + "deprecationReason": null, + "description": "Indicates this type is a non-null. `ofType` is a valid field.", + "isDeprecated": false, + "name": "NON_NULL" + } + ], + "fields": null, + "inputFields": null, + "interfaces": [], + "kind": "ENUM", + "name": "__TypeKind", + "possibleTypes": null + }, + { + "description": "Object and Interface types are described by a list of Fields, each of which has a name, potentially a list of arguments, and a return type.", + "enumValues": null, + "fields": [ + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "name", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "description", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "args", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + 
"kind": "OBJECT", + "name": "__InputValue", + "ofType": null + } + } + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "type", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "isDeprecated", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "deprecationReason", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + ], + "inputFields": null, + "interfaces": [], + "kind": "OBJECT", + "name": "__Field", + "possibleTypes": null + }, + { + "description": "Arguments provided to Fields or Directives and the input fields of an InputObject are represented as Input Values which describe their type and optionally a default value.", + "enumValues": null, + "fields": [ + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "name", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "description", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "type", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": "A GraphQL-formatted string representing the default value for this input value.", + "isDeprecated": 
false, + "name": "defaultValue", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + ], + "inputFields": null, + "interfaces": [], + "kind": "OBJECT", + "name": "__InputValue", + "possibleTypes": null + }, + { + "description": "One possible value for a given Enum. Enum values are unique values, not a placeholder for a string or numeric value. However an Enum value is returned in a JSON response as a string.", + "enumValues": null, + "fields": [ + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "name", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "description", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "isDeprecated", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "deprecationReason", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + ], + "inputFields": null, + "interfaces": [], + "kind": "OBJECT", + "name": "__EnumValue", + "possibleTypes": null + }, + { + "description": "A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.\n\nIn some cases, you need to provide options to alter GraphQL's execution behavior in ways field arguments will not suffice, such as conditionally including or skipping a field. 
Directives provide this by describing additional information to the executor.", + "enumValues": null, + "fields": [ + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "name", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "description", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "locations", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "__DirectiveLocation", + "ofType": null + } + } + } + } + }, + { + "args": [], + "deprecationReason": null, + "description": null, + "isDeprecated": false, + "name": "args", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__InputValue", + "ofType": null + } + } + } + } + } + ], + "inputFields": null, + "interfaces": [], + "kind": "OBJECT", + "name": "__Directive", + "possibleTypes": null + }, + { + "description": "A Directive can be adjacent to many parts of the GraphQL language, a __DirectiveLocation describes one such possible adjacencies.", + "enumValues": [ + { + "deprecationReason": null, + "description": "Location adjacent to a query operation.", + "isDeprecated": false, + "name": "QUERY" + }, + { + "deprecationReason": null, + "description": "Location adjacent to a mutation operation.", + "isDeprecated": false, + "name": "MUTATION" + }, + { + "deprecationReason": null, + "description": "Location adjacent to a subscription operation.", + "isDeprecated": false, 
+ "name": "SUBSCRIPTION" + }, + { + "deprecationReason": null, + "description": "Location adjacent to a field.", + "isDeprecated": false, + "name": "FIELD" + }, + { + "deprecationReason": null, + "description": "Location adjacent to a fragment definition.", + "isDeprecated": false, + "name": "FRAGMENT_DEFINITION" + }, + { + "deprecationReason": null, + "description": "Location adjacent to a fragment spread.", + "isDeprecated": false, + "name": "FRAGMENT_SPREAD" + }, + { + "deprecationReason": null, + "description": "Location adjacent to an inline fragment.", + "isDeprecated": false, + "name": "INLINE_FRAGMENT" + }, + { + "deprecationReason": null, + "description": "Location adjacent to a variable definition.", + "isDeprecated": false, + "name": "VARIABLE_DEFINITION" + }, + { + "deprecationReason": null, + "description": "Location adjacent to a schema definition.", + "isDeprecated": false, + "name": "SCHEMA" + }, + { + "deprecationReason": null, + "description": "Location adjacent to a scalar definition.", + "isDeprecated": false, + "name": "SCALAR" + }, + { + "deprecationReason": null, + "description": "Location adjacent to an object type definition.", + "isDeprecated": false, + "name": "OBJECT" + }, + { + "deprecationReason": null, + "description": "Location adjacent to a field definition.", + "isDeprecated": false, + "name": "FIELD_DEFINITION" + }, + { + "deprecationReason": null, + "description": "Location adjacent to an argument definition.", + "isDeprecated": false, + "name": "ARGUMENT_DEFINITION" + }, + { + "deprecationReason": null, + "description": "Location adjacent to an interface definition.", + "isDeprecated": false, + "name": "INTERFACE" + }, + { + "deprecationReason": null, + "description": "Location adjacent to a union definition.", + "isDeprecated": false, + "name": "UNION" + }, + { + "deprecationReason": null, + "description": "Location adjacent to an enum definition.", + "isDeprecated": false, + "name": "ENUM" + }, + { + "deprecationReason": null, 
+ "description": "Location adjacent to an enum value definition.", + "isDeprecated": false, + "name": "ENUM_VALUE" + }, + { + "deprecationReason": null, + "description": "Location adjacent to an input object type definition.", + "isDeprecated": false, + "name": "INPUT_OBJECT" + }, + { + "deprecationReason": null, + "description": "Location adjacent to an input object field definition.", + "isDeprecated": false, + "name": "INPUT_FIELD_DEFINITION" + } + ], + "fields": null, + "inputFields": null, + "interfaces": [], + "kind": "ENUM", + "name": "__DirectiveLocation", + "possibleTypes": null + } + ] + } + } +}