checkpoint during parser conversion to megaparsec
parent 9396aa8cba
commit ab687318fb
@@ -3,6 +3,7 @@
 -- Data types to represent different dialect options

 {-# LANGUAGE DeriveDataTypeable #-}
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.Dialect
 (Dialect(..)
 ,ansi2011
@@ -12,6 +13,7 @@ module Language.SQL.SimpleSQL.Dialect
 ,sqlserver
 ) where

+import Data.Text (Text)
 import Data.Data

 -- | Used to set the dialect used for parsing and pretty printing,
@@ -55,14 +57,14 @@ import Data.Data

 data Dialect = Dialect
 { -- | reserved keywords
-diKeywords :: [String]
+diKeywords :: [Text]
 -- | keywords with identifier exception
-,diIdentifierKeywords :: [String]
+,diIdentifierKeywords :: [Text]
 -- | keywords with app exception
-,diAppKeywords :: [String]
+,diAppKeywords :: [Text]
 -- | keywords with type exception plus all the type names which
 -- are multiple words
-,diSpecialTypeNames :: [String]
+,diSpecialTypeNames :: [Text]
 -- | allow ansi fetch first syntax
 ,diFetchFirst :: Bool
 -- | allow limit keyword (mysql, postgres,
@@ -179,7 +181,7 @@ quoted. If you want to match one of these dialects exactly with this
 parser, I think it will be a lot of work.
 -}

-ansi2011ReservedKeywords :: [String]
+ansi2011ReservedKeywords :: [Text]
 ansi2011ReservedKeywords =
 [--"abs" -- function
 "all" -- keyword only?
@@ -508,7 +510,7 @@ ansi2011ReservedKeywords =
 ]


-ansi2011TypeNames :: [String]
+ansi2011TypeNames :: [Text]
 ansi2011TypeNames =
 ["double precision"
 ,"character varying"
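With the keyword fields now [Text], the new OverloadedStrings pragma keeps plain string literals usable when building or tweaking a Dialect value. A minimal sketch, assuming only what the module above exports (myDialect is an illustrative name, not part of the library):

{-# LANGUAGE OverloadedStrings #-}
import Language.SQL.SimpleSQL.Dialect

-- Hypothetical example: extend the ansi2011 keyword list; the literal
-- "generated" is a Text value here via OverloadedStrings.
myDialect :: Dialect
myDialect = ansi2011 {diKeywords = "generated" : diKeywords ansi2011}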
@@ -1,53 +0,0 @@
-
--- | helpers to work with parsec errors more nicely
-module Language.SQL.SimpleSQL.Errors
-(ParseError(..)
---,formatError
-,convParseError
-) where
-
-import Text.Parsec (sourceColumn,sourceLine,sourceName,errorPos)
-import qualified Text.Parsec as P (ParseError)
-
--- | Type to represent parse errors.
-data ParseError = ParseError
-{peErrorString :: String
--- ^ contains the error message
-,peFilename :: FilePath
--- ^ filename location for the error
-,pePosition :: (Int,Int)
--- ^ line number and column number location for the error
-,peFormattedError :: String
--- ^ formatted error with the position, error
--- message and source context
-} deriving (Eq,Show)
-
-convParseError :: String -> P.ParseError -> ParseError
-convParseError src e =
-ParseError
-{peErrorString = show e
-,peFilename = sourceName p
-,pePosition = (sourceLine p, sourceColumn p)
-,peFormattedError = formatError src e}
-where
-p = errorPos e
-
-{-
-format the error more nicely: emacs format for positioning, plus
-context
--}
-
-formatError :: String -> P.ParseError -> String
-formatError src e =
-sourceName p ++ ":" ++ show (sourceLine p)
-++ ":" ++ show (sourceColumn p) ++ ":"
-++ context
-++ show e
-where
-context =
-let lns = take 1 $ drop (sourceLine p - 1) $ lines src
-in case lns of
-[x] -> "\n" ++ x ++ "\n"
-++ replicate (sourceColumn p - 1) ' ' ++ "^\n"
-_ -> ""
-p = errorPos e
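The removed module wrapped parsec's ParseError with a filename, a position and an emacs-style formatted message. Megaparsec bundles equivalent formatting, so after the conversion a helper like the following is roughly all that is needed (a sketch, assuming the converted parser uses strict Text input and no custom error component):

import Data.Text (Text)
import Data.Void (Void)
import Text.Megaparsec (ParseErrorBundle, errorBundlePretty)

-- errorBundlePretty already renders the source name, line/column and a
-- caret under the offending input, which formatError built by hand.
prettyError :: ParseErrorBundle Text Void -> String
prettyError = errorBundlePretty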
@@ -155,7 +155,7 @@ data Token
 | LineComment Text
 -- | A block comment, \/* stuff *\/, includes the comment delimiters
 | BlockComment Text
-deriving (Eq,Show)
+deriving (Eq,Show,Ord)

 ------------------------------------------------------------------------------

[File diff suppressed because it is too large]
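The suppressed diff is the main parser module being converted. For orientation, a minimal sketch of the kind of setup a parsec-to-megaparsec conversion over strict Text usually involves; the Parser alias and keyword helper below are illustrative assumptions, not the module's actual definitions:

{-# LANGUAGE OverloadedStrings #-}
import Data.Text (Text)
import Data.Void (Void)
import Text.Megaparsec (Parsec)
import Text.Megaparsec.Char (string', space)

-- Assumed parser type: strict Text input, no custom error component.
type Parser = Parsec Void Text

-- Case-insensitive keyword, consuming trailing whitespace.
keyword :: Text -> Parser Text
keyword k = string' k <* space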
@@ -15,6 +15,9 @@ TODO: there should be more comments in this file, especially the bits
 which have been changed to try to improve the layout of the output.
 -}

+import Prelude hiding (show)
+import qualified Prelude as P
+
 import Prettyprinter (Doc
 ,parens
 ,nest
@@ -31,24 +34,24 @@ import Prettyprinter (Doc
 )
 import qualified Prettyprinter as P

-import Prettyprinter.Render.Text (renderLazy)
+import Prettyprinter.Render.Text (renderStrict)

 import Data.Maybe (maybeToList, catMaybes)
 import Data.List (intercalate)

-import qualified Data.Text.Lazy as L
 import qualified Data.Text as T
+import Data.Text (Text)

 import Language.SQL.SimpleSQL.Syntax
 import Language.SQL.SimpleSQL.Dialect


 -- | Convert a query expr ast to concrete syntax.
-prettyQueryExpr :: Dialect -> QueryExpr -> String
+prettyQueryExpr :: Dialect -> QueryExpr -> Text
 prettyQueryExpr d = render . queryExpr d

 -- | Convert a value expr ast to concrete syntax.
-prettyScalarExpr :: Dialect -> ScalarExpr -> String
+prettyScalarExpr :: Dialect -> ScalarExpr -> Text
 prettyScalarExpr d = render . scalarExpr d

 -- | A terminating semicolon.
@@ -56,20 +59,20 @@ terminator :: Doc a
 terminator = pretty ";\n"

 -- | Convert a statement ast to concrete syntax.
-prettyStatement :: Dialect -> Statement -> String
+prettyStatement :: Dialect -> Statement -> Text
 prettyStatement _ EmptyStatement = render terminator
 prettyStatement d s = render (statement d s)

 -- | Convert a list of statements to concrete syntax. A semicolon
 -- is inserted after each statement.
-prettyStatements :: Dialect -> [Statement] -> String
+prettyStatements :: Dialect -> [Statement] -> Text
 prettyStatements d = render . vsep . map prettyStatementWithSemicolon
 where
 prettyStatementWithSemicolon :: Statement -> Doc a
 prettyStatementWithSemicolon s = statement d s <> terminator

-render :: Doc a -> String -- L.Text
+render :: Doc a -> Text
-render = L.unpack . renderLazy . layoutPretty defaultLayoutOptions
+render = renderStrict . layoutPretty defaultLayoutOptions

 -- = scalar expressions

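render now goes through prettyprinter's strict-Text renderer instead of unpacking lazy text. A self-contained sketch of that pipeline, using only prettyprinter's public API:

import Data.Text (Text)
import Prettyprinter (Doc, layoutPretty, defaultLayoutOptions)
import Prettyprinter.Render.Text (renderStrict)

-- Lay out with the default options, then render straight to strict Text,
-- with no Data.Text.Lazy round trip.
renderDoc :: Doc a -> Text
renderDoc = renderStrict . layoutPretty defaultLayoutOptions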
@@ -88,7 +91,7 @@ scalarExpr _ (IntervalLit s v f t) =
 scalarExpr _ (Iden i) = names i
 scalarExpr _ Star = pretty "*"
 scalarExpr _ Parameter = pretty "?"
-scalarExpr _ (PositionalArg n) = pretty $ "$" ++ show n
+scalarExpr _ (PositionalArg n) = pretty $ T.cons '$' $ show n
 scalarExpr _ (HostParameter p i) =
 pretty p
 <+> me (\i' -> pretty "indicator" <+> pretty i') i
@@ -140,7 +143,7 @@ scalarExpr dia (SpecialOp nm [a,b,c]) | nm `elem` [[Name Nothing "between"]
 ,[Name Nothing "not between"]] =
 sep [scalarExpr dia a
 ,names nm <+> scalarExpr dia b
-,nest (length (unnames nm) + 1) $ pretty "and" <+> scalarExpr dia c]
+,nest (T.length (unnames nm) + 1) $ pretty "and" <+> scalarExpr dia c]

 scalarExpr d (SpecialOp [Name Nothing "rowctor"] as) =
 parens $ commaSep $ map (scalarExpr d) as
@@ -164,7 +167,7 @@ scalarExpr d e@(BinOp _ op _) | op `elem` [[Name Nothing "and"]
 : map ((names op <+>) . scalarExpr d) es)
 [] -> mempty -- shouldn't be possible
 where
-ands (BinOp a op' b) | op == op' = ands a ++ ands b
+ands (BinOp a op' b) | op == op' = ands a <> ands b
 ands x = [x]
 -- special case for . we don't use whitespace
 scalarExpr d (BinOp e0 [Name Nothing "."] e1) =
@@ -174,9 +177,9 @@ scalarExpr d (BinOp e0 f e1) =

 scalarExpr dia (Case t ws els) =
 sep $ [pretty "case" <+> me (scalarExpr dia) t]
-++ map w ws
+<> map w ws
-++ maybeToList (fmap e els)
+<> maybeToList (fmap e els)
-++ [pretty "end"]
+<> [pretty "end"]
 where
 w (t0,t1) =
 pretty "when" <+> nest 5 (commaSep $ map (scalarExpr dia) t0)
@@ -261,7 +264,7 @@ scalarExpr _ (NextValueFor ns) =
 pretty "next value for" <+> names ns

 scalarExpr d (VEComment cmt v) =
-vsep $ map comment cmt ++ [scalarExpr d v]
+vsep $ map comment cmt <> [scalarExpr d v]

 scalarExpr _ (OdbcLiteral t s) =
 pretty "{" <> lt t <+> squotes (pretty s) <> pretty "}"
@@ -278,13 +281,13 @@ scalarExpr d (Convert t e Nothing) =
 scalarExpr d (Convert t e (Just i)) =
 pretty "convert(" <> typeName t <> pretty "," <+> scalarExpr d e <> pretty "," <+> pretty (show i) <> pretty ")"

-unname :: Name -> String
+unname :: Name -> Text
 unname (Name Nothing n) = n
 unname (Name (Just (s,e)) n) =
-s ++ n ++ e
+s <> n <> e

-unnames :: [Name] -> String
+unnames :: [Name] -> Text
-unnames ns = intercalate "." $ map unname ns
+unnames ns = T.intercalate "." $ map unname ns


 name :: Name -> Doc a
@@ -404,7 +407,7 @@ queryExpr d (Values vs) =
 <+> nest 7 (commaSep (map (parens . commaSep . map (scalarExpr d)) vs))
 queryExpr _ (Table t) = pretty "table" <+> names t
 queryExpr d (QEComment cmt v) =
-vsep $ map comment cmt ++ [queryExpr d v]
+vsep $ map comment cmt <> [queryExpr d v]


 alias :: Alias -> Doc a
@@ -450,10 +453,10 @@ from d ts =
 pretty "using" <+> parens (commaSep $ map name es)
 joinCond Nothing = mempty

-maybeScalarExpr :: Dialect -> String -> Maybe ScalarExpr -> Doc a
+maybeScalarExpr :: Dialect -> Text -> Maybe ScalarExpr -> Doc a
 maybeScalarExpr d k = me
 (\e -> sep [pretty k
-,nest (length k + 1) $ scalarExpr d e])
+,nest (T.length k + 1) $ scalarExpr d e])

 grpBy :: Dialect -> [GroupingExpr] -> Doc a
 grpBy _ [] = mempty
@@ -725,7 +728,7 @@ columnDef d (ColumnDef n t mdef cons) =
 pcon ColNullableConstraint = texts ["null"]
 pcon ColUniqueConstraint = pretty "unique"
 pcon (ColPrimaryKeyConstraint autoincrement) =
-texts $ ["primary","key"] ++ ["autoincrement"|autoincrement]
+texts $ ["primary","key"] <> ["autoincrement"|autoincrement]
 --pcon ColPrimaryKeyConstraint = texts ["primary","key"]
 pcon (ColCheckConstraint v) = pretty "check" <+> parens (scalarExpr d v)
 pcon (ColReferencesConstraint tb c m u del) =
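unnames above swaps Data.List.intercalate for the Data.Text version; behaviour is unchanged, only the string type differs. A tiny illustration (the helper name is hypothetical):

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Text as T

-- T.intercalate "." ["a","b","c"] == "a.b.c"
dotted :: [T.Text] -> T.Text
dotted = T.intercalate "."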
@@ -757,7 +760,7 @@ refMatch m = case m of
 MatchPartial -> texts ["match","partial"]
 MatchSimple -> texts ["match", "simple"]

-refAct :: String -> ReferentialAction -> Doc a
+refAct :: Text -> ReferentialAction -> Doc a
 refAct t a = case a of
 DefaultReferentialAction -> mempty
 RefCascade -> texts ["on", t, "cascade"]
@@ -863,11 +866,14 @@ me = maybe mempty
 comment :: Comment -> Doc a
 comment (BlockComment str) = pretty "/*" <+> pretty str <+> pretty "*/"

-texts :: [String] -> Doc a
+texts :: [Text] -> Doc a
 texts ts = sep $ map pretty ts

 -- regular pretty completely defeats the type checker when you want
 -- to change the ast and get type errors, instead it just produces
 -- incorrect code.
-pretty :: String -> Doc a
+pretty :: Text -> Doc a
-pretty = P.pretty . T.pack
+pretty = P.pretty
+
+show :: Show a => a -> Text
+show = T.pack . P.show
@@ -62,6 +62,8 @@ module Language.SQL.SimpleSQL.Syntax
 ,Comment(..)
 ) where

+import Data.Text (Text)
+
 import Data.Data

 -- | Represents a value expression. This is used for the expressions
@@ -82,21 +84,21 @@ data ScalarExpr
 -- * 1e5
 --
 -- * 12.34e-6
-NumLit String
+NumLit Text
 -- | string literal, with the start and end quote
--- e.g. 'test' -> StringLit "'" "'" "test"
+-- e.g. 'test' -> TextLit "'" "'" "test"
-| StringLit String String String
+| StringLit Text Text Text
 -- | text of interval literal, units of interval precision,
 -- e.g. interval 3 days (3)
 | IntervalLit
 {ilSign :: Maybe Sign -- ^ if + or - used
-,ilLiteral :: String -- ^ literal text
+,ilLiteral :: Text -- ^ literal text
 ,ilFrom :: IntervalTypeField
 ,ilTo :: Maybe IntervalTypeField
 }

 -- | prefix 'typed literal', e.g. int '42'
-| TypedLit TypeName String
+| TypedLit TypeName Text

 -- | identifier with parts separated by dots
 | Iden [Name]
@@ -105,9 +107,9 @@ data ScalarExpr

 | Parameter -- ^ Represents a ? in a parameterized query
 | PositionalArg Int -- ^ Represents an e.g. $1 in a parameterized query
-| HostParameter String (Maybe String) -- ^ represents a host
+| HostParameter Text (Maybe Text) -- ^ represents a host
 -- parameter, e.g. :a. The
--- Maybe String is for the
+-- Maybe Text is for the
 -- indicator, e.g. :var
 -- indicator :nl

@@ -163,7 +165,7 @@ data ScalarExpr
 -- of commas. The maybe is for the first unnamed argument
 -- if it is present, and the list is for the keyword argument
 -- pairs.
-| SpecialOpK [Name] (Maybe ScalarExpr) [(String,ScalarExpr)]
+| SpecialOpK [Name] (Maybe ScalarExpr) [(Text,ScalarExpr)]

 -- | cast(a as typename)
 | Cast ScalarExpr TypeName
@@ -215,7 +217,7 @@ in other places
 | MultisetQueryCtor QueryExpr
 | NextValueFor [Name]
 | VEComment [Comment] ScalarExpr
-| OdbcLiteral OdbcLiteralType String
+| OdbcLiteral OdbcLiteralType Text
 -- ^ an odbc literal e.g. {d '2000-01-01'}
 | OdbcFunc ScalarExpr
 -- ^ an odbc function call e.g. {fn CHARACTER_LENGTH('test')}
@@ -228,7 +230,7 @@ in other places
 -- * "test" -> Name (Just "\"","\"") "test"
 -- * `something` -> Name (Just ("`","`") "something"
 -- * [ms] -> Name (Just ("[","]") "ms"
-data Name = Name (Maybe (String,String)) String
+data Name = Name (Maybe (Text,Text)) Text
 deriving (Eq,Show,Read,Data,Typeable)

 -- | Represents a type name, used in casts.
@@ -246,7 +248,7 @@ data TypeName
 | MultisetTypeName TypeName
 deriving (Eq,Show,Read,Data,Typeable)

-data IntervalTypeField = Itf String (Maybe (Integer, Maybe Integer))
+data IntervalTypeField = Itf Text (Maybe (Integer, Maybe Integer))
 deriving (Eq,Show,Read,Data,Typeable)

 data Sign = Plus | Minus
@@ -739,6 +741,6 @@ data PrivilegeAction =

 -- | Comment. Useful when generating SQL code programmatically. The
 -- parser doesn't produce these.
-newtype Comment = BlockComment String
+newtype Comment = BlockComment Text
 deriving (Eq,Show,Read,Data,Typeable)

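With the literal-carrying fields switched from String to Text, code that builds these constructors keeps compiling as long as OverloadedStrings is enabled, which is what the test-module changes further down do. An illustrative pair of values (the names are hypothetical):

{-# LANGUAGE OverloadedStrings #-}
import Language.SQL.SimpleSQL.Syntax

-- "42", "'" and "test" are Text literals here via OverloadedStrings.
exampleNum :: ScalarExpr
exampleNum = NumLit "42"

exampleStr :: ScalarExpr
exampleStr = StringLit "'" "'" "test"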
@@ -31,11 +31,6 @@ Flag parserexe
 Description: Build SimpleSqlParserTool exe
 Default: False

-Flag fixitytest
-Description: Build fixity test exe
-Default: False
-
-
 common shared-properties
 default-language: Haskell2010
 build-depends: base >=4 && <5,
@@ -44,7 +39,8 @@ common shared-properties
 parsec,
 mtl >=2.1 && <2.4,
 prettyprinter >= 1.7 && < 1.8,
-text >= 2.1 && < 2.2
+text >= 2.1 && < 2.2,
+containers

 ghc-options: -Wall

@@ -56,8 +52,6 @@ library
 Language.SQL.SimpleSQL.Lex,
 Language.SQL.SimpleSQL.Syntax,
 Language.SQL.SimpleSQL.Dialect
-Other-Modules: Language.SQL.SimpleSQL.Errors,
-Language.SQL.SimpleSQL.Combinators

 Test-Suite Tests
 import: shared-properties
@@ -104,17 +98,3 @@ executable SimpleSqlParserTool
 buildable: True
 else
 buildable: False
-
-executable Fixity
-import: shared-properties
-main-is: Fixity.hs
-hs-source-dirs: tools
-Build-Depends: simple-sql-parser,
-pretty-show >= 1.6 && < 1.10,
-tasty >= 1.1 && < 1.6,
-tasty-hunit >= 0.9 && < 0.11
-if flag(fixitytest)
-buildable: True
-else
-buildable: False
-
tools/Fixity.hs (720 lines deleted)
@@ -1,720 +0,0 @@
-
-{-
-= Fixity fixups
-
-The point of this code is to be able to take a table of fixity
-information for unary and binary operators, then adjust an ast to
-match these fixities. The standard way of handling this is handling
-fixities at the parsing stage.
-
-For the SQL parser, this is difficult because there is lots of weird
-syntax for operators (such as prefix and postfix multiple keyword
-operators, between, etc.).
-
-An alterative idea which is used in some places is to parse the tree
-regarding all the operators to have the same precedence and left
-associativity, then correct the fixity in a pass over the ast after
-parsing. Would also like to use this to fix the fixity for the join
-trees, and set operations, after parsing them. TODO: anything else?
-
-
-Approach
-
-Really not sure how to get this correct. So: lots of testing
-
-Basic testing idea: create an expression, then write down manually how
-the expression should parse with correct fixity. Can write down the
-expression in concrete syntax, and the correct fixity version using
-parens.
-
-Then can parse the expression, fix it, parse the fixed expression,
-remove the parens and compare them to make sure they are equal.
-
-Second layer of testing. For each source expression parsed, run it
-through a generator which will generate every version of that tree by
-choosing all possibilities of fixities on a token by token basis. This
-will ensure the fixity fixer is robust. An alternative approach is to
-guarantee the parser will produce trees where all the fixities are
-known (e.g. unary operators always bind tighter than binary, binary
-are all left associative, prefix unary bind tighter than postfix. This
-way, the fix code can make some assumptions and have less code. We
-will stick with the full general version which is more robust.
-
-Another testing approach is to parse the tree with our non fixity
-respecting parser then fix it, and also parse it with a fixity
-respecting expression parser, and check the results are the same. This
-is difficult with the parsec build expression parser which doesn't
-handle nested unary operators, so have to find or write another build
-expression parser. We can test the fixer with simple operators (single
-symbol prefix, postfix and binary ops) and then use it on the complex
-sql ast trees.
-
-Can also try to generate trees ala quickcheck/smallcheck, then check
-them with the fixer and the build expression parser.
-
-generate a tree:
-
-start with a term
-then roll dice:
-add a prefix
-add a postfix
-do nothing
-then roll dice
-add a binary op
-for the second arg, recurse the algo
-
-
-algorithm:
-
-consider possible cases:
-binop with two binops args
-binop with prefix on left
-binop with postfix on right
-postfix with prefix inside
-prefix with postfix inside
-postfix with binop inside
-prefix with binop inside
-
-write a function to deal with each case and try to compose
-
-Tasks:
-
-write unary op tests: on each other, and with binary ops
-figure out how to generate trees
-do the step one tests (write the fixity with parens)
-check out parsers expression parser
-see if can generate trees using smallcheck
-try to test these trees against expression parser
-otherwise, generate tree, generate variations, check fixity always
-produces same result
-
-
-
-
-todo:
-
-1. more tests for unary operators with each other
-2. moving unary operators inside and outside binary operators:
-have to think about how this will work in general case
-3. ways to generate lots of tests and check them
--> what about creating a parser which parses to a list of all possible
-parses with different fixities for each operator it sees?
-4. ambiguous fixity cases - need position annotation to do these nicely
-5. real sql: how to work with a variety of ast nodes
-6. plug into simple-sql-parser
-7. refactor the simple-sql-parser parsing code
-8. simple-sql-parser todo for sqream: add other dml, dialects,
-procedural?
-9. testing idea: write big expressions with explicit parens everywhere
-parse this
-remove the parens
-pretty print, then parse and fixfixity to see if same
-then generate all variations of tree as if the fixities are different
-and then fixfixity to check it restores the original
-
-
-write fixity tests
-write code to do the fixing
-add error cases: put it in the either monad to report these
-
-check the descend
-then: move to real sql
-different abstract representations of binops, etc.
-what is the best way to deal with this? typeclass? conversion to and
-from a generic tree?
-
-
-
-
-
-can the binops be fixed on their own (precedence and assocativity)
-and then the prefix and postfix ops in separate passes
-
-what about a pass which puts the tree into canonical form:
-all left associative, all unary ops tight as possible?
-then the fixer can be easier?
--}
-
-
-
-
-
-{-# LANGUAGE DeriveDataTypeable,TupleSections #-}
-import Data.Data
-
-import Text.Parsec.String (Parser)
-import Text.Parsec (try)
-import Text.Parsec.Char
-import Text.Parsec.Combinator
-import Text.Parsec (parse,ParseError)
-import Control.Applicative ((<|>),many) -- ((<**>),(<$>),(<*), (*>),(<*>), (<$), (<|>), many)
---import qualified Text.Parsec.String.Expr as E
-import Control.Monad
---import Data.List (intercalate)
-import Data.Maybe ()
---import qualified Test.HUnit as H
---import FunctionsAndTypesForParsing
-import Debug.Trace
-import Text.Show.Pretty
-import Data.List
-import Control.Applicative
-
-import qualified Test.Tasty as T
-import qualified Test.Tasty.HUnit as H
-
-
-data Expr = BinOp Expr String Expr
-| PrefOp String Expr
-| PostOp String Expr
-| Iden String
-| Lit String
-| App String [Expr]
-| Parens Expr
-deriving (Eq,Show,Data,Typeable)
-
-{-
---------
-
-quick parser
--}
-
-parensValue :: Parser Expr
-parensValue = Parens <$> parens valueExpr
-
-idenApp :: Parser Expr
-idenApp = try $ do
-i <- identifier
-guard (i `notElem` ["not", "and", "or", "is"])
-choice [do
-args <- parens (commaSep valueExpr)
-return $ App i args
-,return $ Iden i
-]
-
-lit :: Parser Expr
-lit = stringLit <|> numLit
-where
-stringLit = Lit <$> lexeme (char '\'' *> manyTill anyChar (char '\''))
-numLit = do
-x <- lexeme (many1 digit)
-let y :: Integer
-y = read x
-return $ Lit $ show y
-
-prefOp :: Parser Expr
-prefOp = sym <|> kw
-where
-sym = do
-let prefOps = ["+", "-"]
-s <- choice $ map symbol prefOps
-v <- term
-return $ PrefOp s v
-kw = do
-let prefOps = ["not"]
-i <- identifier
-guard (i `elem` prefOps)
-v <- term
-return $ PrefOp i v
-
-postOp :: Parser (Expr -> Expr)
-postOp = try $ do
-let kws = ["is null"]
-kwsp = map (\a -> try $ do
-let x :: [String]
-x = words a
-mapM_ keyword_ x
-return $ PostOp a
-) kws
-choice kwsp
-
-binOp :: Parser (Expr -> Expr -> Expr)
-binOp = symbolBinOp <|> kwBinOp
-where
-symbolBinOp = do
-let binOps = ["+", "-", "*", "/"]
-s <- choice $ map symbol binOps
-return $ \a b -> BinOp a s b
-kwBinOp = do
-let kwBinOps = ["and", "or"]
-i <- identifier
-guard (i `elem` kwBinOps)
-return $ \a b -> BinOp a i b
-
-term :: Parser Expr
-term = (parensValue
-<|> try prefOp
-<|> idenApp
-<|> lit)
-<??*> postOp
-
--- (<??>) :: Parser a -> Parser (a -> a) -> Parser a
--- p <??> q = p <**> option id q
-
-(<??*>) :: Parser a -> Parser (a -> a) -> Parser a
-p <??*> q = foldr ($) <$> p <*> (reverse <$> many q)
-
-valueExpr :: Parser Expr
-valueExpr = chainl1 term binOp
-
-
-parens :: Parser a -> Parser a
-parens = between openParen closeParen
-
-openParen :: Parser Char
-openParen = lexeme $ char '('
-closeParen :: Parser Char
-closeParen = lexeme $ char ')'
-
-symbol :: String -> Parser String
-symbol s = try $ lexeme $ do
-u <- many1 (oneOf "<>=+-^%/*!|")
-guard (s == u)
-return s
-
-identifier :: Parser String
-identifier = lexeme ((:) <$> firstChar <*> many nonFirstChar)
-where
-firstChar = letter <|> char '_'
-nonFirstChar = digit <|> firstChar
-
-keyword :: String -> Parser String
-keyword k = try $ do
-i <- identifier
-guard (i == k)
-return k
-
-keyword_ :: String -> Parser ()
-keyword_ = void . keyword
-
-whitespace :: Parser ()
-whitespace =
-choice [simpleWhitespace *> whitespace
-,lineComment *> whitespace
-,blockComment *> whitespace
-,return ()]
-where
-lineComment = try (string "--")
-*> manyTill anyChar (void (char '\n') <|> eof)
-blockComment = try (string "/*")
-*> manyTill anyChar (try $ string "*/")
-simpleWhitespace = void $ many1 (oneOf " \t\n")
-lexeme :: Parser a -> Parser a
-lexeme p = p <* whitespace
-comma :: Parser Char
-comma = lexeme $ char ','
-
-commaSep :: Parser a -> Parser [a]
-commaSep = (`sepBy` comma)
-
-parseExpr :: String -> Either ParseError Expr
-parseExpr = parse (whitespace *> valueExpr <* eof) ""
-
--- --------------
-
-data Assoc = AssocLeft | AssocRight | AssocNone deriving (Eq,Show)
-
-type Fixities = [(String, (Int, Assoc))]
-
-fixFixity :: Fixities -> Expr -> Expr
-fixFixity fixities = fixBinOpPrecedence . fixBinOpAssociativity . fixNestedPrefPostPrec
-where
-fixBinOpAssociativity e = case e of
-BinOp a op b ->
-let a' = fixBinOpAssociativity a
-b' = fixBinOpAssociativity b
-def = BinOp a' op b'
-in case (a',b') of
--- both
--- a1 op1 a2 op b1 op2 b2
-(BinOp a1 op1 a2
-,BinOp b1 op2 b2)
-| Just (_p,opa) <- lookupFixity op
-, Just (_p,op1a) <- lookupFixity op1
-, Just (_p,op2a) <- lookupFixity op2
--> case (opa, op1a, op2a) of
-(AssocRight, AssocRight, AssocRight) ->
-BinOp a1 op1 (BinOp a2 op (BinOp b1 op2 b2))
-(AssocLeft, AssocLeft, AssocLeft) ->
-BinOp (BinOp (BinOp a1 op1 a2) op b1) op2 b2
---todo: other cases
-_ -> def
--- just left side
-(BinOp a1 op1 a2, _)
--- a1 op1 a2 op b'
-| Just (_p,opa) <- lookupFixity op
-, Just (_p,op1a) <- lookupFixity op1
--> case (opa, op1a) of
-(AssocRight, AssocRight) ->
-BinOp a1 op1 (BinOp a2 op b')
-(AssocLeft, AssocLeft) ->
-BinOp (BinOp a1 op1 a2) op b'
-_ -> def
-
--- just right side
-(_, BinOp b1 op2 b2)
--- e op b1 op2 b2
-| Just (_p,opa) <- lookupFixity op
-, Just (_p,op2a) <- lookupFixity op2
--> case (opa, op2a) of
-(AssocRight, AssocRight) ->
-BinOp a' op (BinOp b1 op2 b2)
-(AssocLeft, AssocLeft) ->
-BinOp (BinOp a' op b1) op2 b2
-_ -> def
-_ -> def
-_ -> e
-
-fixBinOpPrecedence e = case e of
-BinOp a op b ->
-let a' = fixBinOpPrecedence a
-b' = fixBinOpPrecedence b
-def = BinOp a' op b'
-in case (a',b') of
--- both
--- a1 op1 a2 op b1 op2 b2
--- all equal
--- p > or < p1 == p2
--- p == p1 < or > p2
-(BinOp a1 op1 a2
-,BinOp b1 op2 b2)
-| Just (p,_opa) <- lookupFixity op
-, Just (p1,_op1a) <- lookupFixity op1
-, Just (p2,_op2a) <- lookupFixity op2
--> case () of
--- _ | trace ("both prec " ++ show (p,p1,p2)) False -> undefined
-_ | p == p1 && p1 == p2 -> def
-_ | p > p1 && p1 == p2 -> BinOp a1 op1 b'
-_ | p < p1 && p1 == p2 -> BinOp (BinOp a1 op1 a2) op b'
-_ | p == p1 && p2 > p1 -> BinOp a' op (BinOp b1 op2 b2)
-_ | p == p1 && p2 < p1 -> def -- todo
-_ | otherwise -> def
--- just left side
-(BinOp a1 op1 a2, _)
--- a1 op1 a2 op b'
-| Just (p,_opa) <- lookupFixity op
-, Just (p1,_op1a) <- lookupFixity op1
--> case () of
--- _ | trace ("left prec " ++ show (p,p1)) False -> undefined
-_ | p < p1 -> {-trace "b1" $ -}BinOp (BinOp a1 op1 a2) op b'
-| p > p1 -> {-trace "b2" $ -}BinOp a1 op1 (BinOp a2 op b')
-| otherwise -> def
-
--- just right side
-(_, BinOp b1 op2 b2)
--- a' op b1 op2 b2
-| Just (p,_opa) <- lookupFixity op
-, Just (p2,_op1a) <- lookupFixity op2
--> case () of
--- _ | trace ("right prec " ++ show (p,p2)) False -> undefined
-_ | p > p2 -> {-trace "b1" $ -}BinOp (BinOp a' op b1) op2 b2
-| p < p2 -> {-trace "b2" $ -}BinOp a' op (BinOp b1 op2 b2)
-| otherwise -> {-trace "def" $ -} def
-_ -> def
-_ -> e
-
-fixNestedPrefPostPrec e = case e of
-PrefOp op a ->
-let a' = fixNestedPrefPostPrec a
-in case a' of
-PostOp op1 b | Just (p,_) <- lookupFixity op
-, Just (p1,_) <- lookupFixity op1
-, p > p1 -> PostOp op1 (PrefOp op b)
-_ -> PrefOp op a'
-PostOp op a ->
-let a' = fixNestedPrefPostPrec a
-in case a' of
-PrefOp op1 b | Just (p,_) <- lookupFixity op
-, Just (p1,_) <- lookupFixity op1
-, p > p1 -> PrefOp op1 (PostOp op b)
-_ -> PostOp op a'
-_ -> e
-
-
-
-lookupFixity :: String -> Maybe (Int,Assoc)
-lookupFixity s = maybe (trace ("didn't find " ++ s ++ "\n" ++ ppShow fixities) Nothing)
-Just $ lookup s fixities
-
-
-sqlFixity :: [(String, (Int, Assoc))]
-sqlFixity = [(".", (13, AssocLeft))
-,("[]", (12, AssocNone))
-
-{-
-unary + -
-todo: split the fixity table into prefix, binary and postfix
-
-todo: don't have explicit precedence numbers in the table??
--}
-
-,("^", (10, AssocNone))]
-++ m ["*", "/", "%"] (9, AssocLeft)
-++ m ["+","-"] (8, AssocLeft)
-++ m ["<", ">", "=", "<=", ">=", "<>"] (4, AssocNone)
-++ [("is null", (3, AssocNone))
-,("not", (2, AssocRight))
-,("and", (1, AssocLeft))
-,("or", (0, AssocLeft))]
-
-where
-m l a = map (,a) l
-
-{-
--------
-
-some simple parser tests
--}
-
-data Test = Group String [Test]
-| ParserTest String Expr
-| FixityTest Fixities Expr Expr
-
-parserTests :: Test
-parserTests = Group "parserTests" $ map (uncurry ParserTest) $
-[("a", Iden "a")
-,("'test'", Lit "test")
-,("34", Lit "34")
-,("f()", App "f" [])
-,("f(3)", App "f" [Lit "3"])
-,("(7)", Parens (Lit "7"))
-,("a + 3", BinOp (Iden "a") "+" (Lit "3"))
-,("1 + 2 + 3", BinOp (BinOp (Lit "1") "+" (Lit "2")) "+" (Lit "3"))
-
-,("a or b", BinOp (Iden "a") "or" (Iden "b"))
-,("-1", PrefOp "-" (Lit "1"))
-,("not a", PrefOp "not" (Iden "a"))
-,("not not a", PrefOp "not" (PrefOp "not" (Iden "a")))
-,("a is null", PostOp "is null" (Iden "a"))
-,("a is null is null", PostOp "is null" (PostOp "is null" (Iden "a")))
-,("-a+3", BinOp (PrefOp "-" (Iden "a")) "+" (Lit "3"))
-,("a is null and b is null", BinOp (PostOp "is null" (Iden "a"))
-"and"
-(PostOp "is null" (Iden "b")))
-]
-
-makeParserTest :: String -> Expr -> T.TestTree
-makeParserTest s e = H.testCase s $ do
-let a = parseExpr s
-if (Right e == a)
-then putStrLn $ s ++ " OK"
-else putStrLn $ "bad parse " ++ s ++ " " ++ show a
-
-{-
-------
-
-fixity checks
-
-test cases:
--}
-
-
-fixityTests :: Test
-fixityTests = Group "fixityTests" $
-map (\(f,s,e) -> FixityTest f s e) $
-[
-
--- 2 bin ops wrong associativity left + null versions
-
-(sqlFixity
-,i "a" `plus` (i "b" `plus` i "c")
-,(i "a" `plus` i "b") `plus` i "c")
-,(sqlFixity
-,(i "a" `plus` i "b") `plus` i "c"
-,(i "a" `plus` i "b") `plus` i "c")
-
--- 2 bin ops wrong associativity right
-
-,(timesRight
-,i "a" `times` (i "b" `times` i "c")
-,i "a" `times` (i "b" `times` i "c"))
-,(timesRight
-,(i "a" `times` i "b") `times` i "c"
-,i "a" `times` (i "b" `times` i "c"))
-
-
--- 2 bin ops wrong precedence left
-
-,(sqlFixity
-,i "a" `plus` (i "b" `times` i "c")
-,i "a" `plus` (i "b" `times` i "c"))
-
-,(sqlFixity
-,(i "a" `plus` i "b") `times` i "c"
-,i "a" `plus` (i "b" `times` i "c"))
-
--- 2 bin ops wrong precedence right
-
-,(sqlFixity
-,(i "a" `times` i "b") `plus` i "c"
-,(i "a" `times` i "b") `plus` i "c")
-
-,(sqlFixity
-,i "a" `times` (i "b" `plus` i "c")
-,(i "a" `times` i "b") `plus` i "c")
-
-{-
-a + b * c + d
-a * b + c * d
-
-check all variations
--}
-
-] ++
-(let t = (i "a" `plus` i "b")
-`times`
-(i "c" `plus` i "d")
-trs = generateTrees $ splitTree t
-in [(sqlFixity, x
-,i "a" `plus` (i "b" `times` i "c")
-`plus` i "d")
-| x <- trs])
-++
-(let t = (i "a" `times` i "b")
-`plus`
-(i "c" `times` i "d")
-trs = generateTrees $ splitTree t
-in [(sqlFixity, x
-,(i "a" `times` i "b")
-`plus`
-(i "c" `times` i "d"))
-| x <- trs])
-
-
-++ [
-
--- prefix then postfix wrong precedence
-
-([("+", (9, AssocNone))
-,("is null", (3, AssocNone))]
-,PrefOp "+" (PostOp "is null" (i "a"))
-,PostOp "is null" (PrefOp "+" (i "a")))
-
-,([("+", (9, AssocNone))
-,("is null", (3, AssocNone))]
-,PostOp "is null" (PrefOp "+" (i "a"))
-,PostOp "is null" (PrefOp "+" (i "a")))
-
-,([("+", (3, AssocNone))
-,("is null", (9, AssocNone))]
-,PrefOp "+" (PostOp "is null" (i "a"))
-,PrefOp "+" (PostOp "is null" (i "a")))
-
-,([("+", (3, AssocNone))
-,("is null", (9, AssocNone))]
-,PostOp "is null" (PrefOp "+" (i "a"))
-,PrefOp "+" (PostOp "is null" (i "a")))
-
-{-
-3-way unary operator movement:
-take a starting point and generate variations
-
-postfix on first arg of binop (cannot move) make sure precedence wants
-it to move
-
-prefix on second arg of binop (cannot move)
-
-prefix on binop, precedence wrong
-postfix on binop precedence wrong
-prefix on first arg of binop, precedence wrong
-postfix on second arg of binop, precedence wrong
-
-ambiguous fixity tests
-
-sanity check: parens stops rearrangement
-
-check nesting 1 + f(expr)
--}
-
-]
-where
-plus a b = BinOp a "+" b
-times a b = BinOp a "*" b
-i a = Iden a
-timesRight = [("*", (9, AssocRight))]
-
--- testCase
-
-makeFixityTest :: Fixities -> Expr -> Expr -> T.TestTree
-makeFixityTest fs s e = H.testCase (show s) $ do
-let s' = fixFixity fs s
-H.assertEqual "" s' e
-{-if (s' == e)
-then putStrLn $ show s ++ " OK"
-else putStrLn $ "ERROR\nstart: " ++ show s ++ "\nfixed: " ++ show s' ++ "\nshould be: " ++ show e-}
-
-tests :: Test
-tests = Group "Tests" [parserTests, fixityTests]
-
-makeTest :: Test -> T.TestTree
-makeTest (Group n ts) = T.testGroup n $ map makeTest ts
-makeTest (ParserTest s e) = makeParserTest s e
-makeTest (FixityTest f s e) = makeFixityTest f s e
-
-{-
---------
-
-> tests :: T.TestTree
-> tests = T.testGroup "Tests" $ map makeFixityTest fixityTests
--}
-
-main :: IO ()
-main = T.defaultMain $ makeTest tests
-{-do
-mapM_ checkTest tests
-mapM_ checkFixity fixityTests
-let plus a b = BinOp a "+" b
-times a b = BinOp a "*" b
-i a = Iden a
-let t = (i "a" `plus` i "b")
-`times`
-(i "c" `plus` i "d")
-spl = splitTree t
-trs = generateTrees spl
---putStrLn $ "\nSplit\n"
---putStrLn $ ppShow (fst spl, length $ snd spl)
---putStrLn $ show $ length trs
---putStrLn $ "\nTrees\n"
---putStrLn $ intercalate "\n" $ map show trs
-return ()-}
-
-{-
-generating trees
-
-1. tree -> list
-val op val op val op ...
-(has to be two lists?
-
-generate variations:
-pick numbers from 0 to n - 1 (n is the number of ops)
-choose the op at this position to be the root
-recurse on the two sides
--}
-
-splitTree :: Expr -> ([Expr], [Expr->Expr->Expr])
-splitTree (BinOp a op b) = let (x,y) = splitTree a
-(z,w) = splitTree b
-in (x++z, y++ [\a b -> BinOp a op b] ++ w)
-splitTree x = ([x],[])
-
-
-
-generateTrees :: ([Expr], [Expr->Expr->Expr]) -> [Expr]
-generateTrees (es,ops) | length es /= length ops + 1 =
-error $ "mismatch in lengths " ++ show (length es, length ops)
-++"\n" ++ ppShow es ++ "\n"
-generateTrees ([a,b], [op]) = [op a b]
-generateTrees ([a], []) = [a]
-generateTrees (vs, ops) =
-let n = length ops
-in --trace ("generating " ++ show (length vs, n) ++ "trees\n") $
-concat $ flip map [0..n-1] $ \m ->
-let (v1,v2) = splitAt (m + 1) vs
-(ops1,op':ops2) = splitAt m ops
-r = [op' t u | t <- generateTrees (v1,ops1)
-, u <- generateTrees (v2,ops2)]
-in -- trace ("generated " ++ show (length r) ++ " trees")
-r
-generateTrees ([],[]) = []
-
-
-
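The removed tools/Fixity.hs was a standalone experiment: parse with flat fixities, then let fixFixity rewrite the tree from a fixity table. A sketch of how its own pieces fit together, using only definitions from the deleted file:

-- Parse "a + b * c" with every operator left-associative and equal
-- precedence, then repair the tree against the sqlFixity table.
fixedExample :: Either ParseError Expr
fixedExample = fixFixity sqlFixity <$> parseExpr "a + b * c"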
@@ -1,4 +1,5 @@

+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.CreateIndex where

 import Language.SQL.SimpleSQL.Syntax
@@ -1,4 +1,5 @@

+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.CustomDialect (customDialectTests) where

 import Language.SQL.SimpleSQL.TestTypes
@@ -1,3 +1,4 @@
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.EmptyStatement where

 import Language.SQL.SimpleSQL.Syntax
@@ -1,6 +1,7 @@

 -- Some tests for parsing full queries.

+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.FullQueries (fullQueriesTests) where

 import Language.SQL.SimpleSQL.TestTypes
@@ -1,6 +1,7 @@

 -- Here are the tests for the group by component of query exprs

+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.GroupBy (groupByTests) where

 import Language.SQL.SimpleSQL.TestTypes
@@ -1,6 +1,7 @@

 -- Tests for mysql dialect parsing

+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.MySQL (mySQLTests) where

 import Language.SQL.SimpleSQL.TestTypes
@@ -1,4 +1,5 @@

+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.Odbc (odbcTests) where

 import Language.SQL.SimpleSQL.TestTypes
@@ -1,6 +1,7 @@

 -- Tests for oracle dialect parsing

+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.Oracle (oracleTests) where

 import Language.SQL.SimpleSQL.TestTypes
@@ -5,6 +5,7 @@ all of the postgres specific syntax has been skipped, this can be
 revisited when the dialect support is added.
 -}

+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.Postgres (postgresTests) where

 import Language.SQL.SimpleSQL.TestTypes
@@ -7,6 +7,7 @@ table refs which are in a separate file.
 These are a few misc tests which don't fit anywhere else.
 -}

+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.QueryExprComponents (queryExprComponentTests) where

 import Language.SQL.SimpleSQL.TestTypes
@@ -4,6 +4,7 @@ These are the tests for the queryExprs parsing which parses multiple
 query expressions from one string.
 -}

+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.QueryExprs (queryExprsTests) where

 import Language.SQL.SimpleSQL.TestTypes
@@ -6,6 +6,7 @@ grant, etc
 -}


+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.SQL2011AccessControl (sql2011AccessControlTests) where

 import Language.SQL.SimpleSQL.TestTypes
@@ -7,6 +7,7 @@ commit, savepoint, etc.), and session management (set).
 -}


+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.SQL2011Bits (sql2011BitsTests) where

 import Language.SQL.SimpleSQL.TestTypes
@@ -2,6 +2,7 @@
 -- Section 14 in Foundation


+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.SQL2011DataManipulation (sql2011DataManipulationTests) where

 import Language.SQL.SimpleSQL.TestTypes
@ -31,10 +31,14 @@ some areas getting more comprehensive coverage tests, and also to note
|
||||||
which parts aren't currently supported.
|
which parts aren't currently supported.
|
||||||
-}
|
-}
|
||||||
|
|
||||||
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
module Language.SQL.SimpleSQL.SQL2011Queries (sql2011QueryTests) where
|
module Language.SQL.SimpleSQL.SQL2011Queries (sql2011QueryTests) where
|
||||||
import Language.SQL.SimpleSQL.TestTypes
|
import Language.SQL.SimpleSQL.TestTypes
|
||||||
import Language.SQL.SimpleSQL.Syntax
|
import Language.SQL.SimpleSQL.Syntax
|
||||||
|
|
||||||
|
import qualified Data.Text as T
|
||||||
|
import Data.Text (Text)
|
||||||
|
|
||||||
sql2011QueryTests :: TestItem
|
sql2011QueryTests :: TestItem
|
||||||
sql2011QueryTests = Group "sql 2011 query tests"
|
sql2011QueryTests = Group "sql 2011 query tests"
|
||||||
[literals
|
[literals
|
||||||
|
@@ -1050,14 +1054,15 @@ new multipliers
 create a list of type name variations:
 -}
 
-typeNames :: ([(String,TypeName)],[(String,TypeName)])
+typeNames :: ([(Text,TypeName)],[(Text,TypeName)])
 typeNames =
     (basicTypes, concatMap makeArray basicTypes
-                 ++ map makeMultiset basicTypes)
+                 <> map makeMultiset basicTypes)
   where
-    makeArray (s,t) = [(s ++ " array", ArrayTypeName t Nothing)
-                      ,(s ++ " array[5]", ArrayTypeName t (Just 5))]
-    makeMultiset (s,t) = (s ++ " multiset", MultisetTypeName t)
+    makeArray (s,t) = [(s <> " array", ArrayTypeName t Nothing)
+                      ,(s <> " array[5]", ArrayTypeName t (Just 5))]
+    makeMultiset (s,t) = (s <> " multiset", MultisetTypeName t)
+basicTypes :: [(Text, TypeName)]
 basicTypes =
     -- example of every standard type name
     map (\t -> (t,TypeName [Name Nothing t]))
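The (++) to (<>) changes are what make these helpers work after the type switch: (++) only concatenates lists, while (<>) is the Semigroup append and covers String and Text alike. A toy standalone version of the makeArray helper above, for illustration only:

{-# LANGUAGE OverloadedStrings #-}
import Data.Text (Text)

-- (<>) concatenates Text just as (++) concatenated String
arrayVariants :: Text -> [Text]
arrayVariants s = [s <> " array", s <> " array[5]"]

main :: IO ()
main = mapM_ print (arrayVariants "int")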
@@ -1102,7 +1107,7 @@ typeNames =
     -- array -- not allowed on own
     -- multiset -- not allowed on own
 
-    ++
+    <>
     [-- 1 single prec + 1 with multiname
      ("char(5)", PrecTypeName [Name Nothing "char"] 5)
     ,("char varying(5)", PrecTypeName [Name Nothing "char varying"] 5)

@@ -1224,12 +1229,12 @@ typeNameTests = Group "type names"
     $ concatMap makeTests $ snd typeNames]
   where
     makeSimpleTests (ctn, stn) =
-        [(ctn ++ " 'test'", TypedLit stn "test")
+        [(ctn <> " 'test'", TypedLit stn "test")
         ]
     makeCastTests (ctn, stn) =
-        [("cast('test' as " ++ ctn ++ ")", Cast (StringLit "'" "'" "test") stn)
+        [("cast('test' as " <> ctn <> ")", Cast (StringLit "'" "'" "test") stn)
         ]
-    makeTests a = makeSimpleTests a ++ makeCastTests a
+    makeTests a = makeSimpleTests a <> makeCastTests a
 
 
 {-
@@ -3590,7 +3595,7 @@ comparisonPredicates :: TestItem
 comparisonPredicates = Group "comparison predicates"
     $ map (uncurry (TestScalarExpr ansi2011))
     $ map mkOp ["=", "<>", "<", ">", "<=", ">="]
-    ++ [("ROW(a) = ROW(b)"
+    <> [("ROW(a) = ROW(b)"
        ,BinOp (App [Name Nothing "ROW"] [a])
              [Name Nothing "="]
              (App [Name Nothing "ROW"] [b]))

@@ -3600,7 +3605,7 @@ comparisonPredicates = Group "comparison predicates"
        (SpecialOp [Name Nothing "rowctor"] [Iden [Name Nothing "c"], Iden [Name Nothing "d"]]))
     ]
   where
-    mkOp nm = ("a " ++ nm ++ " b"
+    mkOp nm = ("a " <> nm <> " b"
              ,BinOp a [Name Nothing nm] b)
     a = Iden [Name Nothing "a"]
     b = Iden [Name Nothing "b"]

@@ -3911,7 +3916,7 @@ matchPredicate = Group "match predicate"
         {qeSelectList = [(Iden [Name Nothing "a"],Nothing)]
         ,qeFrom = [TRSimple [Name Nothing "t"]]}
     qea = qe {qeSelectList = qeSelectList qe
-                             ++ [(Iden [Name Nothing "b"],Nothing)]}
+                             <> [(Iden [Name Nothing "b"],Nothing)]}
 
 {-
 TODO: simple, partial and full
@@ -4397,7 +4402,7 @@ aggregateFunction = Group "aggregate function"
      ,AggregateApp [Name Nothing "count"]
                    All
                    [Iden [Name Nothing "a"]] [] fil)
-    ] ++ concatMap mkSimpleAgg
+    ] <> concatMap mkSimpleAgg
          ["avg","max","min","sum"
          ,"every", "any", "some"
          ,"stddev_pop","stddev_samp","var_samp","var_pop"

@@ -4405,7 +4410,7 @@ aggregateFunction = Group "aggregate function"
 
     -- bsf
 
-    ++ concatMap mkBsf
+    <> concatMap mkBsf
        ["COVAR_POP","COVAR_SAMP","CORR","REGR_SLOPE"
        ,"REGR_INTERCEPT","REGR_COUNT","REGR_R2"
        ,"REGR_AVGX","REGR_AVGY"

@@ -4413,15 +4418,15 @@ aggregateFunction = Group "aggregate function"
 
     -- osf
 
-    ++
+    <>
     [("rank(a,c) within group (order by b)"
      ,AggregateAppGroup [Name Nothing "rank"]
           [Iden [Name Nothing "a"], Iden [Name Nothing "c"]]
           ob)]
-    ++ map mkGp ["dense_rank","percent_rank"
+    <> map mkGp ["dense_rank","percent_rank"
                 ,"cume_dist", "percentile_cont"
                 ,"percentile_disc"]
-    ++ [("array_agg(a)", App [Name Nothing "array_agg"] [Iden [Name Nothing "a"]])
+    <> [("array_agg(a)", App [Name Nothing "array_agg"] [Iden [Name Nothing "a"]])
        ,("array_agg(a order by z)"
        ,AggregateApp [Name Nothing "array_agg"]
                      SQDefault

@@ -4433,20 +4438,20 @@ aggregateFunction = Group "aggregate function"
   where
     fil = Just $ BinOp (Iden [Name Nothing "something"]) [Name Nothing ">"] (NumLit "5")
     ob = [SortSpec (Iden [Name Nothing "b"]) DirDefault NullsOrderDefault]
-    mkGp nm = (nm ++ "(a) within group (order by b)"
+    mkGp nm = (nm <> "(a) within group (order by b)"
              ,AggregateAppGroup [Name Nothing nm]
                  [Iden [Name Nothing "a"]]
                  ob)
 
     mkSimpleAgg nm =
-        [(nm ++ "(a)",App [Name Nothing nm] [Iden [Name Nothing "a"]])
-        ,(nm ++ "(distinct a)"
+        [(nm <> "(a)",App [Name Nothing nm] [Iden [Name Nothing "a"]])
+        ,(nm <> "(distinct a)"
         ,AggregateApp [Name Nothing nm]
                       Distinct
                       [Iden [Name Nothing "a"]] [] Nothing)]
     mkBsf nm =
-        [(nm ++ "(a,b)",App [Name Nothing nm] [Iden [Name Nothing "a"],Iden [Name Nothing "b"]])
-        ,(nm ++"(a,b) filter (where something > 5)"
+        [(nm <> "(a,b)",App [Name Nothing nm] [Iden [Name Nothing "a"],Iden [Name Nothing "b"]])
+        ,(nm <> "(a,b) filter (where something > 5)"
         ,AggregateApp [Name Nothing nm]
                       SQDefault
                       [Iden [Name Nothing "a"],Iden [Name Nothing "b"]] [] fil)]
@@ -5,6 +5,7 @@ Section 11 in Foundation
 This module covers the tests for parsing schema and DDL statements.
 -}
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.SQL2011Schema (sql2011SchemaTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
@@ -1,11 +1,14 @@
 
 -- Tests for parsing scalar expressions
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.ScalarExprs (scalarExprTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
 import Language.SQL.SimpleSQL.Syntax
 
+import qualified Data.Text as T
 
 scalarExprTests :: TestItem
 scalarExprTests = Group "scalarExprTests"
     [literals

@@ -428,5 +431,5 @@ functionsWithReservedNames = Group "functionsWithReservedNames" $ map t
     ,"char_length"
     ]
   where
-    t fn = TestScalarExpr ansi2011 (fn ++ "(a)") $ App [Name Nothing fn] [Iden [Name Nothing "a"]]
+    t fn = TestScalarExpr ansi2011 (fn <> "(a)") $ App [Name Nothing fn] [Iden [Name Nothing "a"]]
 
@@ -4,6 +4,7 @@ These are the tests for parsing focusing on the from part of query
 expression
 -}
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.TableRefs (tableRefTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
@@ -22,11 +22,11 @@ mentioned give a parse error. Not sure if this will be too awkward due
 to lots of tricky exceptions/variationsx.
 -}
 
-data TestItem = Group String [TestItem]
-              | TestScalarExpr Dialect String ScalarExpr
-              | TestQueryExpr Dialect String QueryExpr
-              | TestStatement Dialect String Statement
-              | TestStatements Dialect String [Statement]
+data TestItem = Group Text [TestItem]
+              | TestScalarExpr Dialect Text ScalarExpr
+              | TestQueryExpr Dialect Text QueryExpr
+              | TestStatement Dialect Text Statement
+              | TestStatements Dialect Text [Statement]
 
 {-
 this just checks the sql parses without error, mostly just a

@@ -34,12 +34,12 @@ intermediate when I'm too lazy to write out the parsed AST. These
 should all be TODO to convert to a testqueryexpr test.
 -}
 
-              | ParseQueryExpr Dialect String
+              | ParseQueryExpr Dialect Text
 
               -- check that the string given fails to parse
 
-              | ParseQueryExprFails Dialect String
-              | ParseScalarExprFails Dialect String
+              | ParseQueryExprFails Dialect Text
+              | ParseScalarExprFails Dialect Text
               | LexTest Dialect Text [Token]
-              | LexFails Dialect String
+              | LexFails Dialect Text
                 deriving (Eq,Show)
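With the payload type of TestItem switched to Text, the big literal test tables in the modules above keep compiling thanks to OverloadedStrings. A cut-down, standalone illustration of the same shape; Item and Case are stand-ins, and the real constructors also carry a Dialect and an expected AST:

{-# LANGUAGE OverloadedStrings #-}
import Data.Text (Text)

-- toy test-item type: the group name and the input are Text,
-- so the literal table below is written exactly as before
data Item = Group Text [Item]
          | Case Text Int
    deriving Show

items :: Item
items = Group "arithmetic"
    [ Case "1+2" 3
    , Case "2*3" 6
    ]

main :: IO ()
main = print items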
@@ -87,7 +87,7 @@ tests = itemToTest testData
 
 itemToTest :: TestItem -> T.TestTree
 itemToTest (Group nm ts) =
-    T.testGroup nm $ map itemToTest ts
+    T.testGroup (T.unpack nm) $ map itemToTest ts
 itemToTest (TestScalarExpr d str expected) =
     toTest parseScalarExpr prettyScalarExpr d str expected
 itemToTest (TestQueryExpr d str expected) =
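tasty's TestName is a String, so the Text now flowing through TestItem gets unpacked only at the point where a TestTree is built. A small self-contained sketch of that boundary, assuming tasty and tasty-hunit; namedGroup is an illustrative helper, not from the patch:

{-# LANGUAGE OverloadedStrings #-}
import Data.Text (Text)
import qualified Data.Text as T
import Test.Tasty (TestTree, defaultMain, testGroup)
import Test.Tasty.HUnit (testCase, assertEqual)

-- Text name in, String-based tasty API on the other side
namedGroup :: Text -> [TestTree] -> TestTree
namedGroup nm = testGroup (T.unpack nm)

main :: IO ()
main = defaultMain $ namedGroup "example group"
    [ testCase "trivial" (assertEqual "" (4 :: Int) (2 + 2)) ]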
@@ -116,65 +116,64 @@ makeLexerTest d s ts = H.testCase (T.unpack s) $ do
     let s' = Lex.prettyTokens d $ ts1
     H.assertEqual "pretty print" s s'
 
-makeLexingFailsTest :: Dialect -> String -> T.TestTree
-makeLexingFailsTest d s = H.testCase s $ do
+makeLexingFailsTest :: Dialect -> Text -> T.TestTree
+makeLexingFailsTest d s = H.testCase (T.unpack s) $ do
     undefined {-case lexSQL d "" Nothing s of
         Right x -> H.assertFailure $ "lexing should have failed: " ++ s ++ "\ngot: " ++ show x
         Left _ -> return ()-}
 
 
 toTest :: (Eq a, Show a) =>
-    (Dialect -> String -> Maybe (Int,Int) -> String -> Either ParseError a)
-    -> (Dialect -> a -> String)
+    (Dialect -> Text -> Maybe (Int,Int) -> Text -> Either ParseError a)
+    -> (Dialect -> a -> Text)
     -> Dialect
-    -> String
+    -> Text
     -> a
     -> T.TestTree
-toTest parser pp d str expected = H.testCase str $ do
+toTest parser pp d str expected = H.testCase (T.unpack str) $ do
     let egot = parser d "" Nothing str
     case egot of
-        Left e -> H.assertFailure $ peFormattedError e
+        Left e -> H.assertFailure $ T.unpack $ prettyError e
         Right got -> do
            H.assertEqual "" expected got
            let str' = pp d got
            let egot' = parser d "" Nothing str'
            case egot' of
                Left e' -> H.assertFailure $ "pp roundtrip"
-                    ++ "\n" ++ str'
-                    ++ peFormattedError e'
+                    ++ "\n" ++ (T.unpack str')
+                    ++ (T.unpack $ prettyError e')
                Right got' -> H.assertEqual
-                    ("pp roundtrip" ++ "\n" ++ str')
+                    ("pp roundtrip" ++ "\n" ++ T.unpack str')
                    expected got'
 
 toPTest :: (Eq a, Show a) =>
-    (Dialect -> String -> Maybe (Int,Int) -> String -> Either ParseError a)
-    -> (Dialect -> a -> String)
+    (Dialect -> Text -> Maybe (Int,Int) -> Text -> Either ParseError a)
+    -> (Dialect -> a -> Text)
     -> Dialect
-    -> String
+    -> Text
     -> T.TestTree
-toPTest parser pp d str = H.testCase str $ do
+toPTest parser pp d str = H.testCase (T.unpack str) $ do
     let egot = parser d "" Nothing str
     case egot of
-        Left e -> H.assertFailure $ peFormattedError e
+        Left e -> H.assertFailure $ T.unpack $ prettyError e
         Right got -> do
            let str' = pp d got
            let egot' = parser d "" Nothing str'
            case egot' of
                Left e' -> H.assertFailure $ "pp roundtrip "
-                    ++ "\n" ++ str' ++ "\n"
-                    ++ peFormattedError e'
+                    ++ "\n" ++ T.unpack str' ++ "\n"
+                    ++ T.unpack (prettyError e')
                Right _got' -> return ()
 
 
 toFTest :: (Eq a, Show a) =>
-    (Dialect -> String -> Maybe (Int,Int) -> String -> Either ParseError a)
-    -> (Dialect -> a -> String)
+    (Dialect -> Text -> Maybe (Int,Int) -> Text -> Either ParseError a)
+    -> (Dialect -> a -> Text)
     -> Dialect
-    -> String
+    -> Text
     -> T.TestTree
-toFTest parser _pp d str = H.testCase str $ do
+toFTest parser _pp d str = H.testCase (T.unpack str) $ do
     let egot = parser d "" Nothing str
     case egot of
         Left _e -> return ()
         Right _got ->
-            H.assertFailure $ "parse didn't fail: " ++ show d ++ "\n" ++ str
+            H.assertFailure $ "parse didn't fail: " ++ show d ++ "\n" ++ T.unpack str
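toTest, toPTest and toFTest differ only in how much of the round trip they check. Stripped of the tasty plumbing, the property they enforce looks roughly like this; roundTrips, parseNum and prettyNum are illustrative stand-ins, not functions from the patch:

{-# LANGUAGE OverloadedStrings #-}
import Data.Text (Text)
import qualified Data.Text as T

-- parse, pretty print, re-parse, and require the second parse to agree
roundTrips :: Eq a => (Text -> Either Text a) -> (a -> Text) -> Text -> Bool
roundTrips parse pretty src =
    case parse src of
        Left _ -> False
        Right ast -> parse (pretty ast) == Right ast

main :: IO ()
main = print (roundTrips parseNum prettyNum "42")
  where
    -- toy stand-ins for the real parser and pretty printer
    parseNum :: Text -> Either Text Int
    parseNum t = case reads (T.unpack t) of
        [(n, "")] -> Right n
        _ -> Left "no parse"
    prettyNum :: Int -> Text
    prettyNum = T.pack . show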
@@ -8,16 +8,19 @@ The changes made to the official syntax are:
 using a common table expression
 -}
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.Tpch (tpchTests,tpchQueries) where
 
 import Language.SQL.SimpleSQL.TestTypes
 
+import Data.Text (Text)
 
 tpchTests :: TestItem
 tpchTests =
     Group "parse tpch"
     $ map (ParseQueryExpr ansi2011 . snd) tpchQueries
 
-tpchQueries :: [(String,String)]
+tpchQueries :: [(String,Text)]
 tpchQueries =
     [("Q1","\n\
 \select\n\
@@ -7,20 +7,30 @@ Commands:
 parse: parse sql from file, stdin or from command line
 lex: lex sql same
 indent: parse then pretty print sql
 
+TODO: this is supposed to be a simple example, but it's a total mess
+write some simple helpers so it's all in text?
 
 -}
 
 {-# LANGUAGE TupleSections #-}
-import System.Environment
-import Control.Monad
-import Data.Maybe
-import System.Exit
-import Data.List
-import Text.Show.Pretty
+import System.Environment (getArgs)
+import Control.Monad (forM_, when)
+import Data.Maybe (isJust)
+import System.Exit (exitFailure)
+import Data.List (intercalate)
+import Text.Show.Pretty (ppShow)
 --import Control.Applicative
 
+import qualified Data.Text as T
 
 import Language.SQL.SimpleSQL.Pretty
+    (prettyStatements)
 import Language.SQL.SimpleSQL.Parse
-import Language.SQL.SimpleSQL.Lex
+    (parseStatements
+    ,prettyError)
+import qualified Language.SQL.SimpleSQL.Lex as L
+import Language.SQL.SimpleSQL.Dialect (ansi2011)
 
 
 main :: IO ()
@@ -67,9 +77,9 @@ parseCommand =
     ("parse SQL from file/stdin/command line (use -c to parse from command line)"
     ,\args -> do
         (f,src) <- getInput args
-        either (error . peFormattedError)
+        either (error . T.unpack . prettyError)
             (putStrLn . ppShow)
-            $ parseStatements ansi2011 f Nothing src
+            $ parseStatements ansi2011 (T.pack f) Nothing (T.pack src)
     )
 
 lexCommand :: (String,[String] -> IO ())

@@ -77,9 +87,9 @@ lexCommand =
     ("lex SQL from file/stdin/command line (use -c to parse from command line)"
     ,\args -> do
         (f,src) <- getInput args
-        either (error . peFormattedError)
+        either (error . T.unpack . L.prettyError)
             (putStrLn . intercalate ",\n" . map show)
-            $ lexSQL ansi2011 f Nothing src
+            $ L.lexSQL ansi2011 (T.pack f) Nothing (T.pack src)
     )
 
 

@@ -88,8 +98,8 @@ indentCommand =
     ("parse then pretty print SQL from file/stdin/command line (use -c to parse from command line)"
     ,\args -> do
         (f,src) <- getInput args
-        either (error . peFormattedError)
-            (putStrLn . prettyStatements ansi2011)
-            $ parseStatements ansi2011 f Nothing src
+        either (error . T.unpack . prettyError)
+            (putStrLn . T.unpack . prettyStatements ansi2011)
+            $ parseStatements ansi2011 (T.pack f) Nothing (T.pack src)
 
     )
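All three commands now follow the same pattern: pack the String input from getInput, call the Text-based API, and unpack only for error and output boundaries. A standalone sketch of just the parse path, using only functions imported in this patch (parseStatements, prettyError, ansi2011, ppShow) and reading from stdin for simplicity, assuming the post-conversion signatures shown above:

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Text.IO as T
import Data.Text.IO (getContents)
import Prelude hiding (getContents)
import Language.SQL.SimpleSQL.Parse (parseStatements, prettyError)
import Language.SQL.SimpleSQL.Dialect (ansi2011)
import Text.Show.Pretty (ppShow)

main :: IO ()
main = do
    src <- getContents
    -- "<stdin>" stands in for the filename argument; positions are not needed here
    case parseStatements ansi2011 "<stdin>" Nothing src of
        Left e -> T.putStrLn (prettyError e)
        Right ast -> putStrLn (ppShow ast)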
@@ -1,43 +1,52 @@
 
 -- Converts the test data to asciidoc
 
+{-# LANGUAGE OverloadedStrings #-}
 import Language.SQL.SimpleSQL.Tests
 import Text.Show.Pretty
 import Control.Monad.State
-import Language.SQL.SimpleSQL.Parse
-import Language.SQL.SimpleSQL.Lex
+import qualified Language.SQL.SimpleSQL.Parse as P
+import qualified Language.SQL.SimpleSQL.Lex as L
 import Data.List
 import Control.Monad (when, unless)
+import Data.Text (Text)
+import qualified Data.Text as T
 
-data TableItem = Heading Int String
-               | Row String String
+import Prelude hiding (putStrLn)
+import Data.Text.IO (putStrLn)
 
+data TableItem = Heading Int Text
+               | Row Text Text
 
 doc :: Int -> TestItem -> [TableItem]
 -- filter out some groups of tests
-doc n (Group nm _) | "generated" `isInfixOf` nm = []
+doc n (Group nm _) | "generated" `T.isInfixOf` nm = []
 doc n (Group nm is) =
     Heading n nm
     : concatMap (doc (n + 1)) is
 doc _ (TestScalarExpr _ str e) =
-    [Row str (ppShow e)]
+    [Row str (T.pack $ ppShow e)]
 doc _ (TestQueryExpr _ str e) =
-    [Row str (ppShow e)]
+    [Row str (T.pack $ ppShow e)]
 doc _ (TestStatement _ str e) =
-    [Row str (ppShow e)]
+    [Row str (T.pack $ ppShow e)]
 doc _ (TestStatements _ str e) =
-    [Row str (ppShow e)]
+    [Row str (T.pack $ ppShow e)]
 doc _ (ParseQueryExpr d str) =
-    [Row str (ppShow $ parseQueryExpr d "" Nothing str)]
+    [Row str (showResult $ P.parseQueryExpr d "" Nothing str)]
 doc _ (ParseQueryExprFails d str) =
-    [Row str (ppShow $ parseQueryExpr d "" Nothing str)]
+    [Row str (showResult $ P.parseQueryExpr d "" Nothing str)]
 doc _ (ParseScalarExprFails d str) =
-    [Row str (ppShow $ parseScalarExpr d "" Nothing str)]
+    [Row str (showResult $ P.parseScalarExpr d "" Nothing str)]
 
 doc _ (LexTest d str t) =
-    [Row str (ppShow $ lexSQL d "" Nothing str)]
+    [Row str (T.pack $ ppShow $ L.lexSQL d "" Nothing str)]
 
 doc _ (LexFails d str) =
-    [Row str (ppShow $ lexSQL d "" Nothing str)]
+    [Row str (T.pack $ ppShow $ L.lexSQL d "" Nothing str)]
 
+showResult :: Show a => Either P.ParseError a -> Text
+showResult = either P.prettyError (T.pack . ppShow)
 
 -- TODO: should put the dialect in the html output
 
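Hiding the Prelude's putStrLn in favour of the Data.Text.IO one lets the rendering code below print Text directly instead of unpacking at every call site. A standalone sketch of that import pattern, together with the T.replicate heading trick used in render; heading is an illustrative name, not from the patch:

{-# LANGUAGE OverloadedStrings #-}
import Prelude hiding (putStrLn)
import Data.Text.IO (putStrLn)
import Data.Text (Text)
import qualified Data.Text as T

-- build an asciidoc heading line as Text: T.replicate takes the count first
heading :: Int -> Text -> Text
heading level title = "\n" <> T.replicate level "=" <> " " <> title

main :: IO ()
main = putStrLn (heading 2 "type names")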
@@ -49,20 +58,21 @@ render = go False
         when t $ putStrLn "|==="
         -- slight hack
         when (level > 1) $
-            putStrLn $ "\n" ++ replicate level '=' ++ " " ++ title
+            putStrLn $ "\n" <> T.replicate level "=" <> " " <> title
         go False is
     go t (Row sql hask : is) = do
         unless t $ putStrLn "[cols=\"2\"]\n|==="
-        let sql' = "\n[source,sql]\n----\n" ++ sql ++ "\n----\n"
-            hask' = "\n[source,haskell]\n----\n" ++ hask ++ "\n----\n"
-        putStrLn $ "a| " ++ escapePipe sql'
-            ++ "a| " ++ escapePipe hask' ++ " "
+        let sql' = "\n[source,sql]\n----\n" <> sql <> "\n----\n"
+            hask' = "\n[source,haskell]\n----\n" <> hask <> "\n----\n"
+        putStrLn $ "a| " <> escapePipe sql'
+            <> "a| " <> escapePipe hask' <> " "
         go True is
     go t [] = when t $ putStrLn "|==="
-    escapePipe [] = []
-    escapePipe ('\\':'|':xs) = '\\' : '\\' : '\\' : '|' : escapePipe xs
-    escapePipe ('|':xs) = '\\' : '|' : escapePipe xs
-    escapePipe (x:xs) = x : escapePipe xs
+    escapePipe t = T.pack $ escapePipe' $ T.unpack t
+    escapePipe' [] = []
+    escapePipe' ('\\':'|':xs) = '\\' : '\\' : '\\' : '|' : escapePipe' xs
+    escapePipe' ('|':xs) = '\\' : '|' : escapePipe' xs
+    escapePipe' (x:xs) = x : escapePipe' xs
 
 main :: IO ()
 main = do
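escapePipe keeps the old character-by-character String worker and only converts at the edges with T.unpack and T.pack. The same wrap-the-worker pattern in isolation; viaString is an illustrative name, not from the patch:

import Data.Text (Text)
import qualified Data.Text as T

-- lift an existing String-to-String function to Text by converting at the boundary
viaString :: (String -> String) -> Text -> Text
viaString f = T.pack . f . T.unpack

main :: IO ()
main = print (viaString reverse (T.pack "select 1"))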