diff --git a/Language/SQL/SimpleSQL/Dialect.hs b/Language/SQL/SimpleSQL/Dialect.hs
index 4c54e11..8d3d5c5 100644
--- a/Language/SQL/SimpleSQL/Dialect.hs
+++ b/Language/SQL/SimpleSQL/Dialect.hs
@@ -3,6 +3,7 @@
 -- Data types to represent different dialect options
 
 {-# LANGUAGE DeriveDataTypeable #-}
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.Dialect
     (Dialect(..)
     ,ansi2011
@@ -12,6 +13,7 @@ module Language.SQL.SimpleSQL.Dialect
     ,sqlserver
     ) where
 
+import Data.Text (Text)
 import Data.Data
 
 -- | Used to set the dialect used for parsing and pretty printing,
@@ -55,14 +57,14 @@ import Data.Data
 
 data Dialect = Dialect
     { -- | reserved keywords
-     diKeywords :: [String]
+     diKeywords :: [Text]
       -- | keywords with identifier exception
-    ,diIdentifierKeywords :: [String]
+    ,diIdentifierKeywords :: [Text]
       -- | keywords with app exception
-    ,diAppKeywords :: [String]
+    ,diAppKeywords :: [Text]
      -- | keywords with type exception plus all the type names which
      -- are multiple words
-    ,diSpecialTypeNames :: [String]
+    ,diSpecialTypeNames :: [Text]
      -- | allow ansi fetch first syntax
     ,diFetchFirst :: Bool
      -- | allow limit keyword (mysql, postgres,
@@ -179,7 +181,7 @@ quoted. If you want to match one of these dialects exactly with this
 parser, I think it will be a lot of work.
 -}
 
-ansi2011ReservedKeywords :: [String]
+ansi2011ReservedKeywords :: [Text]
 ansi2011ReservedKeywords =
     [--"abs" -- function
      "all" -- keyword only?
@@ -508,7 +510,7 @@ ansi2011ReservedKeywords =
     ]
 
 
-ansi2011TypeNames :: [String]
+ansi2011TypeNames :: [Text]
 ansi2011TypeNames =
     ["double precision"
     ,"character varying"
diff --git a/Language/SQL/SimpleSQL/Errors.hs b/Language/SQL/SimpleSQL/Errors.hs
deleted file mode 100644
index 6bd67bd..0000000
--- a/Language/SQL/SimpleSQL/Errors.hs
+++ /dev/null
@@ -1,53 +0,0 @@
-
--- | helpers to work with parsec errors more nicely
-module Language.SQL.SimpleSQL.Errors
-    (ParseError(..)
-    --,formatError
-    ,convParseError
-    ) where
-
-import Text.Parsec (sourceColumn,sourceLine,sourceName,errorPos)
-import qualified Text.Parsec as P (ParseError)
-
--- | Type to represent parse errors.
-data ParseError = ParseError
-                  {peErrorString :: String
-                   -- ^ contains the error message
-                  ,peFilename :: FilePath
-                   -- ^ filename location for the error
-                  ,pePosition :: (Int,Int)
-                   -- ^ line number and column number location for the error
-                  ,peFormattedError :: String
-                   -- ^ formatted error with the position, error
-                   -- message and source context
-                  } deriving (Eq,Show)
-
-convParseError :: String -> P.ParseError -> ParseError
-convParseError src e =
-    ParseError
-    {peErrorString = show e
-    ,peFilename = sourceName p
-    ,pePosition = (sourceLine p, sourceColumn p)
-    ,peFormattedError = formatError src e}
-  where
-    p = errorPos e
-
-{-
-format the error more nicely: emacs format for positioning, plus
-context
--}
-
-formatError :: String -> P.ParseError -> String
-formatError src e =
-    sourceName p ++ ":" ++ show (sourceLine p)
-    ++ ":" ++ show (sourceColumn p) ++ ":"
-    ++ context
-    ++ show e
-  where
-    context =
-        let lns = take 1 $ drop (sourceLine p - 1) $ lines src
-        in case lns of
-             [x] -> "\n" ++ x ++ "\n"
-                    ++ replicate (sourceColumn p - 1) ' ' ++ "^\n"
-             _ -> ""
-    p = errorPos e
diff --git a/Language/SQL/SimpleSQL/Lex.hs b/Language/SQL/SimpleSQL/Lex.hs
index 103a354..f804d57 100644
--- a/Language/SQL/SimpleSQL/Lex.hs
+++ b/Language/SQL/SimpleSQL/Lex.hs
@@ -155,7 +155,7 @@ data Token
     | LineComment Text
     -- | A block comment, \/* stuff *\/, includes the comment delimiters
     | BlockComment Text
-      deriving (Eq,Show)
+      deriving (Eq,Show,Ord)
 
 ------------------------------------------------------------------------------
 
diff --git a/Language/SQL/SimpleSQL/Parse.hs b/Language/SQL/SimpleSQL/Parse.hs
index 40fc8f3..4d582b0 100644
--- a/Language/SQL/SimpleSQL/Parse.hs
+++ b/Language/SQL/SimpleSQL/Parse.hs
@@ -177,95 +177,139 @@ fixing them in the syntax but leaving them till the semantic checking
 -}
 
 {-# LANGUAGE TupleSections #-}
+{-# LANGUAGE LambdaCase        #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RecordWildCards   #-}
+{-# LANGUAGE TypeFamilies      #-}
 -- | This is the module with the parser functions.
 module Language.SQL.SimpleSQL.Parse
     (parseQueryExpr
     ,parseScalarExpr
     ,parseStatement
     ,parseStatements
-    ,ParseError(..)) where
+    ,ParseError(..)
+    ,prettyError
+    ) where
 
+import Text.Megaparsec
+    (Parsec
+    ,ParsecT
+    ,Stream(..)
+    ,PosState(..)
+    ,TraversableStream(..)
+    ,VisualStream(..)
+    ,ErrorItem(Tokens)
+
+    ,initialPos
+    ,sourceLine
+
+    ,ParseErrorBundle(..)
+    ,errorBundlePretty
+    ,parse
+
+    ,(<?>)
+    ,(<|>)
+    ,token
+    ,choice
+    ,eof
+    ,try
+    ,sepBy
+    ,sepBy1
+    ,optional
+    ,option
+    ,some
+    ,many
+    ,between
+    )
+import qualified Control.Monad.Combinators.Expr as E
+
+import qualified Data.List          as DL
+import qualified Data.List.NonEmpty as NE
+import qualified Data.Set           as Set
 import Control.Monad.Identity (Identity)
+import Data.Proxy (Proxy(..))
+import Data.Void (Void)
+
 import Control.Monad (guard, void)
 import Control.Applicative ((<**>))
 import Data.Char (toLower, isDigit)
-import Text.Parsec (setPosition,setSourceColumn,setSourceLine,getPosition
-                   ,option,between,sepBy,sepBy1
-                   ,try,many,many1,(<|>),choice,eof
-                   ,optionMaybe,optional,runParser
-                   ,chainl1, chainr1,(<?>))
-import Text.Parsec.Perm (permute,(<$?>), (<|?>))
-import Text.Parsec.Prim (getState, token)
-import Text.Parsec.Pos (newPos)
-import qualified Text.Parsec.Expr as E
 import Data.List (intercalate,sort,groupBy)
 import Data.Function (on)
-import Data.Maybe
-import Text.Parsec.String (GenParser)
+import Data.Maybe (catMaybes, isJust)
+import Data.Text (Text)
+import qualified Data.Text as T
 
 import Language.SQL.SimpleSQL.Syntax
-import Language.SQL.SimpleSQL.Combinators
-import Language.SQL.SimpleSQL.Errors
 import Language.SQL.SimpleSQL.Dialect
 import qualified Language.SQL.SimpleSQL.Lex as L
 
 
+------------------------------------------------------------------------------
+
 -- = Public API
 
 -- | Parses a query expr, trailing semicolon optional.
-parseQueryExpr :: Dialect
-                  -- ^ dialect of SQL to use
-               -> FilePath
-                  -- ^ filename to use in error messages
-               -> Maybe (Int,Int)
-                  -- ^ line number and column number of the first character
-                  -- in the source to use in error messages
-               -> String
-                  -- ^ the SQL source to parse
-               -> Either ParseError QueryExpr
+parseQueryExpr
+    :: Dialect
+    -- ^ dialect of SQL to use
+    -> Text
+    -- ^ filename to use in error messages
+    -> Maybe (Int,Int)
+    -- ^ line number and column number of the first character
+    -- in the source to use in error messages
+    -> Text
+    -- ^ the SQL source to parse
+    -> Either ParseError QueryExpr
 parseQueryExpr = wrapParse topLevelQueryExpr
 
 -- | Parses a statement, trailing semicolon optional.
-parseStatement :: Dialect
-                  -- ^ dialect of SQL to use
-               -> FilePath
-                  -- ^ filename to use in error messages
-               -> Maybe (Int,Int)
-                  -- ^ line number and column number of the first character
-                  -- in the source to use in error messages
-               -> String
-                  -- ^ the SQL source to parse
-               -> Either ParseError Statement
+parseStatement
+    :: Dialect
+    -- ^ dialect of SQL to use
+    -> Text
+    -- ^ filename to use in error messages
+    -> Maybe (Int,Int)
+    -- ^ line number and column number of the first character
+    -- in the source to use in error messages
+    -> Text
+    -- ^ the SQL source to parse
+    -> Either ParseError Statement
 parseStatement = wrapParse topLevelStatement
 
-
 -- | Parses a list of statements, with semi colons between
 -- them. The final semicolon is optional.
-parseStatements :: Dialect
-                  -- ^ dialect of SQL to use
-                -> FilePath
-                   -- ^ filename to use in error messages
-                -> Maybe (Int,Int)
-                   -- ^ line number and column number of the first character
-                   -- in the source to use in error messages
-                -> String
-                   -- ^ the SQL source to parse
-                -> Either ParseError [Statement]
+parseStatements
+    :: Dialect
+    -- ^ dialect of SQL to use
+    -> Text
+    -- ^ filename to use in error messages
+    -> Maybe (Int,Int)
+    -- ^ line number and column number of the first character
+    -- in the source to use in error messages
+    -> Text
+    -- ^ the SQL source to parse
+    -> Either ParseError [Statement]
 parseStatements = wrapParse statements
 
 -- | Parses a scalar expression.
-parseScalarExpr :: Dialect
-                   -- ^ dialect of SQL to use
-                -> FilePath
-                   -- ^ filename to use in error messages
-                -> Maybe (Int,Int)
-                   -- ^ line number and column number of the first character
-                   -- in the source to use in error messages
-                -> String
-                   -- ^ the SQL source to parse
-                -> Either ParseError ScalarExpr
+parseScalarExpr
+    :: Dialect
+    -- ^ dialect of SQL to use
+    -> Text
+    -- ^ filename to use in error messages
+    -> Maybe (Int,Int)
+    -- ^ line number and column number of the first character
+    -- in the source to use in error messages
+    -> Text
+    -- ^ the SQL source to parse
+    -> Either ParseError ScalarExpr
 parseScalarExpr = wrapParse scalarExpr
 
+type ParseError = ParseErrorBundle MyStream Void
+
+prettyError :: ParseError -> Text
+prettyError = T.pack . errorBundlePretty
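
With Errors.hs and its convParseError/formatError helpers deleted, error formatting now goes through megaparsec's errorBundlePretty via prettyError. A usage sketch of the new Text-based entry points follows; the module, main, and the TIO alias are invented for the example, and since several parsers in this commit are still stubbed out, only simple inputs will get all the way through.

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Text.IO as TIO
import Language.SQL.SimpleSQL.Parse (parseQueryExpr, prettyError)
import Language.SQL.SimpleSQL.Dialect (ansi2011)

main :: IO ()
main =
    -- the filename and the source are both plain Text now
    case parseQueryExpr ansi2011 "<example>" Nothing "select 1" of
        Left err  -> TIO.putStrLn (prettyError err)
        Right ast -> print ast
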
+
 {-
 This helper function takes the parser given and:
 
@@ -277,25 +321,26 @@ converts the error return to the nice wrapper
 
 wrapParse :: Parser a
           -> Dialect
-          -> FilePath
+          -> Text
           -> Maybe (Int,Int)
-          -> String
+          -> Text
           -> Either ParseError a
-wrapParse parser d f p src = undefined {-do
-    let (l,c) = fromMaybe (1,1) p
-    lx <- L.lexSQL d f (Just (l,c)) src
-    either (Left . convParseError src) Right
-      $ runParser (setPos p *> parser <* eof)
-                  d f $ filter keep lx
+wrapParse parser d f p src =
+    let lx = either (error . show) id $ L.lexSQL d f p src
+    in parse (parser <* (eof <?> "")) (T.unpack f)
+           $ MyStream (T.unpack src) $ filter notSpace lx
   where
-    setPos Nothing = pure ()
-    setPos (Just (l,c)) = fmap up getPosition >>= setPosition
-      where up = flip setSourceColumn c . flip setSourceLine l
-    keep (_,L.Whitespace {}) = False
-    keep (_,L.LineComment {}) = False
-    keep (_,L.BlockComment {}) = False
-    keep _ = True-}
+    notSpace = notSpace' . L.tokenVal
+    notSpace' (L.Whitespace {}) = False
+    notSpace' (L.LineComment {}) = False
+    notSpace' (L.BlockComment {}) = False
+    notSpace' _ = True
 
+------------------------------------------------------------------------------
+
+-- parsing code
+
+type Parser = Parsec Void MyStream
 
 {-
 ------------------------------------------------
@@ -336,8 +381,8 @@ u&"example quoted"
 
 name :: Parser Name
 name = do
-    d <- getState
-    uncurry Name <$> identifierTok (blacklist d)
+    bl <- queryDialect diKeywords
+    uncurry Name <$> identifierTok bl
 
 -- todo: replace (:[]) with a named function all over
 
@@ -489,7 +534,7 @@ typeName =
     precScaleTypeName = (comma *> unsignedInteger) <$$$> PrecScaleTypeName
     precLengthTypeName =
         Just <$> lobPrecSuffix
-        <**> (optionMaybe lobUnits <$$$$> PrecLengthTypeName)
+        <**> (optional lobUnits <$$$$> PrecLengthTypeName)
         <|> pure Nothing <**> ((Just <$> lobUnits) <$$$$> PrecLengthTypeName)
     timeTypeName = tz <$$$> TimeTypeName
     ----------------------------
@@ -512,14 +557,14 @@ typeName =
     tnSuffix = multiset <|> array
     multiset = MultisetTypeName <$ keyword_ "multiset"
     array = keyword_ "array" *>
-        (optionMaybe (brackets unsignedInteger) <$$> ArrayTypeName)
+        (optional (brackets unsignedInteger) <$$> ArrayTypeName)
     ----------------------------
     -- this parser handles the fixed set of multi word
     -- type names, plus all the type names which are
     -- reserved words
     reservedTypeNames = do
-        d <- getState
-        (:[]) . Name Nothing . unwords <$> makeKeywordTree (diSpecialTypeNames d)
+        stn <- queryDialect diSpecialTypeNames
+        (:[]) . Name Nothing . T.unwords <$> makeKeywordTree stn
         
 
 {-
@@ -565,7 +610,7 @@ parameter = choice
     [Parameter <$ questionMark
     ,HostParameter
      <$> hostParamTok
-     <*> optionMaybe (keyword "indicator" *> hostParamTok)]
+     <*> optional (keyword "indicator" *> hostParamTok)]
 
 -- == positional arg
 
@@ -597,9 +642,9 @@ syntax can start with the same keyword.
 
 caseExpr :: Parser ScalarExpr
 caseExpr =
-    Case <$> (keyword_ "case" *> optionMaybe scalarExpr)
-         <*> many1 whenClause
-         <*> optionMaybe elseClause
+    Case <$> (keyword_ "case" *> optional scalarExpr)
+         <*> some whenClause
+         <*> optional elseClause
          <* keyword_ "end"
   where
    whenClause = (,) <$> (keyword_ "when" *> commaSep1 scalarExpr)
@@ -627,7 +672,7 @@ convertSqlServer :: Parser ScalarExpr
 convertSqlServer = guardDialect diConvertFunction
                    *> keyword_ "convert" *>
                    parens (Convert <$> typeName <*> (comma *> scalarExpr)
-                      <*> optionMaybe (comma *> unsignedInteger))
+                      <*> optional (comma *> unsignedInteger))
 
 {-
 === exists, unique
@@ -691,10 +736,10 @@ this. also fix the monad -> applicative
 
 intervalLit :: Parser ScalarExpr
 intervalLit = try (keyword_ "interval" >> do
-    s <- optionMaybe $ choice [Plus <$ symbol_ "+"
+    s <- optional $ choice [Plus <$ symbol_ "+"
                               ,Minus <$ symbol_ "-"]
     lit <- singleQuotesOnlyStringTok
-    q <- optionMaybe intervalQualifier
+    q <- optional intervalQualifier
     mkIt s lit q)
   where
     mkIt Nothing val Nothing = pure $ TypedLit (TypeName [Name Nothing "interval"]) val
@@ -728,9 +773,9 @@ idenExpr =
     -- special cases for keywords that can be parsed as an iden or app
     keywordFunctionOrIden = try $ do
         x <- unquotedIdentifierTok [] Nothing
-        d <- getState
-        let i = map toLower x `elem` diIdentifierKeywords d
-            a = map toLower x `elem` diAppKeywords d
+        d <- queryDialect id
+        let i = T.toLower x `elem` diIdentifierKeywords d
+            a = T.toLower x `elem` diAppKeywords d
         case () of
             _  | i && a -> pure [Name Nothing x] <**> option Iden app
                | i -> pure (Iden [Name Nothing x])
@@ -755,9 +800,9 @@ data SpecialOpKFirstArg = SOKNone
                         | SOKOptional
                         | SOKMandatory
 
-specialOpK :: String -- name of the operator
+specialOpK :: Text -- name of the operator
            -> SpecialOpKFirstArg -- has a first arg without a keyword
-           -> [(String,Bool)] -- the other args with their keywords
+           -> [(Text,Bool)] -- the other args with their keywords
                               -- and whether they are optional
            -> Parser ScalarExpr
 specialOpK opName firstArg kws =
@@ -769,13 +814,13 @@ specialOpK opName firstArg kws =
               -- keyword as an identifier
               case (e,kws) of
                   (Iden [Name Nothing i], (k,_):_)
-                      | map toLower i == k ->
-                          fail $ "cannot use keyword here: " ++ i
-                  _ -> return ()
+                      | T.toLower i == k ->
+                          fail $ "cannot use keyword here: " ++ T.unpack i
+                  _ -> pure ()
               pure e
     fa <- case firstArg of
          SOKNone -> pure Nothing
-         SOKOptional -> optionMaybe (try pfa)
+         SOKOptional -> optional (try pfa)
          SOKMandatory -> Just <$> pfa
     as <- mapM parseArg kws
     void closeParen
@@ -785,7 +830,7 @@ specialOpK opName firstArg kws =
         let p = keyword_ nm >> scalarExpr
         in fmap (nm,) <$> if mand
                           then Just <$> p
-                          else optionMaybe (try p)
+                          else optional (try p)
 
 {-
 The actual operators:
@@ -881,7 +926,7 @@ app =
     [duplicates
      <**> (commaSep1 scalarExpr
            <**> (((option [] orderBy) <* closeParen)
-                 <**> (optionMaybe afilter <$$$$$> AggregateApp)))
+                 <**> (optional afilter <$$$$$> AggregateApp)))
      -- separate cases with no all or distinct which must have at
      -- least one scalar expr
     ,commaSep1 scalarExpr
@@ -892,7 +937,7 @@ app =
                          ,(Just <$> afilter) <$$$> aggAppWithoutDupeOrd
                          ,pure (flip App)]
           ,orderBy <* closeParen
-           <**> (optionMaybe afilter <$$$$> aggAppWithoutDupe)]
+           <**> (optional afilter <$$$$> aggAppWithoutDupe)]
      -- no scalarExprs: duplicates and order by not allowed
     ,([] <$ closeParen) <**> option (flip App) (window <|> withinGroup)
     ]
@@ -924,7 +969,7 @@ window :: Parser ([ScalarExpr] -> [Name] -> ScalarExpr)
 window =
   keyword_ "over" *> openParen *> option [] partitionBy
   <**> (option [] orderBy
-        <**> (((optionMaybe frameClause) <* closeParen) <$$$$$> WindowApp))
+        <**> (((optional frameClause) <* closeParen) <$$$$$> WindowApp))
   where
     partitionBy = keywords_ ["partition","by"] *> commaSep1 scalarExpr
     frameClause =
@@ -1062,7 +1107,7 @@ escapeSuffix = do
     escapeChar = (identifierTok [] Nothing <|> symbolTok Nothing) >>= oneOnly
     oneOnly :: String -> Parser Char
     oneOnly c = case c of
-                   [c'] -> return c'
+                   [c'] -> pure c'
                    _ -> fail "escape char must be single char"
 -}
 
@@ -1113,8 +1158,8 @@ wanted to avoid extensibility and to not be concerned with parse error
 messages, but both of these are too important.
 -}
 
-opTable :: Bool -> [[E.Operator [Token] ParseState Identity ScalarExpr]]
-opTable bExpr =
+opTable :: Bool -> [[E.Operator (ParsecT Void MyStream Identity) ScalarExpr]]
+opTable bExpr = [] {-
         [-- parse match and quantified comparisons as postfix ops
           -- todo: left factor the quantified comparison with regular
           -- binary comparison, somehow
@@ -1220,7 +1265,7 @@ opTable bExpr =
     -- ok: "x is not true is not true"
     -- no work: "x is not true is not null"
     prefix'  p = E.Prefix  . chainl1 p $ pure       (.)
-    postfix' p = E.Postfix . chainl1 p $ pure (flip (.))
+    postfix' p = E.Postfix . chainl1 p $ pure (flip (.))-}
 
 {-
 == scalar expression top level
@@ -1233,7 +1278,7 @@ documenting/fixing.
 -}
 
 scalarExpr :: Parser ScalarExpr
-scalarExpr = E.buildExpressionParser (opTable False) term
+scalarExpr = E.makeExprParser term (opTable False)
 
 term :: Parser ScalarExpr
 term = choice [simpleLiteral
@@ -1257,7 +1302,7 @@ term = choice [simpleLiteral
 -- expose the b expression for window frame clause range between
 
 scalarExprB :: Parser ScalarExpr
-scalarExprB = E.buildExpressionParser (opTable True) term
+scalarExprB = E.makeExprParser term (opTable True)
 
 {-
 == helper parsers
@@ -1268,21 +1313,21 @@ This is used in interval literals and in interval type names.
 intervalQualifier :: Parser (IntervalTypeField,Maybe IntervalTypeField)
 intervalQualifier =
     (,) <$> intervalField
-        <*> optionMaybe (keyword_ "to" *> intervalField)
+        <*> optional (keyword_ "to" *> intervalField)
   where
     intervalField =
         Itf
         <$> datetimeField
-        <*> optionMaybe
+        <*> optional
             (parens ((,) <$> unsignedInteger
-                         <*> optionMaybe (comma *> unsignedInteger)))
+                         <*> optional (comma *> unsignedInteger)))
 
 {-
 TODO: use datetime field in extract also
 use a data type for the datetime field?
 -}
 
-datetimeField :: Parser String
+datetimeField :: Parser Text
 datetimeField = choice (map keyword ["year","month","day"
                                     ,"hour","minute","second"])
                 <?> "datetime field"
@@ -1306,7 +1351,7 @@ duplicates =
 -}
 
 selectItem :: Parser (ScalarExpr,Maybe Name)
-selectItem = (,) <$> scalarExpr <*> optionMaybe als
+selectItem = (,) <$> scalarExpr <*> optional als
   where als = optional (keyword_ "as") *> name
 
 selectList :: Parser [(ScalarExpr,Maybe Name)]
@@ -1353,7 +1398,7 @@ from = keyword_ "from" *> commaSep1 tref
         (TRJoin t <$> option False (True <$ keyword_ "natural")
                   <*> joinType
                   <*> nonJoinTref
-                  <*> optionMaybe joinCondition)
+                  <*> optional joinCondition)
         >>= optionSuffix joinTrefSuffix
 
 {-
@@ -1385,7 +1430,7 @@ fromAlias :: Parser Alias
 fromAlias = Alias <$> tableAlias <*> columnAliases
   where
     tableAlias = optional (keyword_ "as") *> name
-    columnAliases = optionMaybe $ parens $ commaSep1 name
+    columnAliases = optional $ parens $ commaSep1 name
 
 {-
 == simple other parts
@@ -1433,8 +1478,8 @@ allows offset and fetch in either order
 -}
 
 offsetFetch :: Parser (Maybe ScalarExpr, Maybe ScalarExpr)
-offsetFetch = permute ((,) <$?> (Nothing, Just <$> offset)
-                           <|?> (Nothing, Just <$> fetch))
+offsetFetch = undefined {-permute ((,) <$?> (Nothing, Just <$> offset)
+                           <|?> (Nothing, Just <$> fetch))-}
 
 offset :: Parser ScalarExpr
 offset = keyword_ "offset" *> scalarExpr
@@ -1462,7 +1507,7 @@ with = keyword_ "with" >>
     withQuery = (,) <$> (withAlias <* keyword_ "as")
                     <*> parens queryExpr
     withAlias = Alias <$> name <*> columnAliases
-    columnAliases = optionMaybe $ parens $ commaSep1 name
+    columnAliases = optional $ parens $ commaSep1 name
 
 
 {-
@@ -1473,15 +1518,15 @@ and union, etc..
 -}
 
 queryExpr :: Parser QueryExpr
-queryExpr = choice
+queryExpr = select {-choice
     [with
-    ,chainr1 (choice [values,table, select]) setOp]
+    ,undefined {-chainr1-} (choice [values,table, select]) setOp]-}
   where
     select = keyword_ "select" >>
         mkSelect
         <$> option SQDefault duplicates
         <*> selectList
-        <*> optionMaybe tableExpression
+        <*> optional tableExpression
     mkSelect d sl Nothing =
         makeSelect{qeSetQuantifier = d, qeSelectList = sl}
     mkSelect d sl (Just (TableExpression f w g h od ofs fe)) =
@@ -1508,9 +1553,9 @@ data TableExpression
 
 tableExpression :: Parser TableExpression
 tableExpression = mkTe <$> from
-                       <*> optionMaybe whereClause
+                       <*> optional whereClause
                        <*> option [] groupByClause
-                       <*> optionMaybe having
+                       <*> optional having
                        <*> option [] orderBy
                        <*> offsetFetch
  where
@@ -1592,13 +1637,13 @@ createSchema = keyword_ "schema" >>
 
 createTable :: Parser Statement
 createTable = do 
-  d <- getState
+  d <- queryDialect id
   let 
     parseColumnDef = TableColumnDef <$> columnDef 
     parseConstraintDef = uncurry TableConstraintDef <$> tableConstraintDef
     separator = if diNonCommaSeparatedConstraints d
       then optional comma
-      else void comma
+      else Just <$> comma
     constraints = sepBy parseConstraintDef separator
     entries = ((:) <$> parseColumnDef <*> ((comma >> entries) <|> pure [])) <|> constraints
 
@@ -1618,8 +1663,8 @@ createIndex =
 
 columnDef :: Parser ColumnDef
 columnDef = ColumnDef <$> name <*> typeName
-            <*> optionMaybe defaultClause
-            <*> option [] (many1 colConstraintDef)
+            <*> optional defaultClause
+            <*> option [] (some colConstraintDef)
   where
     defaultClause = choice [
         keyword_ "default" >>
@@ -1638,7 +1683,7 @@ columnDef = ColumnDef <$> name <*> typeName
 tableConstraintDef :: Parser (Maybe [Name], TableConstraint)
 tableConstraintDef =
     (,)
-    <$> (optionMaybe (keyword_ "constraint" *> names))
+    <$> (optional (keyword_ "constraint" *> names))
     <*> (unique <|> primaryKey <|> check <|> references)
   where
     unique = keyword_ "unique" >>
@@ -1650,7 +1695,7 @@ tableConstraintDef =
         (\cs ft ftcs m (u,d) -> TableReferencesConstraint cs ft ftcs m u d)
         <$> parens (commaSep1 name)
         <*> (keyword_ "references" *> names)
-        <*> optionMaybe (parens $ commaSep1 name)
+        <*> optional (parens $ commaSep1 name)
         <*> refMatch
         <*> refActions
 
@@ -1661,8 +1706,8 @@ refMatch = option DefaultReferenceMatch
                     ,MatchPartial <$ keyword_ "partial"
                     ,MatchSimple <$ keyword_ "simple"])
 refActions :: Parser (ReferentialAction,ReferentialAction)
-refActions = permute ((,) <$?> (DefaultReferentialAction, onUpdate)
-                          <|?> (DefaultReferentialAction, onDelete))
+refActions = undefined {-permute ((,) <$?> (DefaultReferentialAction, onUpdate)
+                          <|?> (DefaultReferentialAction, onDelete))-}
   where
     -- todo: left factor?
     onUpdate = try (keywords_ ["on", "update"]) *> referentialAction
@@ -1678,7 +1723,7 @@ refActions = permute ((,) <$?> (DefaultReferentialAction, onUpdate)
 colConstraintDef :: Parser ColConstraintDef
 colConstraintDef =
     ColConstraintDef
-    <$> optionMaybe (keyword_ "constraint" *> names)
+    <$> optional (keyword_ "constraint" *> names)
     <*> (nullable <|> notNull <|> unique <|> primaryKey <|> check <|> references)
   where
     nullable = ColNullableConstraint <$ keyword "null"
@@ -1686,16 +1731,16 @@ colConstraintDef =
     unique = ColUniqueConstraint <$ keyword_ "unique"
     primaryKey = do
       keywords_ ["primary", "key"] 
-      d <- getState
+      d <- queryDialect id
       autoincrement <- if diAutoincrement d 
-        then optionMaybe (keyword_ "autoincrement")
+        then optional (keyword_ "autoincrement")
         else pure Nothing
       pure $ ColPrimaryKeyConstraint $ isJust autoincrement
     check = keyword_ "check" >> ColCheckConstraint <$> parens scalarExpr
     references = keyword_ "references" >>
         (\t c m (ou,od) -> ColReferencesConstraint t c m ou od)
         <$> names
-        <*> optionMaybe (parens name)
+        <*> optional (parens name)
         <*> refMatch
         <*> refActions
 
@@ -1712,7 +1757,7 @@ sequenceGeneratorOptions =
          -- such as cycle and nocycle
          -- sort out options which are sometimes not allowed
          -- as datatype, and restart with
-    permute ((\a b c d e f g h j k -> catMaybes [a,b,c,d,e,f,g,h,j,k])
+    undefined {-permute ((\a b c d e f g h j k -> catMaybes [a,b,c,d,e,f,g,h,j,k])
                   <$?> nj startWith
                   <|?> nj dataType
                   <|?> nj restart
@@ -1723,7 +1768,7 @@ sequenceGeneratorOptions =
                   <|?> nj noMinValue
                   <|?> nj scycle
                   <|?> nj noCycle
-                 )
+                 )-}
   where
     nj p = (Nothing,Just <$> p)
     startWith = keywords_ ["start", "with"] >>
@@ -1731,7 +1776,7 @@ sequenceGeneratorOptions =
     dataType = keyword_ "as" >>
                SGODataType <$> typeName
     restart = keyword_ "restart" >>
-              SGORestart <$> optionMaybe (keyword_ "with" *> signedInteger)
+              SGORestart <$> optional (keyword_ "with" *> signedInteger)
     incrementBy = keywords_ ["increment", "by"] >>
                 SGOIncrementBy <$> signedInteger
     maxValue = keyword_ "maxvalue" >>
@@ -1794,9 +1839,9 @@ createView =
     CreateView
     <$> (option False (True <$ keyword_ "recursive") <* keyword_ "view")
     <*> names
-    <*> optionMaybe (parens (commaSep1 name))
+    <*> optional (parens (commaSep1 name))
     <*> (keyword_ "as" *> queryExpr)
-    <*> optionMaybe (choice [
+    <*> optional (choice [
             -- todo: left factor
             DefaultCheckOption <$ try (keywords_ ["with", "check", "option"])
            ,CascadedCheckOption <$ try (keywords_ ["with", "cascaded", "check", "option"])
@@ -1812,10 +1857,10 @@ createDomain = keyword_ "domain" >>
     CreateDomain
     <$> names
     <*> (optional (keyword_ "as") *> typeName)
-    <*> optionMaybe (keyword_ "default" *> scalarExpr)
+    <*> optional (keyword_ "default" *> scalarExpr)
     <*> many con
   where
-    con = (,) <$> optionMaybe (keyword_ "constraint" *> names)
+    con = (,) <$> optional (keyword_ "constraint" *> names)
           <*> (keyword_ "check" *> parens scalarExpr)
 
 alterDomain :: Parser Statement
@@ -1828,7 +1873,7 @@ alterDomain = keyword_ "domain" >>
     setDefault = keywords_ ["set", "default"] >> ADSetDefault <$> scalarExpr
     constraint = keyword_ "add" >>
        ADAddConstraint
-       <$> optionMaybe (keyword_ "constraint" *> names)
+       <$> optional (keyword_ "constraint" *> names)
        <*> (keyword_ "check" *> parens scalarExpr)
     dropDefault = ADDropDefault <$ keyword_ "default"
     dropConstraint = keyword_ "constraint" >> ADDropConstraint <$> names
@@ -1874,8 +1919,8 @@ delete :: Parser Statement
 delete = keywords_ ["delete","from"] >>
     Delete
     <$> names
-    <*> optionMaybe (optional (keyword_ "as") *> name)
-    <*> optionMaybe (keyword_ "where" *> scalarExpr)
+    <*> optional (optional (keyword_ "as") *> name)
+    <*> optional (keyword_ "where" *> scalarExpr)
 
 truncateSt :: Parser Statement
 truncateSt = keywords_ ["truncate", "table"] >>
@@ -1889,7 +1934,7 @@ insert :: Parser Statement
 insert = keywords_ ["insert", "into"] >>
     Insert
     <$> names
-    <*> optionMaybe (parens $ commaSep1 name)
+    <*> optional (parens $ commaSep1 name)
     <*> (DefaultInsertValues <$ keywords_ ["default", "values"]
          <|> InsertQuery <$> queryExpr)
 
@@ -1897,9 +1942,9 @@ update :: Parser Statement
 update = keywords_ ["update"] >>
     Update
     <$> names
-    <*> optionMaybe (optional (keyword_ "as") *> name)
+    <*> optional (optional (keyword_ "as") *> name)
     <*> (keyword_ "set" *> commaSep1 setClause)
-    <*> optionMaybe (keyword_ "where" *> scalarExpr)
+    <*> optional (keyword_ "where" *> scalarExpr)
   where
     setClause = multipleSet <|> singleSet
     multipleSet = SetMultiple
@@ -1937,7 +1982,7 @@ commit = Commit <$ keyword_ "commit" <* optional (keyword_ "work")
 
 rollback :: Parser Statement
 rollback = keyword_ "rollback" >> optional (keyword_ "work") >>
-    Rollback <$> optionMaybe (keywords_ ["to", "savepoint"] *> name)
+    Rollback <$> optional (keywords_ ["to", "savepoint"] *> name)
 
 
 {-
@@ -2048,16 +2093,16 @@ There is probably a simpler way of doing this but I am a bit
 thick.
 -}
 
-makeKeywordTree :: [String] -> Parser [String]
+makeKeywordTree :: [Text] -> Parser [Text]
 makeKeywordTree sets =
-    parseTrees (sort $ map words sets)
+    parseTrees (sort $ map T.words sets)
   where
-    parseTrees :: [[String]] -> Parser [String]
+    parseTrees :: [[Text]] -> Parser [Text]
     parseTrees ws = do
-      let gs :: [[[String]]]
+      let gs :: [[[Text]]]
           gs = groupBy ((==) `on` safeHead) ws
       choice $ map parseGroup gs
-    parseGroup :: [[String]] -> Parser [String]
+    parseGroup :: [[Text]] -> Parser [Text]
     parseGroup l@((k:_):_) = do
         keyword_ k
         let tls = catMaybes $ map safeTail l
@@ -2071,108 +2116,80 @@ makeKeywordTree sets =
     safeTail (_:x) = Just x
     safeTail [] = Nothing
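
To make the grouping step concrete, a small worked example (keywordGroups is an invented name and the input list is only illustrative): alternatives that share a first word end up in the same group, so keyword_ is attempted only once for that shared word.

-- illustrative only
keywordGroups :: [[[Text]]]
keywordGroups =
    groupBy ((==) `on` safeHead) $ sort $ map T.words
        ["double precision", "character", "character varying"]
  where
    safeHead (x:_) = Just x
    safeHead [] = Nothing
-- keywordGroups ==
--   [ [["character"], ["character","varying"]]
--   , [["double","precision"]] ]
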
 
-{-
-------------------------------------------------
+------------------------------------------------------------------------------
 
-= lexing
+-- parser helpers
 
-TODO: push checks into here:
-keyword blacklists
-unsigned integer match
-symbol matching
-keyword matching
--}
+(<$$>) :: Applicative f =>
+      f b -> (a -> b -> c) -> f (a -> c)
+(<$$>) pa c = pa <**> pure (flip c)
 
-stringTok :: Parser (String,String,String)
-stringTok = undefined {-mytoken (\tok ->
-    case tok of
-      L.SqlString s e t -> Just (s,e,t)
-      _ -> Nothing)-}
+(<$$$>) :: Applicative f =>
+          f c -> (a -> b -> c -> t) -> f (b -> a -> t)
+p <$$$> c = p <**> pure (flip3 c)
 
-singleQuotesOnlyStringTok :: Parser String
-singleQuotesOnlyStringTok = undefined {-mytoken (\tok ->
-    case tok of
-      L.SqlString "'" "'" t -> Just t
-      _ -> Nothing)-}
+(<$$$$>) :: Applicative f =>
+          f d -> (a -> b -> c -> d -> t) -> f (c -> b -> a -> t)
+p <$$$$> c = p <**> pure (flip4 c)
+
+(<$$$$$>) :: Applicative f =>
+          f e -> (a -> b -> c -> d -> e -> t) -> f (d -> c -> b -> a -> t)
+p <$$$$$> c = p <**> pure (flip5 c)
+
+optionSuffix :: (a -> Parser a) -> a -> Parser a
+optionSuffix p a = option a (p a)
 
 {-
-This is to support SQL strings where you can write
-'part of a string' ' another part'
-and it will parse as a single string
+parses an optional postfix element and applies its result to its left
+hand result, taken from uu-parsinglib
 
-It is only allowed when all the strings are quoted with ' atm.
+TODO: make sure the precedence is higher than <|> and lower than the
+other operators so it can be used nicely
 -}
 
-stringTokExtend :: Parser (String,String,String)
-stringTokExtend = undefined {-do
-    (s,e,x) <- stringTok
-    choice [
-         do
-         guard (s == "'" && e == "'")
-         (s',e',y) <- stringTokExtend
-         guard (s' == "'" && e' == "'")
-         return $ (s,e,x ++ y)
-        ,return (s,e,x)
-        ]-}
+(<??>) :: Parser a -> Parser (a -> a) -> Parser a
+p <??> q = p <**> option id q
 
-hostParamTok :: Parser String
-hostParamTok = undefined {-mytoken (\tok ->
-    case tok of
-      L.PrefixedVariable c p -> Just (c:p)
-      _ -> Nothing)-}
+(<??.>) :: Parser (a -> a) -> Parser (a -> a) -> Parser (a -> a)
+(<??.>) pa pb = (.) `c` pa <*> option id pb
+  -- todo: fix this mess
+  where c = (<$>) . flip
 
-positionalArgTok :: Parser Int
-positionalArgTok = undefined {-mytoken (\tok ->
-    case tok of
-      L.PositionalArg p -> Just p
-      _ -> Nothing)-}
+-- 0 to many repeated applications of suffix parser
 
+(<??*>) :: Parser a -> Parser (a -> a) -> Parser a
+p <??*> q = foldr ($) <$> p <*> (reverse <$> many q)
 
-sqlNumberTok :: Bool -> Parser String
-sqlNumberTok intOnly = undefined {-mytoken (\tok ->
-    case tok of
-      L.SqlNumber p | not intOnly || all isDigit p -> Just p
-      _ -> Nothing)-}
+{-
+These are to help with left factored parsers:
 
+a <**> (b <**> (c <**> pure (flip3 ctor)))
 
-symbolTok :: Maybe String -> Parser String
-symbolTok sym = undefined {-mytoken (\tok ->
-    case (sym,tok) of
-      (Nothing, L.Symbol p) -> Just p
-      (Just s, L.Symbol p) | s == p -> Just p
-      _ -> Nothing)-}
+Not sure the names are correct, but they follow a pattern with flip
+a <**> (b <**> pure (flip ctor))
+-}
 
-identifierTok :: [String] -> Parser (Maybe (String,String), String)
-identifierTok blackList = undefined {-mytoken (\tok ->
-    case tok of
-      L.Identifier q@(Just {}) p -> Just (q,p)
-      L.Identifier q p | map toLower p `notElem` blackList -> Just (q,p)
-      _ -> Nothing)-}
+flip3 :: (a -> b -> c -> t) -> c -> b -> a -> t
+flip3 f a b c = f c b a
 
-unquotedIdentifierTok :: [String] -> Maybe String -> Parser String
-unquotedIdentifierTok blackList kw = undefined {-mytoken (\tok ->
-    case (kw,tok) of
-      (Nothing, L.Identifier Nothing p) | map toLower p `notElem` blackList -> Just p
-      (Just k, L.Identifier Nothing p) | k == map toLower p -> Just p
-      _ -> Nothing)-}
+flip4 :: (a -> b -> c -> d -> t) -> d -> c -> b -> a -> t
+flip4 f a b c d = f d c b a
 
-mytoken :: (L.Token -> Maybe a) -> Parser a
-mytoken test = token showToken posToken testToken
-  where
-    showToken (_,tok)   = show tok
-    posToken  ((a,b,c),_)  = newPos a b c
-    testToken (_,tok)   = test tok
+flip5 :: (a -> b -> c -> d -> e -> t) -> e -> d -> c -> b -> a -> t
+flip5 f a b c d e = f e d c b a
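
To make these concrete, a small sketch (Triple, plainTriple and factoredTriple are invented for illustration): the left-factored form parses its pieces in the same order and builds the same value as the ordinary applicative form, but leaves the first parser free to be shared as a common prefix between alternatives.

-- illustrative only
data Triple = Triple Integer Integer Integer

plainTriple, factoredTriple :: Parser Triple
plainTriple = Triple <$> unsignedInteger <*> unsignedInteger <*> unsignedInteger
factoredTriple =
    unsignedInteger <**> (unsignedInteger <**> (unsignedInteger <$$$> Triple))
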
+
+--------------------------------------
 
 unsignedInteger :: Parser Integer
-unsignedInteger = read <$> sqlNumberTok True <?> "natural number"
+unsignedInteger = read . T.unpack <$> sqlNumberTok True <?> "natural number"
 
 -- todo: work out the symbol parsing better
 
-symbol :: String -> Parser String
-symbol s = symbolTok (Just s) <?> s
+symbol :: Text -> Parser Text
+symbol s = symbolTok (Just s) <?> (T.unpack s)
 
 singleCharSymbol :: Char -> Parser Char
-singleCharSymbol c = c <$ symbol [c]
+singleCharSymbol c = c <$ symbol (T.singleton c)
 
 questionMark :: Parser Char
 questionMark = singleCharSymbol '?' <?> "question mark"
@@ -2198,13 +2215,13 @@ semi = singleCharSymbol ';'
 
 -- = helper functions
 
-keyword :: String -> Parser String
-keyword k = unquotedIdentifierTok [] (Just k) <?> k
+keyword :: Text -> Parser Text
+keyword k = unquotedIdentifierTok [] (Just k) <?> (T.unpack k)
 
 -- helper function to improve error messages
 
-keywords_ :: [String] -> Parser ()
-keywords_ ks = mapM_ keyword_ ks <?> intercalate " " ks
+keywords_ :: [Text] -> Parser ()
+keywords_ ks = mapM_ keyword_ ks <?> (T.unpack (T.unwords ks))
 
 
 parens :: Parser a -> Parser a
@@ -2216,20 +2233,94 @@ brackets = between openBracket closeBracket
 commaSep :: Parser a -> Parser [a]
 commaSep = (`sepBy` comma)
 
-keyword_ :: String -> Parser ()
+keyword_ :: Text -> Parser ()
 keyword_ = void . keyword
 
-symbol_ :: String -> Parser ()
+symbol_ :: Text -> Parser ()
 symbol_ = void . symbol
 
 commaSep1 :: Parser a -> Parser [a]
 commaSep1 = (`sepBy1` comma)
 
-blacklist :: Dialect -> [String]
-blacklist d = diKeywords d
+------------------------------------------------------------------------------
+
+-- interfacing with the lexing
+{-
+TODO: push checks into here:
+keyword blacklists
+unsigned integer match
+symbol matching
+keyword matching
+
+-}
+stringTok :: Parser (Text,Text,Text)
+stringTok = token test Set.empty <?> "string literal"
+  where
+    test (L.WithPos _ _ _ (L.SqlString s e t)) = Just (s,e,t)
+    test _ = Nothing
+
+singleQuotesOnlyStringTok :: Parser Text
+singleQuotesOnlyStringTok = token test Set.empty <?> "string literal"
+  where
+    test (L.WithPos _ _ _ (L.SqlString "'" "'" t)) = Just t
+    test _ = Nothing
 
 {-
-These blacklisted names are mostly needed when we parse something with
+This is to support SQL strings where you can write
+'part of a string' ' another part'
+and it will parse as a single string
+
+It is only allowed when all the strings are quoted with ' atm.
+-}
+
+stringTokExtend :: Parser (Text,Text,Text)
+stringTokExtend = do
+    (s,e,x) <- stringTok
+    choice [
+         do
+         guard (s == "'" && e == "'")
+         (s',e',y) <- stringTokExtend
+         guard (s' == "'" && e' == "'")
+         pure $ (s,e,x <> y)
+        ,pure (s,e,x)
+        ]
+
+hostParamTok :: Parser Text
+hostParamTok = token test Set.empty <?> ""
+  where
+    test (L.WithPos _ _ _ (L.PrefixedVariable c p)) = Just $ T.cons c p
+    test _ = Nothing
+
+positionalArgTok :: Parser Int
+positionalArgTok = token test Set.empty <?> ""
+  where
+    test (L.WithPos _ _ _ (L.PositionalArg p)) = Just p
+    test _ = Nothing
+
+sqlNumberTok :: Bool -> Parser Text
+sqlNumberTok intOnly = token test Set.empty <?> ""
+  where
+    test (L.WithPos _ _ _ (L.SqlNumber p)) | not intOnly || T.all isDigit p = Just p
+    test _ = Nothing
+
+symbolTok :: Maybe Text -> Parser Text
+symbolTok sym = token test Set.empty <?> ""
+  where
+    test (L.WithPos _ _ _ (L.Symbol p)) =
+        case sym of
+            Nothing -> Just p
+            Just sym' | sym' == p -> Just p
+            _ -> Nothing
+    test _ = Nothing
+
+{-
+The blacklisted names are mostly needed when we parse something with
 an optional alias, e.g. select a a from t. If we write select a from
 t, we have to make sure the from isn't parsed as an alias. I'm not
 sure what other places strictly need the blacklist, and in theory it
@@ -2262,27 +2353,120 @@ keyword from this list, and still want to parse statements using it
 as a keyword - for instance, removing things like 'from' or 'as',
 will likely mean many things don't parse anymore.
 
-
-
------------
-
-Used to make the dialect available during parsing so different parsers
-can be used for different dialects. Not sure if this is the best way
-to do it, but it's convenient
 -}
 
-type ParseState = Dialect
+identifierTok :: [Text] -> Parser (Maybe (Text,Text), Text)
+identifierTok blackList = token test Set.empty <?> ""
+  where
+    test (L.WithPos _ _ _ (L.Identifier q@(Just {}) p)) = Just (q,p)
+    test (L.WithPos _ _ _ (L.Identifier q p))
+        | T.toLower p `notElem` blackList = Just (q,p)
+    test _ = Nothing
 
-type Token = ((String,Int,Int),L.Token)
+unquotedIdentifierTok :: [Text] -> Maybe Text -> Parser Text
+unquotedIdentifierTok blackList kw = token test Set.empty <?> ""
+  where
+    test (L.WithPos _ _ _ (L.Identifier Nothing p)) =
+        case kw of
+            Nothing | T.toLower p `notElem` blackList -> Just p
+            Just k | k == T.toLower p -> Just p
+            _ -> Nothing
+    test _ = Nothing
 
-type Parser = GenParser Token ParseState
+------------------------------------------------------------------------------
+
+-- dialect
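+-- These are stubbed because the old implementations read the Dialect from
+-- parsec's user state (the ParseState alias removed above), and the plain
+-- Parsec Void MyStream monad has no user state; presumably the dialect will
+-- need to be threaded through some other way (for example a ReaderT layer)
+-- before they can be restored.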
 
 guardDialect :: (Dialect -> Bool) -> Parser ()
-guardDialect f = do
-    d <- getState
-    guard (f d)
+guardDialect = error "guardDialect"
+
+queryDialect :: (Dialect -> a) -> Parser a
+queryDialect = error "queryDialect"
+
+
+------------------------------------------------------------------------------
+
+-- megaparsec stream boilerplate
+
+data MyStream = MyStream
+  { myStreamInput :: String
+  , unMyStream :: [L.WithPos L.Token]
+  }
+
+instance Stream MyStream where
+  type Token  MyStream = L.WithPos L.Token
+  type Tokens MyStream = [L.WithPos L.Token]
+
+  tokenToChunk Proxy x = [x]
+  tokensToChunk Proxy xs = xs
+  chunkToTokens Proxy = id
+  chunkLength Proxy = length
+  chunkEmpty Proxy = null
+  take1_ (MyStream _ []) = Nothing
+  take1_ (MyStream str (t:ts)) = Just
+    ( t
+    , MyStream (drop (tokensLength pxy (t NE.:|[])) str) ts
+    )
+  takeN_ n (MyStream str s)
+    | n <= 0    = Just ([], MyStream str s)
+    | null s    = Nothing
+    | otherwise =
+        let (x, s') = splitAt n s
+        in case NE.nonEmpty x of
+          Nothing -> Just (x, MyStream str s')
+          Just nex -> Just (x, MyStream (drop (tokensLength pxy nex) str) s')
+  takeWhile_ f (MyStream str s) =
+    let (x, s') = DL.span f s
+    in case NE.nonEmpty x of
+      Nothing -> (x, MyStream str s')
+      Just nex -> (x, MyStream (drop (tokensLength pxy nex) str) s')
+
+instance VisualStream MyStream where
+  showTokens Proxy = DL.intercalate " "
+    . NE.toList
+    . fmap (showMyToken . L.tokenVal)
+  tokensLength Proxy xs = sum (L.tokenLength <$> xs)
+
+instance TraversableStream MyStream where
+  reachOffset o PosState {..} =
+    ( Just (prefix ++ restOfLine)
+    , PosState
+        { pstateInput = MyStream
+            { myStreamInput = postStr
+            , unMyStream = post
+            }
+        , pstateOffset = max pstateOffset o
+        , pstateSourcePos = newSourcePos
+        , pstateTabWidth = pstateTabWidth
+        , pstateLinePrefix = prefix
+        }
+    )
+    where
+      prefix =
+        if sameLine
+          then pstateLinePrefix ++ preLine
+          else preLine
+      sameLine = sourceLine newSourcePos == sourceLine pstateSourcePos
+      newSourcePos =
+        case post of
+          [] -> case unMyStream pstateInput of
+            [] -> pstateSourcePos
+            xs -> L.endPos (last xs)
+          (x:_) -> L.startPos x
+      (pre, post) = splitAt (o - pstateOffset) (unMyStream pstateInput)
+      (preStr, postStr) = splitAt tokensConsumed (myStreamInput pstateInput)
+      preLine = reverse . takeWhile (/= '\n') . reverse $ preStr
+      tokensConsumed =
+        case NE.nonEmpty pre of
+          Nothing -> 0
+          Just nePre -> tokensLength pxy nePre
+      restOfLine = takeWhile (/= '\n') postStr
+
+pxy :: Proxy MyStream
+pxy = Proxy
+
+showMyToken :: L.Token -> String
+-- todo: how to do this properly?
+showMyToken = T.unpack . L.prettyToken ansi2011
+
 
-{-
-The dialect stuff could also be used for custom options: e.g. to only
-parse dml for instance.
--}
diff --git a/Language/SQL/SimpleSQL/Pretty.hs b/Language/SQL/SimpleSQL/Pretty.hs
index 8774f26..1d942b6 100644
--- a/Language/SQL/SimpleSQL/Pretty.hs
+++ b/Language/SQL/SimpleSQL/Pretty.hs
@@ -15,6 +15,9 @@ TODO: there should be more comments in this file, especially the bits
 which have been changed to try to improve the layout of the output.
 -}
 
+import Prelude hiding (show)
+import qualified Prelude as P
+
 import Prettyprinter (Doc
                      ,parens
                      ,nest
@@ -31,24 +34,24 @@ import Prettyprinter (Doc
                      )
 import qualified Prettyprinter as P
 
-import Prettyprinter.Render.Text (renderLazy)
+import Prettyprinter.Render.Text (renderStrict)
 
 import Data.Maybe (maybeToList, catMaybes)
 import Data.List (intercalate)
 
-import qualified Data.Text.Lazy as L
 import qualified Data.Text as T
+import Data.Text (Text)
 
 import Language.SQL.SimpleSQL.Syntax
 import Language.SQL.SimpleSQL.Dialect
 
 
 -- | Convert a query expr ast to concrete syntax.
-prettyQueryExpr :: Dialect -> QueryExpr -> String
+prettyQueryExpr :: Dialect -> QueryExpr -> Text
 prettyQueryExpr d = render . queryExpr d
 
 -- | Convert a value expr ast to concrete syntax.
-prettyScalarExpr :: Dialect -> ScalarExpr -> String
+prettyScalarExpr :: Dialect -> ScalarExpr -> Text
 prettyScalarExpr d = render . scalarExpr d
 
 -- | A terminating semicolon.
@@ -56,20 +59,20 @@ terminator :: Doc a
 terminator = pretty ";\n"
 
 -- | Convert a statement ast to concrete syntax.
-prettyStatement :: Dialect -> Statement -> String
+prettyStatement :: Dialect -> Statement -> Text
 prettyStatement _ EmptyStatement = render terminator
 prettyStatement d s = render (statement d s)
 
 -- | Convert a list of statements to concrete syntax. A semicolon
 -- is inserted after each statement.
-prettyStatements :: Dialect -> [Statement] -> String
+prettyStatements :: Dialect -> [Statement] -> Text
 prettyStatements d = render . vsep . map prettyStatementWithSemicolon
   where
     prettyStatementWithSemicolon :: Statement -> Doc a
     prettyStatementWithSemicolon s = statement d s <> terminator
 
-render :: Doc a -> String -- L.Text
-render = L.unpack . renderLazy . layoutPretty defaultLayoutOptions
+render :: Doc a -> Text
+render = renderStrict . layoutPretty defaultLayoutOptions
 
 -- = scalar expressions
 
@@ -88,7 +91,7 @@ scalarExpr _ (IntervalLit s v f t) =
 scalarExpr _ (Iden i) = names i
 scalarExpr _ Star = pretty "*"
 scalarExpr _ Parameter = pretty "?"
-scalarExpr _ (PositionalArg n) = pretty $ "$" ++ show n
+scalarExpr _ (PositionalArg n) = pretty $ T.cons '$' $ show n
 scalarExpr _ (HostParameter p i) =
     pretty p
     <+> me (\i' -> pretty "indicator" <+> pretty i') i
@@ -140,7 +143,7 @@ scalarExpr dia (SpecialOp nm [a,b,c]) | nm `elem` [[Name Nothing "between"]
                                                  ,[Name Nothing "not between"]] =
   sep [scalarExpr dia a
       ,names nm <+> scalarExpr dia b
-      ,nest (length (unnames nm) + 1) $ pretty "and" <+> scalarExpr dia c]
+      ,nest (T.length (unnames nm) + 1) $ pretty "and" <+> scalarExpr dia c]
 
 scalarExpr d (SpecialOp [Name Nothing "rowctor"] as) =
     parens $ commaSep $ map (scalarExpr d) as
@@ -164,7 +167,7 @@ scalarExpr d e@(BinOp _ op _) | op `elem` [[Name Nothing "and"]
                        : map ((names op <+>) . scalarExpr d) es)
       [] -> mempty -- shouldn't be possible
   where
-    ands (BinOp a op' b) | op == op' = ands a ++ ands b
+    ands (BinOp a op' b) | op == op' = ands a <> ands b
     ands x = [x]
 -- special case for . we don't use whitespace
 scalarExpr d (BinOp e0 [Name Nothing "."] e1) =
@@ -174,9 +177,9 @@ scalarExpr d (BinOp e0 f e1) =
 
 scalarExpr dia (Case t ws els) =
     sep $ [pretty "case" <+> me (scalarExpr dia) t]
-          ++ map w ws
-          ++ maybeToList (fmap e els)
-          ++ [pretty "end"]
+          <> map w ws
+          <> maybeToList (fmap e els)
+          <> [pretty "end"]
   where
     w (t0,t1) =
       pretty "when" <+> nest 5 (commaSep $ map (scalarExpr dia) t0)
@@ -261,7 +264,7 @@ scalarExpr _ (NextValueFor ns) =
     pretty "next value for" <+> names ns
 
 scalarExpr d (VEComment cmt v) =
-    vsep $ map comment cmt ++ [scalarExpr d v]
+    vsep $ map comment cmt <> [scalarExpr d v]
 
 scalarExpr _ (OdbcLiteral t s) =
     pretty "{" <> lt t <+> squotes (pretty s) <> pretty "}"
@@ -278,13 +281,13 @@ scalarExpr d (Convert t e Nothing) =
 scalarExpr d (Convert t e (Just i)) =
     pretty "convert(" <> typeName t <> pretty "," <+> scalarExpr d e <> pretty "," <+> pretty (show i) <> pretty ")"
 
-unname :: Name -> String
+unname :: Name -> Text
 unname (Name Nothing n) = n
 unname (Name (Just (s,e)) n) =
-    s ++ n ++ e
+    s <> n <> e
 
-unnames :: [Name] -> String
-unnames ns = intercalate "." $ map unname ns
+unnames :: [Name] -> Text
+unnames ns = T.intercalate "." $ map unname ns
 
 
 name :: Name -> Doc a
@@ -404,7 +407,7 @@ queryExpr d (Values vs) =
     <+> nest 7 (commaSep (map (parens . commaSep . map (scalarExpr d)) vs))
 queryExpr _ (Table t) = pretty "table" <+> names t
 queryExpr d (QEComment cmt v) =
-    vsep $ map comment cmt ++ [queryExpr d v]
+    vsep $ map comment cmt <> [queryExpr d v]
 
 
 alias :: Alias -> Doc a
@@ -450,10 +453,10 @@ from d ts =
         pretty "using" <+> parens (commaSep $ map name es)
     joinCond Nothing = mempty
 
-maybeScalarExpr :: Dialect -> String -> Maybe ScalarExpr -> Doc a
+maybeScalarExpr :: Dialect -> Text -> Maybe ScalarExpr -> Doc a
 maybeScalarExpr d k = me
       (\e -> sep [pretty k
-                 ,nest (length k + 1) $ scalarExpr d e])
+                 ,nest (T.length k + 1) $ scalarExpr d e])
 
 grpBy :: Dialect -> [GroupingExpr] -> Doc a
 grpBy _ [] = mempty
@@ -725,7 +728,7 @@ columnDef d (ColumnDef n t mdef cons) =
     pcon ColNullableConstraint = texts ["null"]
     pcon ColUniqueConstraint = pretty "unique"
     pcon (ColPrimaryKeyConstraint autoincrement) = 
-      texts $ ["primary","key"] ++ ["autoincrement"|autoincrement]
+      texts $ ["primary","key"] <> ["autoincrement"|autoincrement]
     --pcon ColPrimaryKeyConstraint = texts ["primary","key"]
     pcon (ColCheckConstraint v) = pretty "check" <+> parens (scalarExpr d v)
     pcon (ColReferencesConstraint tb c m u del) =
@@ -757,7 +760,7 @@ refMatch m = case m of
                      MatchPartial -> texts ["match","partial"]
                      MatchSimple -> texts ["match", "simple"]
 
-refAct :: String -> ReferentialAction -> Doc a
+refAct :: Text -> ReferentialAction -> Doc a
 refAct t a = case a of
                      DefaultReferentialAction -> mempty
                      RefCascade -> texts ["on", t, "cascade"]
@@ -863,11 +866,14 @@ me = maybe mempty
 comment :: Comment -> Doc a
 comment (BlockComment str) = pretty "/*" <+> pretty str <+> pretty "*/"
 
-texts :: [String] -> Doc a
+texts :: [Text] -> Doc a
 texts ts = sep $ map pretty ts
 
 -- regular pretty completely defeats the type checker when you want
 -- to change the ast and get type errors, instead it just produces
 -- incorrect code.
-pretty :: String -> Doc a
-pretty = P.pretty . T.pack
+pretty :: Text -> Doc a
+pretty = P.pretty
+
+show :: Show a => a -> Text
+show = T.pack . P.show
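
Putting the Text-based parsing and pretty printing together, a hedged round-trip sketch (roundTrip and the import list are invented for the example):

{-# LANGUAGE OverloadedStrings #-}
import Data.Text (Text)
import Language.SQL.SimpleSQL.Parse (ParseError, parseQueryExpr)
import Language.SQL.SimpleSQL.Pretty (prettyQueryExpr)
import Language.SQL.SimpleSQL.Dialect (ansi2011)

-- parse a query and print it back out, all in strict Text
roundTrip :: Text -> Either ParseError Text
roundTrip src = prettyQueryExpr ansi2011 <$> parseQueryExpr ansi2011 "" Nothing src
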
diff --git a/Language/SQL/SimpleSQL/Syntax.hs b/Language/SQL/SimpleSQL/Syntax.hs
index d33bc31..708f555 100644
--- a/Language/SQL/SimpleSQL/Syntax.hs
+++ b/Language/SQL/SimpleSQL/Syntax.hs
@@ -62,6 +62,8 @@ module Language.SQL.SimpleSQL.Syntax
     ,Comment(..)
     ) where
 
+import Data.Text (Text)
+
 import Data.Data
 
 -- | Represents a value expression. This is used for the expressions
@@ -82,21 +84,21 @@ data ScalarExpr
       -- * 1e5
       --
       -- * 12.34e-6
-      NumLit String
+      NumLit Text
       -- | string literal, with the start and end quote
-      -- e.g. 'test' -> StringLit "'" "'" "test"
-    | StringLit String String String
+      -- e.g. 'test' -> StringLit "'" "'" "test"
+    | StringLit Text Text Text
       -- | text of interval literal, units of interval precision,
       -- e.g. interval 3 days (3)
     | IntervalLit
       {ilSign :: Maybe Sign -- ^ if + or - used
-      ,ilLiteral :: String -- ^ literal text
+      ,ilLiteral :: Text -- ^ literal text
       ,ilFrom :: IntervalTypeField
       ,ilTo :: Maybe IntervalTypeField
       }
 
       -- | prefix 'typed literal', e.g. int '42'
-    | TypedLit TypeName String
+    | TypedLit TypeName Text
 
       -- | identifier with parts separated by dots
     | Iden [Name]
@@ -105,9 +107,9 @@ data ScalarExpr
 
     | Parameter -- ^ Represents a ? in a parameterized query
     | PositionalArg Int -- ^ Represents an e.g. $1 in a parameterized query
-    | HostParameter String (Maybe String) -- ^ represents a host
+    | HostParameter Text (Maybe Text) -- ^ represents a host
                                           -- parameter, e.g. :a. The
-                                          -- Maybe String is for the
+                                          -- Maybe Text is for the
                                           -- indicator, e.g. :var
                                           -- indicator :nl
 
@@ -163,7 +165,7 @@ data ScalarExpr
       -- of commas. The maybe is for the first unnamed argument
       -- if it is present, and the list is for the keyword argument
       -- pairs.
-    | SpecialOpK [Name] (Maybe ScalarExpr) [(String,ScalarExpr)]
+    | SpecialOpK [Name] (Maybe ScalarExpr) [(Text,ScalarExpr)]
 
       -- | cast(a as typename)
     | Cast ScalarExpr TypeName
@@ -215,7 +217,7 @@ in other places
     | MultisetQueryCtor QueryExpr
     | NextValueFor [Name]
     | VEComment [Comment] ScalarExpr
-    | OdbcLiteral OdbcLiteralType String
+    | OdbcLiteral OdbcLiteralType Text
       -- ^ an odbc literal e.g. {d '2000-01-01'}
     | OdbcFunc ScalarExpr
       -- ^ an odbc function call e.g. {fn CHARACTER_LENGTH('test')}
@@ -228,7 +230,7 @@ in other places
 -- * "test" -> Name (Just "\"","\"") "test"
 -- * `something` -> Name (Just ("`","`") "something"
 -- * [ms] -> Name (Just ("[","]") "ms"
-data Name = Name (Maybe (String,String)) String
+data Name = Name (Maybe (Text,Text)) Text
             deriving (Eq,Show,Read,Data,Typeable)
 
 -- | Represents a type name, used in casts.
@@ -246,7 +248,7 @@ data TypeName
     | MultisetTypeName TypeName
       deriving (Eq,Show,Read,Data,Typeable)
 
-data IntervalTypeField = Itf String (Maybe (Integer, Maybe Integer))
+data IntervalTypeField = Itf Text (Maybe (Integer, Maybe Integer))
                          deriving (Eq,Show,Read,Data,Typeable)
 
 data Sign = Plus | Minus
@@ -739,6 +741,6 @@ data PrivilegeAction =
 
 -- | Comment. Useful when generating SQL code programmatically. The
 -- parser doesn't produce these.
-newtype Comment = BlockComment String
+newtype Comment = BlockComment Text
                deriving (Eq,Show,Read,Data,Typeable)
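
With the AST fields switched to Text, hand-built syntax values only need OverloadedStrings (or explicit T.pack calls); a small illustrative value, with astExample an invented name:

{-# LANGUAGE OverloadedStrings #-}
import Language.SQL.SimpleSQL.Syntax

-- represents the expression: a + 1
astExample :: ScalarExpr
astExample = BinOp (Iden [Name Nothing "a"]) [Name Nothing "+"] (NumLit "1")
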
 
diff --git a/simple-sql-parser.cabal b/simple-sql-parser.cabal
index e4a2a6f..31612eb 100644
--- a/simple-sql-parser.cabal
+++ b/simple-sql-parser.cabal
@@ -31,11 +31,6 @@ Flag parserexe
   Description: Build SimpleSqlParserTool exe
   Default:     False
 
-Flag fixitytest
-  Description: Build fixity test exe
-  Default:     False
-  
-
 common shared-properties
   default-language:    Haskell2010
   build-depends:       base >=4 && <5,
@@ -44,7 +39,8 @@ common shared-properties
                        parsec,
                        mtl >=2.1 && <2.4,
                        prettyprinter >= 1.7 && < 1.8,
-                       text  >= 2.1 && < 2.2
+                       text  >= 2.1 && < 2.2,
+                       containers
 
   ghc-options:         -Wall
 
@@ -56,8 +52,6 @@ library
                        Language.SQL.SimpleSQL.Lex,
                        Language.SQL.SimpleSQL.Syntax,
                        Language.SQL.SimpleSQL.Dialect
-  Other-Modules:       Language.SQL.SimpleSQL.Errors,
-                       Language.SQL.SimpleSQL.Combinators
 
 Test-Suite Tests
   import:              shared-properties
@@ -104,17 +98,3 @@ executable SimpleSqlParserTool
     buildable:         True
   else
     buildable:         False
-
-executable Fixity
-  import:              shared-properties
-  main-is:             Fixity.hs
-  hs-source-dirs:      tools
-  Build-Depends:       simple-sql-parser,
-                       pretty-show >= 1.6 && < 1.10,
-                       tasty >= 1.1 && < 1.6,
-                       tasty-hunit >= 0.9 && < 0.11
-  if flag(fixitytest)
-    buildable:         True
-  else
-    buildable:         False
-
diff --git a/tools/Fixity.hs b/tools/Fixity.hs
deleted file mode 100644
index e1a0d53..0000000
--- a/tools/Fixity.hs
+++ /dev/null
@@ -1,720 +0,0 @@
-
-{-
-= Fixity fixups
-
-The point of this code is to be able to take a table of fixity
-information for unary and binary operators, then adjust an ast to
-match these fixities. The standard way of handling this is to handle
-fixities at the parsing stage.
-
-For the SQL parser, this is difficult because there is lots of weird
-syntax for operators (such as prefix and postfix multiple keyword
-operators, between, etc.).
-
-An alternative idea, which is used in some places, is to parse the tree
-treating all the operators as having the same precedence and left
-associativity, then correct the fixity in a pass over the ast after
-parsing. Would also like to use this to fix the fixity for the join
-trees, and set operations, after parsing them. TODO: anything else?
-
-
-Approach
-
-Really not sure how to get this correct. So: lots of testing
-
-Basic testing idea: create an expression, then write down manually how
-the expression should parse with correct fixity. Can write down the
-expression in concrete syntax, and the correct fixity version using
-parens.
-
-Then can parse the expression, fix it, parse the fixed expression,
-remove the parens and compare them to make sure they are equal.
-
-Second layer of testing. For each source expression parsed, run it
-through a generator which will generate every version of that tree by
-choosing all possibilities of fixities on a token by token basis. This
-will ensure the fixity fixer is robust. An alternative approach is to
-guarantee the parser will produce trees where all the fixities are
-known (e.g. unary operators always bind tighter than binary, binary
-are all left associative, prefix unary bind tighter than postfix. This
-way, the fix code can make some assumptions and have less code. We
-will stick with the full general version which is more robust.
-
-Another testing approach is to parse the tree with our non fixity
-respecting parser then fix it, and also parse it with a fixity
-respecting expression parser, and check the results are the same. This
-is difficult with the parsec build expression parser which doesn't
-handle nested unary operators, so have to find or write another build
-expression parser. We can test the fixer with simple operators (single
-symbol prefix, postfix and binary ops) and then use it on the complex
-sql ast trees.
-
-Can also try to generate trees ala quickcheck/smallcheck, then check
-them with the fixer and the build expression parser.
-
-generate a tree:
-
-start with a term
-then roll dice:
-  add a prefix
-  add a postfix
-  do nothing
-then roll dice
-  add a binary op
-  for the second arg, recurse the algo
-
-
-algorithm:
-
-consider possible cases:
-binop with two binop args
-binop with prefix on left
-binop with postfix on right
-postfix with prefix inside
-prefix with postfix inside
-postfix with binop inside
-prefix with binop inside
-
-write a function to deal with each case and try to compose
-
-Tasks:
-
-write unary op tests: on each other, and with binary ops
-figure out how to generate trees
-do the step one tests (write the fixity with parens)
-check out parsers expression parser
-see if can generate trees using smallcheck
-try to test these trees against expression parser
-  otherwise, generate tree, generate variations, check fixity always
-produces same result
-
-
-
-
-todo:
-
-1. more tests for unary operators with each other
-2. moving unary operators inside and outside binary operators:
-   have to think about how this will work in general case
-3. ways to generate lots of tests and check them
-   -> what about creating a parser which parses to a list of all possible
-      parses with different fixities for each operator it sees?
-4. ambiguous fixity cases - need position annotation to do these nicely
-5. real sql: how to work with a variety of ast nodes
-6. plug into simple-sql-parser
-7. refactor the simple-sql-parser parsing code
-8. simple-sql-parser todo for sqream: add other dml, dialects,
-   procedural?
-9. testing idea: write big expressions with explicit parens everywhere
-   parse this
-   remove the parens
-   pretty print, then parse and fixfixity to see if same
-   then generate all variations of tree as if the fixities are different
-     and then fixfixity to check it restores the original
-
-
-write fixity tests
-write code to do the fixing
-add error cases: put it in the either monad to report these
-
-check the descend
-then: move to real sql
-  different abstract representations of binops, etc.
-  what is the best way to deal with this? typeclass? conversion to and
-  from a generic tree?
-
-
-
-
-
-can the binops be fixed on their own (precedence and associativity)
-and then the prefix and postfix ops in separate passes
-
-what about a pass which puts the tree into canonical form:
-all left associative, all unary ops as tight as possible?
-then the fixer can be easier?
--}
-
-
-
-
-
-{-# LANGUAGE DeriveDataTypeable,TupleSections #-}
-import Data.Data
-
-import Text.Parsec.String (Parser)
-import Text.Parsec (try)
-import Text.Parsec.Char
-import Text.Parsec.Combinator
-import Text.Parsec (parse,ParseError)
-import Control.Applicative ((<|>),many) -- ((<**>),(<$>),(<*), (*>),(<*>), (<$), (<|>), many)
---import qualified Text.Parsec.String.Expr as E
-import Control.Monad
---import Data.List (intercalate)
-import Data.Maybe ()
---import qualified Test.HUnit as H
---import FunctionsAndTypesForParsing
-import Debug.Trace
-import Text.Show.Pretty
-import Data.List
-import Control.Applicative
-
-import qualified Test.Tasty as T
-import qualified Test.Tasty.HUnit as H
-
-
-data Expr = BinOp Expr String Expr
-          | PrefOp String Expr
-          | PostOp String Expr
-          | Iden String
-          | Lit String
-          | App String [Expr]
-          | Parens Expr
-            deriving (Eq,Show,Data,Typeable)
-
-{-
---------
-
-quick parser
--}
-
-parensValue :: Parser Expr
-parensValue = Parens <$> parens valueExpr
-
-idenApp :: Parser Expr
-idenApp = try $ do
-    i <- identifier
-    guard (i `notElem` ["not", "and", "or", "is"])
-    choice [do
-            args <- parens (commaSep valueExpr)
-            return $ App i args
-           ,return $ Iden i
-           ]
-
-lit :: Parser Expr
-lit = stringLit <|> numLit
-    where
-      stringLit = Lit <$> lexeme (char '\'' *> manyTill anyChar (char '\''))
-      numLit = do
-        x <- lexeme (many1 digit)
-        let y :: Integer
-            y = read x
-        return $ Lit $ show y
-
-prefOp :: Parser Expr
-prefOp = sym <|> kw
-   where
-      sym = do
-            let prefOps = ["+", "-"]
-            s <- choice $ map symbol prefOps
-            v <- term
-            return $ PrefOp s v
-      kw = do
-           let prefOps = ["not"]
-           i <- identifier
-           guard (i `elem` prefOps)
-           v <- term
-           return $ PrefOp i v
-
-postOp :: Parser (Expr -> Expr)
-postOp = try $ do
-    let kws = ["is null"]
-        kwsp = map (\a -> try $ do
-                       let x :: [String]
-                           x = words a
-                       mapM_ keyword_ x
-                       return $ PostOp a
-                       ) kws
-    choice kwsp
-
-binOp :: Parser (Expr -> Expr -> Expr)
-binOp = symbolBinOp <|> kwBinOp
-  where
-    symbolBinOp = do
-                  let binOps = ["+", "-", "*", "/"]
-                  s <- choice $ map symbol binOps
-                  return $ \a b -> BinOp a s b
-    kwBinOp = do
-              let kwBinOps = ["and", "or"]
-              i <- identifier
-              guard (i `elem` kwBinOps)
-              return $ \a b -> BinOp a i b
-
-term :: Parser Expr
-term = (parensValue
-       <|> try prefOp
-       <|> idenApp
-       <|> lit)
-       <??*> postOp
-
--- (<??>) :: Parser a -> Parser (a -> a) -> Parser a
--- p <??> q = p <**> option id q
-
-(<??*>) :: Parser a -> Parser (a -> a) -> Parser a
-p <??*> q = foldr ($) <$> p <*> (reverse <$> many q)
-
-valueExpr :: Parser Expr
-valueExpr = chainl1 term binOp
-
-
-parens :: Parser a -> Parser a
-parens = between openParen closeParen
-
-openParen :: Parser Char
-openParen = lexeme $ char '('
-closeParen :: Parser Char
-closeParen = lexeme $ char ')'
-
-symbol :: String -> Parser String
-symbol s = try $ lexeme $ do
-    u <- many1 (oneOf "<>=+-^%/*!|")
-    guard (s == u)
-    return s
-
-identifier :: Parser String
-identifier = lexeme ((:) <$> firstChar <*> many nonFirstChar)
-  where
-    firstChar = letter <|> char '_'
-    nonFirstChar = digit <|> firstChar
-
-keyword :: String -> Parser String
-keyword k = try $ do
-    i <- identifier
-    guard (i == k)
-    return k
-
-keyword_ :: String -> Parser ()
-keyword_ = void . keyword
-
-whitespace :: Parser ()
-whitespace =
-    choice [simpleWhitespace *> whitespace
-           ,lineComment *> whitespace
-           ,blockComment *> whitespace
-           ,return ()]
-  where
-    lineComment = try (string "--")
-                  *> manyTill anyChar (void (char '\n') <|> eof)
-    blockComment = try (string "/*")
-                   *> manyTill anyChar (try $ string "*/")
-    simpleWhitespace = void $ many1 (oneOf " \t\n")
-lexeme :: Parser a -> Parser a
-lexeme p = p <* whitespace
-comma :: Parser Char
-comma = lexeme $ char ','
-
-commaSep :: Parser a -> Parser [a]
-commaSep = (`sepBy` comma)
-
-parseExpr :: String -> Either ParseError Expr
-parseExpr = parse (whitespace *> valueExpr <* eof) ""
-
--- --------------
-
-data Assoc = AssocLeft | AssocRight | AssocNone deriving (Eq,Show)
-
-type Fixities = [(String, (Int, Assoc))]
-
-fixFixity :: Fixities -> Expr -> Expr
-fixFixity fixities = fixBinOpPrecedence . fixBinOpAssociativity . fixNestedPrefPostPrec
-  where
-    fixBinOpAssociativity e = case e of
-        BinOp a op b ->
-            let a' = fixBinOpAssociativity a
-                b' = fixBinOpAssociativity b
-                def = BinOp a' op b'
-            in case (a',b') of
-                -- both
-                -- a1 op1 a2 op b1 op2 b2
-                (BinOp a1 op1 a2
-                 ,BinOp b1 op2 b2)
-                  | Just (_p,opa) <- lookupFixity op
-                  , Just (_p,op1a) <- lookupFixity op1
-                  , Just (_p,op2a) <- lookupFixity op2
-                  -> case (opa, op1a, op2a) of
-                       (AssocRight, AssocRight, AssocRight) ->
-                           BinOp a1 op1 (BinOp a2 op (BinOp b1 op2 b2))
-                       (AssocLeft, AssocLeft, AssocLeft) ->
-                           BinOp (BinOp (BinOp a1 op1 a2) op b1) op2 b2
-                       --todo: other cases
-                       _ -> def
-                -- just left side
-                (BinOp a1 op1 a2, _)
-                -- a1 op1 a2 op b'
-                  | Just (_p,opa) <- lookupFixity op
-                  , Just (_p,op1a) <- lookupFixity op1
-                  -> case (opa, op1a) of
-                       (AssocRight, AssocRight) ->
-                           BinOp a1 op1 (BinOp a2 op b')
-                       (AssocLeft, AssocLeft) ->
-                           BinOp (BinOp a1 op1 a2) op b'
-                       _ -> def
-
-                -- just right side
-                (_, BinOp b1 op2 b2)
-                -- e op b1 op2 b2
-                  | Just (_p,opa) <- lookupFixity op
-                  , Just (_p,op2a) <- lookupFixity op2
-                  -> case (opa, op2a) of
-                       (AssocRight, AssocRight) ->
-                           BinOp a' op (BinOp b1 op2 b2)
-                       (AssocLeft, AssocLeft) ->
-                           BinOp (BinOp a' op b1) op2 b2
-                       _ -> def
-                _ -> def
-        _ -> e
-
-    fixBinOpPrecedence e = case e of
-        BinOp a op b ->
-            let a' = fixBinOpPrecedence a
-                b' = fixBinOpPrecedence b
-                def = BinOp a' op b'
-            in case (a',b') of
-                -- both
-                -- a1 op1 a2 op b1 op2 b2
-                -- all equal
-                -- p > or < p1 == p2
-                -- p == p1 < or > p2
-                (BinOp a1 op1 a2
-                 ,BinOp b1 op2 b2)
-                  | Just (p,_opa) <- lookupFixity op
-                  , Just (p1,_op1a) <- lookupFixity op1
-                  , Just (p2,_op2a) <- lookupFixity op2
-                  -> case () of
-                        -- _ | trace ("both prec " ++ show (p,p1,p2)) False -> undefined
-                        _ | p == p1 && p1 == p2 -> def
-                        _ | p > p1 && p1 == p2 -> BinOp a1 op1 b'
-                        _ | p < p1 && p1 == p2 -> BinOp (BinOp a1 op1 a2) op b'
-                        _ | p == p1 && p2 > p1 -> BinOp a' op (BinOp b1 op2 b2)
-                        _ | p == p1 && p2 < p1 -> def -- todo
-                        _ | otherwise -> def
-                -- just left side
-                (BinOp a1 op1 a2, _)
-                -- a1 op1 a2 op b'
-                  | Just (p,_opa) <- lookupFixity op
-                  , Just (p1,_op1a) <- lookupFixity op1
-                  -> case () of
-                        --  _ | trace ("left prec " ++ show (p,p1)) False -> undefined
-                        _ | p < p1 -> {-trace "b1" $ -}BinOp (BinOp a1 op1 a2) op b'
-                          | p > p1 -> {-trace "b2" $ -}BinOp a1 op1 (BinOp a2 op b')
-                          | otherwise -> def
-
-                -- just right side
-                (_, BinOp b1 op2 b2)
-                -- a' op b1 op2 b2
-                  | Just (p,_opa) <- lookupFixity op
-                  , Just (p2,_op1a) <- lookupFixity op2
-                  -> case () of
-                        --  _ | trace ("right prec " ++ show (p,p2)) False -> undefined
-                        _ | p > p2 -> {-trace "b1" $ -}BinOp (BinOp a' op b1) op2 b2
-                          | p < p2 -> {-trace "b2" $ -}BinOp a' op (BinOp b1 op2 b2)
-                          | otherwise -> {-trace "def" $ -} def
-                _ -> def
-        _ -> e
-
-    fixNestedPrefPostPrec e = case e of
-        PrefOp op a ->
-            let a' = fixNestedPrefPostPrec a
-            in case a' of
-                 PostOp op1 b | Just (p,_) <- lookupFixity op
-                              , Just (p1,_) <- lookupFixity op1
-                              , p > p1 -> PostOp op1 (PrefOp op b)
-                 _ -> PrefOp op a'
-        PostOp op a ->
-            let a' = fixNestedPrefPostPrec a
-            in case a' of
-                 PrefOp op1 b | Just (p,_) <- lookupFixity op
-                              , Just (p1,_) <- lookupFixity op1
-                              , p > p1 -> PrefOp op1 (PostOp op b)
-                 _ -> PostOp op a'
-        _ -> e
-
-
-
-    lookupFixity :: String -> Maybe (Int,Assoc)
-    lookupFixity s = maybe (trace ("didn't find " ++ s ++ "\n" ++ ppShow fixities) Nothing)
-                     Just $ lookup s fixities
-
-
-sqlFixity :: [(String, (Int, Assoc))]
-sqlFixity = [(".", (13, AssocLeft))
-            ,("[]", (12, AssocNone))
-
-{-
-unary + -
-todo: split the fixity table into prefix, binary and postfix
-
-todo: don't have explicit precedence numbers in the table??
--}
-
-            ,("^", (10, AssocNone))]
-            ++ m ["*", "/", "%"] (9, AssocLeft)
-            ++ m ["+","-"] (8, AssocLeft)
-            ++ m ["<", ">", "=", "<=", ">=", "<>"] (4, AssocNone)
-            ++ [("is null", (3, AssocNone))
-               ,("not", (2, AssocRight))
-               ,("and", (1, AssocLeft))
-               ,("or", (0, AssocLeft))]
-
-  where
-    m l a = map (,a) l
-
-{-
--------
-
-some simple parser tests
--}
-
-data Test = Group String [Test]
-          | ParserTest String Expr
-          | FixityTest Fixities Expr Expr
-
-parserTests :: Test
-parserTests = Group "parserTests" $ map (uncurry ParserTest) $
-    [("a", Iden "a")
-    ,("'test'", Lit "test")
-    ,("34", Lit "34")
-    ,("f()", App "f" [])
-    ,("f(3)", App "f" [Lit "3"])
-    ,("(7)", Parens (Lit "7"))
-    ,("a + 3", BinOp (Iden "a") "+" (Lit "3"))
-    ,("1 + 2 + 3", BinOp (BinOp (Lit "1") "+" (Lit "2")) "+" (Lit "3"))
-
-    ,("a or b", BinOp (Iden "a") "or" (Iden "b"))
-    ,("-1", PrefOp "-" (Lit "1"))
-    ,("not a", PrefOp "not" (Iden "a"))
-    ,("not not a", PrefOp "not" (PrefOp "not" (Iden "a")))
-    ,("a is null", PostOp "is null" (Iden "a"))
-    ,("a is null is null", PostOp "is null" (PostOp "is null" (Iden "a")))
-    ,("-a+3", BinOp (PrefOp "-" (Iden "a")) "+" (Lit "3"))
-    ,("a is null and b is null", BinOp (PostOp "is null" (Iden "a"))
-                                 "and"
-                                 (PostOp "is null" (Iden "b")))
-    ]
-
-makeParserTest :: String -> Expr -> T.TestTree
-makeParserTest s e = H.testCase s $ do
-    let a = parseExpr s
-    if (Right e == a)
-       then putStrLn $ s ++ " OK"
-       else putStrLn $ "bad parse " ++ s ++ " " ++ show a
-
-{-
-------
-
-fixity checks
-
-test cases:
--}
-
-
-fixityTests :: Test
-fixityTests = Group "fixityTests" $
-     map (\(f,s,e) -> FixityTest f s e) $
-     [
-
--- 2 bin ops wrong associativity left + null versions
-
-     (sqlFixity
-     ,i "a" `plus` (i "b" `plus` i "c")
-     ,(i "a" `plus` i "b") `plus` i "c")
-    ,(sqlFixity
-     ,(i "a" `plus` i "b") `plus` i "c"
-     ,(i "a" `plus` i "b") `plus` i "c")
-
--- 2 bin ops wrong associativity right
-
-    ,(timesRight
-     ,i "a" `times` (i "b" `times` i "c")
-     ,i "a" `times` (i "b" `times` i "c"))
-    ,(timesRight
-     ,(i "a" `times` i "b") `times` i "c"
-     ,i "a" `times` (i "b" `times` i "c"))
-
-
--- 2 bin ops wrong precedence left
-
-    ,(sqlFixity
-     ,i "a" `plus` (i "b" `times` i "c")
-     ,i "a" `plus` (i "b" `times` i "c"))
-
-    ,(sqlFixity
-     ,(i "a" `plus` i "b") `times` i "c"
-     ,i "a" `plus` (i "b" `times` i "c"))
-
--- 2 bin ops wrong precedence right
-
-    ,(sqlFixity
-     ,(i "a" `times` i "b") `plus` i "c"
-     ,(i "a" `times` i "b") `plus` i "c")
-
-    ,(sqlFixity
-     ,i "a" `times` (i "b" `plus` i "c")
-     ,(i "a" `times` i "b") `plus` i "c")
-
-{-
-a + b * c + d
-a * b + c * d
-
-check all variations
--}
-
-    ] ++
-      (let t = (i "a" `plus` i "b")
-               `times`
-               (i "c" `plus` i "d")
-           trs = generateTrees $ splitTree t
-       in [(sqlFixity, x
-           ,i "a" `plus` (i "b" `times` i "c")
-            `plus` i "d")
-          | x <- trs])
-      ++
-      (let t = (i "a" `times` i "b")
-               `plus`
-               (i "c" `times` i "d")
-           trs = generateTrees $ splitTree t
-       in [(sqlFixity, x
-           ,(i "a" `times` i "b")
-               `plus`
-               (i "c" `times` i "d"))
-          | x <- trs])
-
-
-    ++ [
-
--- prefix then postfix wrong precedence
-
-     ([("+", (9, AssocNone))
-      ,("is null", (3, AssocNone))]
-     ,PrefOp "+" (PostOp "is null" (i "a"))
-     ,PostOp "is null" (PrefOp "+" (i "a")))
-
-    ,([("+", (9, AssocNone))
-      ,("is null", (3, AssocNone))]
-     ,PostOp "is null" (PrefOp "+" (i "a"))
-     ,PostOp "is null" (PrefOp "+" (i "a")))
-
-    ,([("+", (3, AssocNone))
-      ,("is null", (9, AssocNone))]
-     ,PrefOp "+" (PostOp "is null" (i "a"))
-     ,PrefOp "+" (PostOp "is null" (i "a")))
-
-    ,([("+", (3, AssocNone))
-      ,("is null", (9, AssocNone))]
-     ,PostOp "is null" (PrefOp "+" (i "a"))
-     ,PrefOp "+" (PostOp "is null" (i "a")))
-
-{-
-3-way unary operator movement:
-take a starting point and generate variations
-
-postfix on first arg of binop (cannot move) make sure precedence wants
-  it to move
-
-prefix on second arg of binop (cannot move)
-
-prefix on binop, precedence wrong
-postfix on binop precedence wrong
-prefix on first arg of binop, precedence wrong
-postfix on second arg of binop, precedence wrong
-
-ambiguous fixity tests
-
-sanity check: parens stops rearrangement
-
-check nesting 1 + f(expr)
--}
-
-    ]
-    where
-      plus a b = BinOp a "+" b
-      times a b = BinOp a "*" b
-      i a = Iden a
-      timesRight = [("*", (9, AssocRight))]
-
--- testCase
-
-makeFixityTest :: Fixities -> Expr -> Expr -> T.TestTree
-makeFixityTest fs s e = H.testCase (show s) $ do
-    let s' = fixFixity fs s
-    H.assertEqual "" s' e
-    {-if (s' == e)
-      then putStrLn $ show s ++ " OK"
-      else putStrLn $ "ERROR\nstart: " ++ show s ++ "\nfixed: " ++ show s' ++ "\nshould be: " ++ show e-}
-
-tests :: Test
-tests = Group "Tests" [parserTests, fixityTests]
-
-makeTest :: Test -> T.TestTree
-makeTest (Group n ts) = T.testGroup n $ map makeTest ts
-makeTest (ParserTest s e) = makeParserTest s e
-makeTest (FixityTest f s e) = makeFixityTest f s e
-
-{-
---------
-
- > tests :: T.TestTree
- > tests = T.testGroup "Tests" $ map makeFixityTest fixityTests
--}
-
-main :: IO ()
-main = T.defaultMain $ makeTest tests
-  {-do
-       mapM_ checkTest tests
-       mapM_ checkFixity fixityTests
-       let plus a b = BinOp a "+" b
-           times a b = BinOp a "*" b
-           i a = Iden a
-       let t = (i "a" `plus` i "b")
-               `times`
-               (i "c" `plus` i "d")
-           spl = splitTree t
-           trs = generateTrees spl
-       --putStrLn $ "\nSplit\n"
-       --putStrLn $ ppShow (fst spl, length $ snd spl)
-       --putStrLn $ show $ length trs
-       --putStrLn $ "\nTrees\n"
-       --putStrLn $ intercalate "\n" $ map show trs
-       return ()-}
-
-{-
-generating trees
-
-1. tree -> list
-val op val op val op ...
-(has to be two lists?)
-
-generate variations:
-pick numbers from 0 to n - 1 (n is the number of ops)
-choose the op at this position to be the root
-recurse on the two sides
--}
-
-splitTree :: Expr -> ([Expr], [Expr->Expr->Expr])
-splitTree (BinOp a op b) = let (x,y) = splitTree a
-                               (z,w) = splitTree b
-                           in (x++z, y++ [\a b -> BinOp a op b] ++ w)
-splitTree x = ([x],[])
-
-
-
-generateTrees :: ([Expr], [Expr->Expr->Expr]) -> [Expr]
-generateTrees (es,ops) | length es /= length ops + 1 =
-    error $ "mismatch in lengths " ++ show (length es, length ops)
-    ++"\n" ++ ppShow es ++ "\n"
-generateTrees ([a,b], [op]) = [op a b]
-generateTrees ([a], []) = [a]
-generateTrees (vs, ops) =
-    let n = length ops
-    in --trace ("generating " ++ show (length vs, n) ++ "trees\n") $
-       concat $ flip map [0..n-1] $ \m ->
-         let (v1,v2) = splitAt (m + 1) vs
-             (ops1,op':ops2) = splitAt m ops
-             r = [op' t u | t <- generateTrees (v1,ops1)
-                     , u <- generateTrees (v2,ops2)]
-         in -- trace ("generated " ++ show (length r) ++ " trees")
-            r
-generateTrees ([],[]) = []
-
-
-
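
-- For reference, a minimal sketch of what the deleted tool did, using the
-- names from the removed Fixity.hs above (this snippet is illustrative only,
-- it is not part of the patch): the expression parser groups everything flat
-- and left associative, and fixFixity then regroups the tree to match the
-- fixity table, e.g.

regrouped :: Expr
regrouped = fixFixity sqlFixity
    -- "a + b * c" as produced by the flat left-associative parser
    (BinOp (BinOp (Iden "a") "+" (Iden "b")) "*" (Iden "c"))
-- regrouped == BinOp (Iden "a") "+" (BinOp (Iden "b") "*" (Iden "c"))
-- i.e. the multiplication is regrouped to bind tighter, per sqlFixity.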
diff --git a/tools/Language/SQL/SimpleSQL/CreateIndex.hs b/tools/Language/SQL/SimpleSQL/CreateIndex.hs
index 5928707..c8a2e82 100644
--- a/tools/Language/SQL/SimpleSQL/CreateIndex.hs
+++ b/tools/Language/SQL/SimpleSQL/CreateIndex.hs
@@ -1,4 +1,5 @@
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.CreateIndex where
 
 import Language.SQL.SimpleSQL.Syntax
diff --git a/tools/Language/SQL/SimpleSQL/CustomDialect.hs b/tools/Language/SQL/SimpleSQL/CustomDialect.hs
index 8fe28b3..073794d 100644
--- a/tools/Language/SQL/SimpleSQL/CustomDialect.hs
+++ b/tools/Language/SQL/SimpleSQL/CustomDialect.hs
@@ -1,4 +1,5 @@
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.CustomDialect (customDialectTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
diff --git a/tools/Language/SQL/SimpleSQL/EmptyStatement.hs b/tools/Language/SQL/SimpleSQL/EmptyStatement.hs
index 60a722d..72fd2be 100644
--- a/tools/Language/SQL/SimpleSQL/EmptyStatement.hs
+++ b/tools/Language/SQL/SimpleSQL/EmptyStatement.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.EmptyStatement where
 
 import Language.SQL.SimpleSQL.Syntax
diff --git a/tools/Language/SQL/SimpleSQL/FullQueries.hs b/tools/Language/SQL/SimpleSQL/FullQueries.hs
index 9a590ee..1950ee4 100644
--- a/tools/Language/SQL/SimpleSQL/FullQueries.hs
+++ b/tools/Language/SQL/SimpleSQL/FullQueries.hs
@@ -1,6 +1,7 @@
 
 -- Some tests for parsing full queries.
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.FullQueries (fullQueriesTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
diff --git a/tools/Language/SQL/SimpleSQL/GroupBy.hs b/tools/Language/SQL/SimpleSQL/GroupBy.hs
index 72d897c..e91ad43 100644
--- a/tools/Language/SQL/SimpleSQL/GroupBy.hs
+++ b/tools/Language/SQL/SimpleSQL/GroupBy.hs
@@ -1,6 +1,7 @@
 
 -- Here are the tests for the group by component of query exprs
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.GroupBy (groupByTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
diff --git a/tools/Language/SQL/SimpleSQL/MySQL.hs b/tools/Language/SQL/SimpleSQL/MySQL.hs
index ffaf4eb..5777092 100644
--- a/tools/Language/SQL/SimpleSQL/MySQL.hs
+++ b/tools/Language/SQL/SimpleSQL/MySQL.hs
@@ -1,6 +1,7 @@
 
 -- Tests for mysql dialect parsing
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.MySQL (mySQLTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
diff --git a/tools/Language/SQL/SimpleSQL/Odbc.hs b/tools/Language/SQL/SimpleSQL/Odbc.hs
index b791084..3d26eab 100644
--- a/tools/Language/SQL/SimpleSQL/Odbc.hs
+++ b/tools/Language/SQL/SimpleSQL/Odbc.hs
@@ -1,4 +1,5 @@
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.Odbc (odbcTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
diff --git a/tools/Language/SQL/SimpleSQL/Oracle.hs b/tools/Language/SQL/SimpleSQL/Oracle.hs
index 36dbb38..eea2be8 100644
--- a/tools/Language/SQL/SimpleSQL/Oracle.hs
+++ b/tools/Language/SQL/SimpleSQL/Oracle.hs
@@ -1,6 +1,7 @@
 
 -- Tests for oracle dialect parsing
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.Oracle (oracleTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
diff --git a/tools/Language/SQL/SimpleSQL/Postgres.hs b/tools/Language/SQL/SimpleSQL/Postgres.hs
index 509993c..65d7d01 100644
--- a/tools/Language/SQL/SimpleSQL/Postgres.hs
+++ b/tools/Language/SQL/SimpleSQL/Postgres.hs
@@ -5,6 +5,7 @@ all of the postgres specific syntax has been skipped, this can be
 revisited when the dialect support is added.
 -}
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.Postgres (postgresTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
diff --git a/tools/Language/SQL/SimpleSQL/QueryExprComponents.hs b/tools/Language/SQL/SimpleSQL/QueryExprComponents.hs
index a9d0c1f..8a558a8 100644
--- a/tools/Language/SQL/SimpleSQL/QueryExprComponents.hs
+++ b/tools/Language/SQL/SimpleSQL/QueryExprComponents.hs
@@ -7,6 +7,7 @@ table refs which are in a separate file.
 These are a few misc tests which don't fit anywhere else.
 -}
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.QueryExprComponents (queryExprComponentTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
diff --git a/tools/Language/SQL/SimpleSQL/QueryExprs.hs b/tools/Language/SQL/SimpleSQL/QueryExprs.hs
index b2e23f1..6628619 100644
--- a/tools/Language/SQL/SimpleSQL/QueryExprs.hs
+++ b/tools/Language/SQL/SimpleSQL/QueryExprs.hs
@@ -4,6 +4,7 @@ These are the tests for the queryExprs parsing which parses multiple
 query expressions from one string.
 -}
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.QueryExprs (queryExprsTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
diff --git a/tools/Language/SQL/SimpleSQL/SQL2011AccessControl.hs b/tools/Language/SQL/SimpleSQL/SQL2011AccessControl.hs
index 5ebb8c0..9ca9d88 100644
--- a/tools/Language/SQL/SimpleSQL/SQL2011AccessControl.hs
+++ b/tools/Language/SQL/SimpleSQL/SQL2011AccessControl.hs
@@ -6,6 +6,7 @@ grant, etc
 -}
 
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.SQL2011AccessControl (sql2011AccessControlTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
diff --git a/tools/Language/SQL/SimpleSQL/SQL2011Bits.hs b/tools/Language/SQL/SimpleSQL/SQL2011Bits.hs
index 443f232..cf794de 100644
--- a/tools/Language/SQL/SimpleSQL/SQL2011Bits.hs
+++ b/tools/Language/SQL/SimpleSQL/SQL2011Bits.hs
@@ -7,6 +7,7 @@ commit, savepoint, etc.), and session management (set).
 -}
 
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.SQL2011Bits (sql2011BitsTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
diff --git a/tools/Language/SQL/SimpleSQL/SQL2011DataManipulation.hs b/tools/Language/SQL/SimpleSQL/SQL2011DataManipulation.hs
index d4e973a..eab9348 100644
--- a/tools/Language/SQL/SimpleSQL/SQL2011DataManipulation.hs
+++ b/tools/Language/SQL/SimpleSQL/SQL2011DataManipulation.hs
@@ -2,6 +2,7 @@
 -- Section 14 in Foundation
 
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.SQL2011DataManipulation (sql2011DataManipulationTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
diff --git a/tools/Language/SQL/SimpleSQL/SQL2011Queries.hs b/tools/Language/SQL/SimpleSQL/SQL2011Queries.hs
index 03db5cb..f754bc2 100644
--- a/tools/Language/SQL/SimpleSQL/SQL2011Queries.hs
+++ b/tools/Language/SQL/SimpleSQL/SQL2011Queries.hs
@@ -31,10 +31,14 @@ some areas getting more comprehensive coverage tests, and also to note
 which parts aren't currently supported.
 -}
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.SQL2011Queries (sql2011QueryTests) where
 import Language.SQL.SimpleSQL.TestTypes
 import Language.SQL.SimpleSQL.Syntax
 
+import qualified Data.Text as T
+import Data.Text (Text)
+
 sql2011QueryTests :: TestItem
 sql2011QueryTests = Group "sql 2011 query tests"
     [literals
@@ -1050,14 +1054,15 @@ new multipliers
 create a list of type name variations:
 -}
 
-typeNames :: ([(String,TypeName)],[(String,TypeName)])
+typeNames :: ([(Text,TypeName)],[(Text,TypeName)])
 typeNames =
     (basicTypes, concatMap makeArray basicTypes
-                 ++ map makeMultiset basicTypes)
+                 <> map makeMultiset basicTypes)
   where
-    makeArray (s,t) = [(s ++ " array", ArrayTypeName t Nothing)
-                      ,(s ++ " array[5]", ArrayTypeName t (Just 5))]
-    makeMultiset (s,t) = (s ++ " multiset", MultisetTypeName t)
+    makeArray (s,t) = [(s <> " array", ArrayTypeName t Nothing)
+                      ,(s <> " array[5]", ArrayTypeName t (Just 5))]
+    makeMultiset (s,t) = (s <> " multiset", MultisetTypeName t)
+    basicTypes :: [(Text, TypeName)]
     basicTypes =
         -- example of every standard type name
         map (\t -> (t,TypeName [Name Nothing t]))
@@ -1102,7 +1107,7 @@ typeNames =
         -- array -- not allowed on own
         -- multiset -- not allowed on own
 
-         ++
+         <>
          [-- 1 single prec + 1 with multiname
           ("char(5)", PrecTypeName [Name Nothing "char"] 5)
          ,("char varying(5)", PrecTypeName [Name Nothing "char varying"] 5)
@@ -1224,12 +1229,12 @@ typeNameTests = Group "type names"
                           $ concatMap makeTests $ snd typeNames]
   where
     makeSimpleTests (ctn, stn) =
-        [(ctn ++ " 'test'", TypedLit stn "test")
+        [(ctn <> " 'test'", TypedLit stn "test")
         ]
     makeCastTests (ctn, stn) =
-        [("cast('test' as " ++ ctn ++ ")", Cast (StringLit "'" "'" "test") stn)
+        [("cast('test' as " <> ctn <> ")", Cast (StringLit "'" "'" "test") stn)
         ]
-    makeTests a = makeSimpleTests a ++ makeCastTests a
+    makeTests a = makeSimpleTests a <> makeCastTests a
 
 
 {-
@@ -3590,7 +3595,7 @@ comparisonPredicates :: TestItem
 comparisonPredicates = Group "comparison predicates"
     $ map (uncurry (TestScalarExpr ansi2011))
     $ map mkOp ["=", "<>", "<", ">", "<=", ">="]
-    ++ [("ROW(a) = ROW(b)"
+    <> [("ROW(a) = ROW(b)"
         ,BinOp (App [Name Nothing "ROW"] [a])
                [Name Nothing "="]
                (App [Name Nothing "ROW"] [b]))
@@ -3600,7 +3605,7 @@ comparisonPredicates = Group "comparison predicates"
            (SpecialOp [Name Nothing "rowctor"] [Iden [Name Nothing "c"], Iden [Name Nothing "d"]]))
     ]
   where
-    mkOp nm = ("a " ++ nm ++ " b"
+    mkOp nm = ("a " <> nm <> " b"
               ,BinOp a [Name Nothing nm] b)
     a = Iden [Name Nothing "a"]
     b = Iden [Name Nothing "b"]
@@ -3911,7 +3916,7 @@ matchPredicate = Group "match predicate"
          {qeSelectList = [(Iden [Name Nothing "a"],Nothing)]
          ,qeFrom = [TRSimple [Name Nothing "t"]]}
     qea = qe {qeSelectList = qeSelectList qe
-                             ++ [(Iden [Name Nothing "b"],Nothing)]}
+                             <> [(Iden [Name Nothing "b"],Nothing)]}
 
 {-
 TODO: simple, partial and full
@@ -4397,7 +4402,7 @@ aggregateFunction = Group "aggregate function"
      ,AggregateApp [Name Nothing "count"]
                    All
                    [Iden [Name Nothing "a"]] [] fil)
-    ] ++ concatMap mkSimpleAgg
+    ] <> concatMap mkSimpleAgg
          ["avg","max","min","sum"
          ,"every", "any", "some"
          ,"stddev_pop","stddev_samp","var_samp","var_pop"
@@ -4405,7 +4410,7 @@ aggregateFunction = Group "aggregate function"
 
 -- bsf
 
-    ++ concatMap mkBsf
+    <> concatMap mkBsf
          ["COVAR_POP","COVAR_SAMP","CORR","REGR_SLOPE"
           ,"REGR_INTERCEPT","REGR_COUNT","REGR_R2"
           ,"REGR_AVGX","REGR_AVGY"
@@ -4413,15 +4418,15 @@ aggregateFunction = Group "aggregate function"
 
 -- osf
 
-    ++
+    <>
     [("rank(a,c) within group (order by b)"
      ,AggregateAppGroup [Name Nothing "rank"]
           [Iden [Name Nothing "a"], Iden [Name Nothing "c"]]
           ob)]
-    ++ map mkGp ["dense_rank","percent_rank"
+    <> map mkGp ["dense_rank","percent_rank"
                 ,"cume_dist", "percentile_cont"
                 ,"percentile_disc"]
-    ++ [("array_agg(a)", App [Name Nothing "array_agg"] [Iden [Name Nothing "a"]])
+    <> [("array_agg(a)", App [Name Nothing "array_agg"] [Iden [Name Nothing "a"]])
        ,("array_agg(a order by z)"
         ,AggregateApp [Name Nothing "array_agg"]
                        SQDefault
@@ -4433,20 +4438,20 @@ aggregateFunction = Group "aggregate function"
   where
     fil = Just $ BinOp (Iden [Name Nothing "something"]) [Name Nothing ">"] (NumLit "5")
     ob = [SortSpec (Iden [Name Nothing "b"]) DirDefault NullsOrderDefault]
-    mkGp nm = (nm ++ "(a) within group (order by b)"
+    mkGp nm = (nm <> "(a) within group (order by b)"
               ,AggregateAppGroup [Name Nothing nm]
                [Iden [Name Nothing "a"]]
                ob)
 
     mkSimpleAgg nm =
-        [(nm ++ "(a)",App [Name Nothing nm] [Iden [Name Nothing "a"]])
-        ,(nm ++ "(distinct a)"
+        [(nm <> "(a)",App [Name Nothing nm] [Iden [Name Nothing "a"]])
+        ,(nm <> "(distinct a)"
          ,AggregateApp [Name Nothing nm]
                        Distinct
                        [Iden [Name Nothing "a"]] [] Nothing)]
     mkBsf nm =
-        [(nm ++ "(a,b)",App [Name Nothing nm] [Iden [Name Nothing "a"],Iden [Name Nothing "b"]])
-        ,(nm ++"(a,b) filter (where something > 5)"
+        [(nm <> "(a,b)",App [Name Nothing nm] [Iden [Name Nothing "a"],Iden [Name Nothing "b"]])
+        ,(nm <> "(a,b) filter (where something > 5)"
           ,AggregateApp [Name Nothing nm]
                         SQDefault
                         [Iden [Name Nothing "a"],Iden [Name Nothing "b"]] [] fil)]
diff --git a/tools/Language/SQL/SimpleSQL/SQL2011Schema.hs b/tools/Language/SQL/SimpleSQL/SQL2011Schema.hs
index f860fa2..363339e 100644
--- a/tools/Language/SQL/SimpleSQL/SQL2011Schema.hs
+++ b/tools/Language/SQL/SimpleSQL/SQL2011Schema.hs
@@ -5,6 +5,7 @@ Section 11 in Foundation
 This module covers the tests for parsing schema and DDL statements.
 -}
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.SQL2011Schema (sql2011SchemaTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
diff --git a/tools/Language/SQL/SimpleSQL/ScalarExprs.hs b/tools/Language/SQL/SimpleSQL/ScalarExprs.hs
index 4c371f7..f8cd403 100644
--- a/tools/Language/SQL/SimpleSQL/ScalarExprs.hs
+++ b/tools/Language/SQL/SimpleSQL/ScalarExprs.hs
@@ -1,11 +1,14 @@
 
 -- Tests for parsing scalar expressions
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.ScalarExprs (scalarExprTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
 import Language.SQL.SimpleSQL.Syntax
 
+import qualified Data.Text as T
+
 scalarExprTests :: TestItem
 scalarExprTests = Group "scalarExprTests"
     [literals
@@ -428,5 +431,5 @@ functionsWithReservedNames = Group "functionsWithReservedNames" $ map t
     ,"char_length"
     ]
   where
-    t fn = TestScalarExpr ansi2011 (fn ++ "(a)") $ App [Name Nothing fn] [Iden [Name Nothing "a"]]
+    t fn = TestScalarExpr ansi2011 (fn <> "(a)") $ App [Name Nothing fn] [Iden [Name Nothing "a"]]
 
diff --git a/tools/Language/SQL/SimpleSQL/TableRefs.hs b/tools/Language/SQL/SimpleSQL/TableRefs.hs
index 8aad6f6..ec18c6a 100644
--- a/tools/Language/SQL/SimpleSQL/TableRefs.hs
+++ b/tools/Language/SQL/SimpleSQL/TableRefs.hs
@@ -4,6 +4,7 @@ These are the tests for parsing focusing on the from part of query
 expression
 -}
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.TableRefs (tableRefTests) where
 
 import Language.SQL.SimpleSQL.TestTypes
diff --git a/tools/Language/SQL/SimpleSQL/TestTypes.hs b/tools/Language/SQL/SimpleSQL/TestTypes.hs
index 24757ab..f2884aa 100644
--- a/tools/Language/SQL/SimpleSQL/TestTypes.hs
+++ b/tools/Language/SQL/SimpleSQL/TestTypes.hs
@@ -22,11 +22,11 @@ mentioned give a parse error. Not sure if this will be too awkward due
 to lots of tricky exceptions/variations.
 -}
 
-data TestItem = Group String [TestItem]
-              | TestScalarExpr Dialect String ScalarExpr
-              | TestQueryExpr Dialect String QueryExpr
-              | TestStatement Dialect String Statement
-              | TestStatements Dialect String [Statement]
+data TestItem = Group Text [TestItem]
+              | TestScalarExpr Dialect Text ScalarExpr
+              | TestQueryExpr Dialect Text QueryExpr
+              | TestStatement Dialect Text Statement
+              | TestStatements Dialect Text [Statement]
 
 {-
 this just checks the sql parses without error, mostly just a
@@ -34,12 +34,12 @@ intermediate when I'm too lazy to write out the parsed AST. These
 should all be TODO to convert to a testqueryexpr test.
 -}
 
-              | ParseQueryExpr Dialect String
+              | ParseQueryExpr Dialect Text
 
 -- check that the string given fails to parse
 
-              | ParseQueryExprFails Dialect String
-              | ParseScalarExprFails Dialect String
+              | ParseQueryExprFails Dialect Text
+              | ParseScalarExprFails Dialect Text
               | LexTest Dialect Text [Token]
-              | LexFails Dialect String
+              | LexFails Dialect Text
                 deriving (Eq,Show)
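
-- A hypothetical example value (not part of the patch itself), assuming the
-- usual test-module imports, to show the effect of the change: with
-- OverloadedStrings the test SQL strings keep their literal form even though
-- the fields are now Text.

{-# LANGUAGE OverloadedStrings #-}
import Language.SQL.SimpleSQL.TestTypes
import Language.SQL.SimpleSQL.Syntax

exampleItem :: TestItem
exampleItem =
    TestScalarExpr ansi2011 "a + b"
        (BinOp (Iden [Name Nothing "a"]) [Name Nothing "+"] (Iden [Name Nothing "b"]))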
diff --git a/tools/Language/SQL/SimpleSQL/Tests.hs b/tools/Language/SQL/SimpleSQL/Tests.hs
index 78ec3c6..1f107ad 100644
--- a/tools/Language/SQL/SimpleSQL/Tests.hs
+++ b/tools/Language/SQL/SimpleSQL/Tests.hs
@@ -87,7 +87,7 @@ tests = itemToTest testData
 
 itemToTest :: TestItem -> T.TestTree
 itemToTest (Group nm ts) =
-    T.testGroup nm $ map itemToTest ts
+    T.testGroup (T.unpack nm) $ map itemToTest ts
 itemToTest (TestScalarExpr d str expected) =
     toTest parseScalarExpr prettyScalarExpr d str expected
 itemToTest (TestQueryExpr d str expected) =
@@ -116,65 +116,64 @@ makeLexerTest d s ts = H.testCase (T.unpack s) $ do
     let s' = Lex.prettyTokens d $ ts1
     H.assertEqual "pretty print" s s'
 
-makeLexingFailsTest :: Dialect -> String -> T.TestTree
-makeLexingFailsTest d s = H.testCase s $ do
+makeLexingFailsTest :: Dialect -> Text -> T.TestTree
+makeLexingFailsTest d s = H.testCase (T.unpack s) $ do
     undefined {-case lexSQL d "" Nothing s of
          Right x -> H.assertFailure $ "lexing should have failed: " ++ s ++ "\ngot: " ++ show x
          Left _ -> return ()-}
 
 
 toTest :: (Eq a, Show a) =>
-          (Dialect -> String -> Maybe (Int,Int) -> String -> Either ParseError a)
-       -> (Dialect -> a -> String)
+          (Dialect -> Text -> Maybe (Int,Int) -> Text -> Either ParseError a)
+       -> (Dialect -> a -> Text)
        -> Dialect
-       -> String
+       -> Text
        -> a
        -> T.TestTree
-toTest parser pp d str expected = H.testCase str $ do
+toTest parser pp d str expected = H.testCase (T.unpack str) $ do
         let egot = parser d "" Nothing str
         case egot of
-            Left e -> H.assertFailure $ peFormattedError e
+            Left e -> H.assertFailure $ T.unpack $ prettyError e
             Right got -> do
                 H.assertEqual "" expected got
                 let str' = pp d got
                 let egot' = parser d "" Nothing str'
                 case egot' of
                     Left e' -> H.assertFailure $ "pp roundtrip"
-                                                 ++ "\n" ++ str'
-                                                 ++ peFormattedError e'
+                                                 ++ "\n" ++ (T.unpack str')
+                                                 ++ (T.unpack $ prettyError e')
                     Right got' -> H.assertEqual
-                                  ("pp roundtrip" ++ "\n" ++ str')
+                                  ("pp roundtrip" ++ "\n" ++ T.unpack str')
                                    expected got'
 
 toPTest :: (Eq a, Show a) =>
-          (Dialect -> String -> Maybe (Int,Int) -> String -> Either ParseError a)
-       -> (Dialect -> a -> String)
+          (Dialect -> Text -> Maybe (Int,Int) -> Text -> Either ParseError a)
+       -> (Dialect -> a -> Text)
        -> Dialect
-       -> String
+       -> Text
        -> T.TestTree
-toPTest parser pp d str = H.testCase str $ do
+toPTest parser pp d str = H.testCase (T.unpack str) $ do
         let egot = parser d "" Nothing str
         case egot of
-            Left e -> H.assertFailure $ peFormattedError e
+            Left e -> H.assertFailure $ T.unpack $ prettyError e
             Right got -> do
                 let str' = pp d got
                 let egot' = parser d "" Nothing str'
                 case egot' of
                     Left e' -> H.assertFailure $ "pp roundtrip "
-                                                 ++ "\n" ++ str' ++ "\n"
-                                                 ++ peFormattedError e'
+                                                 ++ "\n" ++ T.unpack str' ++ "\n"
+                                                 ++ T.unpack (prettyError e')
                     Right _got' -> return ()
 
-
 toFTest :: (Eq a, Show a) =>
-          (Dialect -> String -> Maybe (Int,Int) -> String -> Either ParseError a)
-       -> (Dialect -> a -> String)
+          (Dialect -> Text -> Maybe (Int,Int) -> Text -> Either ParseError a)
+       -> (Dialect -> a -> Text)
        -> Dialect
-       -> String
+       -> Text
        -> T.TestTree
-toFTest parser _pp d str = H.testCase str $ do
+toFTest parser _pp d str = H.testCase (T.unpack str) $ do
         let egot = parser d "" Nothing str
         case egot of
             Left _e -> return ()
             Right _got ->
-              H.assertFailure $ "parse didn't fail: " ++ show d ++ "\n" ++ str
+              H.assertFailure $ "parse didn't fail: " ++ show d ++ "\n" ++ T.unpack str
diff --git a/tools/Language/SQL/SimpleSQL/Tpch.hs b/tools/Language/SQL/SimpleSQL/Tpch.hs
index c00a6be..c5cfb87 100644
--- a/tools/Language/SQL/SimpleSQL/Tpch.hs
+++ b/tools/Language/SQL/SimpleSQL/Tpch.hs
@@ -8,16 +8,19 @@ The changes made to the official syntax are:
    using a common table expression
 -}
 
+{-# LANGUAGE OverloadedStrings #-}
 module Language.SQL.SimpleSQL.Tpch (tpchTests,tpchQueries) where
 
 import Language.SQL.SimpleSQL.TestTypes
 
+import Data.Text (Text)
+
 tpchTests :: TestItem
 tpchTests =
     Group "parse tpch"
     $ map (ParseQueryExpr ansi2011 . snd) tpchQueries
 
-tpchQueries :: [(String,String)]
+tpchQueries :: [(String,Text)]
 tpchQueries =
   [("Q1","\n\
          \select\n\
diff --git a/tools/SimpleSqlParserTool.hs b/tools/SimpleSqlParserTool.hs
index 15e521c..78091b9 100644
--- a/tools/SimpleSqlParserTool.hs
+++ b/tools/SimpleSqlParserTool.hs
@@ -7,20 +7,30 @@ Commands:
 parse: parse sql from file, stdin or from command line
 lex: lex sql same
 indent: parse then pretty print sql
+
+TODO: this is supposed to be a simple example, but it's a total mess.
+Write some simple helpers so it's all in Text?
+
 -}
 
 {-# LANGUAGE TupleSections #-}
-import System.Environment
-import Control.Monad
-import Data.Maybe
-import System.Exit
-import Data.List
-import Text.Show.Pretty
+import System.Environment (getArgs)
+import Control.Monad (forM_, when)
+import Data.Maybe (isJust)
+import System.Exit (exitFailure)
+import Data.List (intercalate)
+import Text.Show.Pretty (ppShow)
 --import Control.Applicative
 
+import qualified Data.Text as T
+
 import Language.SQL.SimpleSQL.Pretty
+    (prettyStatements)
 import Language.SQL.SimpleSQL.Parse
-import Language.SQL.SimpleSQL.Lex
+    (parseStatements
+    ,prettyError)
+import qualified Language.SQL.SimpleSQL.Lex as L
+import Language.SQL.SimpleSQL.Dialect (ansi2011)
 
 
 main :: IO ()
@@ -67,9 +77,9 @@ parseCommand =
   ("parse SQL from file/stdin/command line (use -c to parse from command line)"
   ,\args -> do
       (f,src) <- getInput args
-      either (error . peFormattedError)
+      either (error . T.unpack . prettyError)
           (putStrLn . ppShow)
-          $ parseStatements ansi2011 f Nothing src
+          $ parseStatements ansi2011 (T.pack f) Nothing (T.pack src)
   )
 
 lexCommand :: (String,[String] -> IO ())
@@ -77,9 +87,9 @@ lexCommand =
   ("lex SQL from file/stdin/command line (use -c to parse from command line)"
   ,\args -> do
       (f,src) <- getInput args
-      either (error . peFormattedError)
+      either (error . T.unpack . L.prettyError)
              (putStrLn . intercalate ",\n" . map show)
-             $ lexSQL ansi2011 f Nothing src
+             $ L.lexSQL ansi2011 (T.pack f) Nothing (T.pack src)
   )
 
 
@@ -88,8 +98,8 @@ indentCommand =
   ("parse then pretty print SQL from file/stdin/command line (use -c to parse from command line)"
   ,\args -> do
       (f,src) <- getInput args
-      either (error . peFormattedError)
-          (putStrLn . prettyStatements ansi2011)
-          $ parseStatements ansi2011 f Nothing src
+      either (error . T.unpack . prettyError)
+          (putStrLn . T.unpack . prettyStatements ansi2011)
+          $ parseStatements ansi2011 (T.pack f) Nothing (T.pack src)
 
   )
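
-- A sketch of a small Text helper in the spirit of the TODO added at the top
-- of this file (not part of the patch itself); it reuses exactly the calls
-- the commands above already make, so only the filename and source read from
-- the command line need packing.

import qualified Data.Text as T
import Language.SQL.SimpleSQL.Parse (parseStatements, prettyError)
import Language.SQL.SimpleSQL.Pretty (prettyStatements)
import Language.SQL.SimpleSQL.Dialect (ansi2011)

indentSql :: String -> String -> IO ()
indentSql fn src =
    either (error . T.unpack . prettyError)
           (putStrLn . T.unpack . prettyStatements ansi2011)
           (parseStatements ansi2011 (T.pack fn) Nothing (T.pack src))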
diff --git a/website/RenderTestCases.hs b/website/RenderTestCases.hs
index 559c391..e53c581 100644
--- a/website/RenderTestCases.hs
+++ b/website/RenderTestCases.hs
@@ -1,43 +1,52 @@
 
 -- Converts the test data to asciidoc
 
+{-# LANGUAGE OverloadedStrings #-}
 import Language.SQL.SimpleSQL.Tests
 import Text.Show.Pretty
 import Control.Monad.State
-import Language.SQL.SimpleSQL.Parse
-import Language.SQL.SimpleSQL.Lex
+import qualified Language.SQL.SimpleSQL.Parse as P
+import qualified Language.SQL.SimpleSQL.Lex as L
 import Data.List
 import Control.Monad (when, unless)
+import Data.Text (Text)
+import qualified Data.Text as T
 
-data TableItem = Heading Int String
-               | Row String String
+import Prelude hiding (putStrLn)
+import Data.Text.IO (putStrLn)
+
+data TableItem = Heading Int Text
+               | Row Text Text
 
 doc :: Int -> TestItem -> [TableItem]
 -- filter out some groups of tests
-doc n (Group nm _) | "generated" `isInfixOf` nm = []
+doc n (Group nm _) | "generated" `T.isInfixOf` nm = []
 doc n (Group nm is) =
     Heading n nm
     : concatMap (doc (n + 1)) is
 doc _ (TestScalarExpr _ str e) =
-    [Row str (ppShow e)]
+    [Row str (T.pack $ ppShow e)]
 doc _ (TestQueryExpr _ str e) =
-    [Row str (ppShow e)]
+    [Row str (T.pack $ ppShow e)]
 doc _ (TestStatement _ str e) =
-    [Row str (ppShow e)]
+    [Row str (T.pack $ ppShow e)]
 doc _ (TestStatements _ str e) =
-    [Row str (ppShow e)]
+    [Row str (T.pack $ ppShow e)]
 doc _ (ParseQueryExpr d str) =
-    [Row str (ppShow $ parseQueryExpr d "" Nothing str)]
+    [Row str (showResult $ P.parseQueryExpr d "" Nothing str)]
 doc _ (ParseQueryExprFails d str) =
-    [Row str (ppShow $ parseQueryExpr d "" Nothing str)]
+    [Row str (showResult $ P.parseQueryExpr d "" Nothing str)]
 doc _ (ParseScalarExprFails d str) =
-    [Row str (ppShow $ parseScalarExpr d "" Nothing str)]
+    [Row str (showResult $ P.parseScalarExpr d "" Nothing str)]
 
 doc _ (LexTest d str t) =
-    [Row str (ppShow $ lexSQL d "" Nothing str)]
+    [Row str (T.pack $ ppShow $ L.lexSQL d "" Nothing str)]
 
 doc _ (LexFails d str) =
-    [Row str (ppShow $ lexSQL d "" Nothing str)]
+    [Row str (T.pack $ ppShow $ L.lexSQL d "" Nothing str)]
+
+showResult :: Show a => Either P.ParseError a -> Text
+showResult = either P.prettyError (T.pack . ppShow)
 
 -- TODO: should put the dialect in the html output
 
@@ -49,20 +58,21 @@ render = go False
         when t $ putStrLn "|==="
         -- slight hack
         when (level > 1) $
-            putStrLn $ "\n" ++ replicate level '=' ++ " " ++ title
+            putStrLn $ "\n" <> T.replicate level "=" <> " " <> title
         go False is
     go t (Row sql hask : is) = do
         unless t $ putStrLn "[cols=\"2\"]\n|==="
-        let sql' = "\n[source,sql]\n----\n" ++ sql ++ "\n----\n"
-            hask' = "\n[source,haskell]\n----\n" ++ hask ++ "\n----\n"
-        putStrLn $ "a| " ++ escapePipe sql'
-                   ++ "a| " ++ escapePipe hask' ++ " "
+        let sql' = "\n[source,sql]\n----\n" <> sql <> "\n----\n"
+            hask' = "\n[source,haskell]\n----\n" <> hask <> "\n----\n"
+        putStrLn $ "a| " <> escapePipe sql'
+                   <> "a| " <> escapePipe hask' <> " "
         go True is
     go t [] = when t $ putStrLn "|==="
-    escapePipe [] = []
-    escapePipe ('\\':'|':xs) = '\\' : '\\' : '\\' : '|' : escapePipe xs
-    escapePipe ('|':xs) = '\\' : '|' : escapePipe xs
-    escapePipe (x:xs) = x : escapePipe xs
+    escapePipe t = T.pack $ escapePipe' $ T.unpack t
+    escapePipe' [] = []
+    escapePipe' ('\\':'|':xs) = '\\' : '\\' : '\\' : '|' : escapePipe' xs
+    escapePipe' ('|':xs) = '\\' : '|' : escapePipe' xs
+    escapePipe' (x:xs) = x : escapePipe' xs
 
 main :: IO ()
 main = do