forked from OSS/graphql

Parse queries with megaparsec

parent ce169ecef2
commit 5e9bf9648d
@@ -3,10 +3,12 @@ module Data.GraphQL where

import Control.Applicative (Alternative)

import Data.Text (Text)
import qualified Data.Text as T

import qualified Data.Aeson as Aeson
import qualified Data.Attoparsec.Text as Attoparsec
import Text.Megaparsec ( errorBundlePretty
                       , parse
                       )

import Data.GraphQL.Execute
import Data.GraphQL.Parser
@@ -19,7 +21,7 @@ import Data.GraphQL.Error
-- executed according to the given 'Schema'.
--
-- Returns the response as an @Aeson.@'Aeson.Value'.
graphql :: (Alternative m, Monad m) => Schema m -> Text -> m Aeson.Value
graphql :: (Alternative m, Monad m) => Schema m -> T.Text -> m Aeson.Value
graphql = flip graphqlSubs $ const Nothing

-- | Takes a 'Schema', a variable substitution function and text
@@ -28,7 +30,7 @@ graphql = flip graphqlSubs $ const Nothing
-- query and the query is then executed according to the given 'Schema'.
--
-- Returns the response as an @Aeson.@'Aeson.Value'.
graphqlSubs :: (Alternative m, Monad m) => Schema m -> Subs -> Text -> m Aeson.Value
graphqlSubs :: (Alternative m, Monad m) => Schema m -> Subs -> T.Text -> m Aeson.Value
graphqlSubs schema f =
    either parseError (execute schema f)
    . Attoparsec.parseOnly document
    either (parseError . errorBundlePretty) (execute schema f)
    . parse document ""
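The hunk above is the heart of the change at the API boundary: attoparsec's Attoparsec.parseOnly reports failures as a bare String, while megaparsec's parse returns a structured ParseErrorBundle that errorBundlePretty renders before it is handed to parseError. A minimal standalone sketch of that adapter shape, with a hypothetical digits parser standing in for document:

    {-# LANGUAGE OverloadedStrings #-}
    module ParseSketch where

    import Control.Applicative (some)
    import Data.Text (Text)
    import Data.Void (Void)
    import qualified Text.Megaparsec as M
    import qualified Text.Megaparsec.Char as M

    -- Stand-in for the real GraphQL document parser.
    digits :: M.Parsec Void Text String
    digits = some M.digitChar

    -- parse takes a source name for error messages (here "") and yields
    -- Either (ParseErrorBundle Text Void) String; errorBundlePretty turns
    -- the bundle back into the plain String the old code expected.
    run :: Text -> Either String String
    run = either (Left . M.errorBundlePretty) Right . M.parse digits ""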
@@ -1,4 +1,5 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module Data.GraphQL.Error (
    parseError,
    CollectErrsT,
@@ -31,7 +32,7 @@ joinErrs = fmap $ fmap fst &&& concatMap snd

-- | Wraps the given 'Applicative' to handle errors
errWrap :: Functor f => f a -> f (a, [Aeson.Value])
errWrap = fmap (flip (,) [])
errWrap = fmap (, [])

-- | Adds an error to the list of errors.
addErr :: Functor f => Aeson.Value -> CollectErrsT f a -> CollectErrsT f a
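The second hunk is a purely syntactic cleanup enabled by the new TupleSections pragma: (, []) is a section of the pair constructor with the first slot left open. The three spellings below are interchangeable (a standalone illustration, not taken from the diff):

    {-# LANGUAGE TupleSections #-}
    module TupleSectionsSketch where

    -- Pair every produced value with an empty error list, as errWrap does.
    withNoErrors1, withNoErrors2, withNoErrors3 :: Functor f => f a -> f (a, [e])
    withNoErrors1 = fmap (\x -> (x, []))
    withNoErrors2 = fmap (flip (,) [])
    withNoErrors3 = fmap (, [])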
@@ -1,50 +1,22 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE LambdaCase #-}
-- | This module defines a parser for @GraphQL@ request documents.
{-# LANGUAGE OverloadedStrings #-}
module Data.GraphQL.Parser where

import Prelude hiding (takeWhile)

import Control.Applicative ((<|>), Alternative, empty, many, optional)
import Control.Monad (when)
import Data.Char (isDigit, isSpace)
import Data.Foldable (traverse_)
import Data.Monoid ((<>))
import Data.List.NonEmpty (NonEmpty((:|)))
import Data.Scientific (floatingOrInteger, scientific, toBoundedInteger)

import Data.Text (Text, append)
import Data.Attoparsec.Combinator (lookAhead)
import Data.Attoparsec.Text
    ( Parser
    , (<?>)
    , anyChar
    , endOfLine
    , inClass
    , many1
    , manyTill
    , option
    , peekChar
    , takeWhile
    , takeWhile1
import Control.Applicative ( Alternative(..)
                           , optional
                           )
import qualified Data.Attoparsec.Text as Attoparsec (scientific)

import Data.GraphQL.AST

-- * Name

name :: Parser Name
name = tok $ append <$> takeWhile1 isA_z
    <*> takeWhile ((||) <$> isDigit <*> isA_z)
  where
    -- `isAlpha` handles many more Unicode Chars
    isA_z = inClass $ '_' : ['A'..'Z'] <> ['a'..'z']

-- * Document
import Language.GraphQL.Lexer
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.Text as T
import Text.Megaparsec ( lookAhead
                       , option
                       , try
                       , (<?>)
                       )

document :: Parser Document
document = whiteSpace *> manyNE definition
document = spaceConsumer >> lexeme (manyNE definition)

definition :: Parser Definition
definition = DefinitionOperation <$> operationDefinition
@@ -61,8 +33,8 @@ operationDefinition = OperationSelectionSet <$> selectionSet
    <?> "operationDefinition error"

operationType :: Parser OperationType
operationType = Query <$ tok "query"
    <|> Mutation <$ tok "mutation"
operationType = Query <$ symbol "query"
    <|> Mutation <$ symbol "mutation"
    <?> "operationType error"

-- * SelectionSet
@@ -71,11 +43,11 @@ selectionSet :: Parser SelectionSet
selectionSet = braces $ manyNE selection

selectionSetOpt :: Parser SelectionSetOpt
selectionSetOpt = braces $ many1 selection
selectionSetOpt = braces $ some selection

selection :: Parser Selection
selection = SelectionField <$> field
    <|> SelectionFragmentSpread <$> fragmentSpread
    <|> try (SelectionFragmentSpread <$> fragmentSpread)
    <|> SelectionInlineFragment <$> inlineFragment
    <?> "selection error!"
@@ -89,146 +61,116 @@ field = Field <$> optional alias
    <*> opt selectionSetOpt

alias :: Parser Alias
alias = name <* tok ":"
alias = try $ name <* colon

-- * Arguments

arguments :: Parser Arguments
arguments = parens $ many1 argument
arguments = parens $ some argument

argument :: Parser Argument
argument = Argument <$> name <* tok ":" <*> value
argument = Argument <$> name <* colon <*> value

-- * Fragments

fragmentSpread :: Parser FragmentSpread
fragmentSpread = FragmentSpread <$ tok "..."
fragmentSpread = FragmentSpread <$ spread
    <*> fragmentName
    <*> opt directives

inlineFragment :: Parser InlineFragment
inlineFragment = InlineFragment <$ tok "..."
inlineFragment = InlineFragment <$ spread
    <*> optional typeCondition
    <*> opt directives
    <*> selectionSet

fragmentDefinition :: Parser FragmentDefinition
fragmentDefinition = FragmentDefinition
    <$ tok "fragment"
    <$ symbol "fragment"
    <*> name
    <*> typeCondition
    <*> opt directives
    <*> selectionSet

fragmentName :: Parser FragmentName
fragmentName = but (tok "on") *> name
fragmentName = but (symbol "on") *> name

typeCondition :: Parser TypeCondition
typeCondition = tok "on" *> name
typeCondition = symbol "on" *> name

-- * Input Values

value :: Parser Value
value = ValueVariable <$> variable
    <|> tok floatOrInt32Value
    <|> ValueFloat <$> try float
    <|> ValueInt <$> integer
    <|> ValueBoolean <$> booleanValue
    <|> ValueNull <$ tok "null"
    <|> ValueString <$> stringValue
    <|> ValueEnum <$> enumValue
    <|> ValueNull <$ symbol "null"
    <|> ValueString <$> string
    <|> ValueString <$> blockString
    <|> ValueEnum <$> try enumValue
    <|> ValueList <$> listValue
    <|> ValueObject <$> objectValue
    <?> "value error!"
  where
    booleanValue :: Parser Bool
    booleanValue = True <$ tok "true"
        <|> False <$ tok "false"

    floatOrInt32Value :: Parser Value
    floatOrInt32Value =
        Attoparsec.scientific >>=
            either (pure . ValueFloat)
                (maybe (fail "Integer value is out of range.")
                    (pure . ValueInt)
                    . toBoundedInteger . (`scientific` 0))
            . floatingOrInteger

    -- TODO: Escape characters. Look at `jsstring_` in aeson package.
    stringValue :: Parser Text
    stringValue = quotes (takeWhile (/= '"'))
    booleanValue = True <$ symbol "true"
        <|> False <$ symbol "false"

    enumValue :: Parser Name
    enumValue = but (tok "true") *> but (tok "false") *> but (tok "null") *> name
    enumValue = but (symbol "true") *> but (symbol "false") *> but (symbol "null") *> name

    listValue :: Parser [Value]
    listValue = brackets $ many1 value
    listValue = brackets $ some value

    objectValue :: Parser [ObjectField]
    objectValue = braces $ many1 objectField
    objectValue = braces $ some objectField

objectField :: Parser ObjectField
objectField = ObjectField <$> name <* tok ":" <*> value
objectField = ObjectField <$> name <* symbol ":" <*> value
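A pattern worth noting in this hunk is the new try wrapper around alias, fragmentSpread, float and enumValue: attoparsec backtracks automatically on failure, but megaparsec commits to an alternative as soon as it consumes input, so alternatives with a common prefix need an explicit try. A small standalone illustration with number literals (the bad/good parsers are illustrative, not part of the diff):

    {-# LANGUAGE OverloadedStrings #-}
    module TrySketch where

    import Control.Applicative ((<|>))
    import Data.Text (Text)
    import Data.Void (Void)
    import Text.Megaparsec (Parsec, parseMaybe, try)
    import qualified Text.Megaparsec.Char.Lexer as Lexer

    type P = Parsec Void Text

    -- "4" starts like a float, so the float branch consumes the '4' and then
    -- fails at the missing fractional part; without try the whole parser
    -- fails instead of falling through to the integer branch.
    bad, good :: P (Either Double Integer)
    bad  = Left <$> Lexer.float <|> Right <$> Lexer.decimal
    good = Left <$> try Lexer.float <|> Right <$> Lexer.decimal

    checkBad, checkGood :: Maybe (Either Double Integer)
    checkBad  = parseMaybe bad  "4"   -- Nothing
    checkGood = parseMaybe good "4"   -- Just (Right 4)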
-- * Variables

variableDefinitions :: Parser VariableDefinitions
variableDefinitions = parens $ many1 variableDefinition
variableDefinitions = parens $ some variableDefinition

variableDefinition :: Parser VariableDefinition
variableDefinition = VariableDefinition <$> variable
    <* tok ":"
    <* colon
    <*> type_
    <*> optional defaultValue

variable :: Parser Variable
variable = tok "$" *> name
variable = dollar *> name

defaultValue :: Parser DefaultValue
defaultValue = tok "=" *> value
defaultValue = equals *> value

-- * Input Types

type_ :: Parser Type
type_ = TypeNamed <$> name <* but "!"
type_ = try (TypeNamed <$> name <* but "!")
    <|> TypeList <$> brackets type_
    <|> TypeNonNull <$> nonNullType
    <?> "type_ error!"

nonNullType :: Parser NonNullType
nonNullType = NonNullTypeNamed <$> name <* tok "!"
    <|> NonNullTypeList <$> brackets type_ <* tok "!"
nonNullType = NonNullTypeNamed <$> name <* bang
    <|> NonNullTypeList <$> brackets type_ <* bang
    <?> "nonNullType error!"

-- * Directives

directives :: Parser Directives
directives = many1 directive
directives = some directive

directive :: Parser Directive
directive = Directive
    <$ tok "@"
    <$ at
    <*> name
    <*> opt arguments

-- * Internal

tok :: Parser a -> Parser a
tok p = p <* whiteSpace

parens :: Parser a -> Parser a
parens = between "(" ")"

braces :: Parser a -> Parser a
braces = between "{" "}"

quotes :: Parser a -> Parser a
quotes = between "\"" "\""

brackets :: Parser a -> Parser a
brackets = between "[" "]"

between :: Parser Text -> Parser Text -> Parser a -> Parser a
between open close p = tok open *> p <* tok close

opt :: Monoid a => Parser a -> Parser a
opt = option mempty

@@ -240,9 +182,3 @@ but pn = False <$ lookAhead pn <|> pure True >>= \case

manyNE :: Alternative f => f a -> f (NonEmpty a)
manyNE p = (:|) <$> p <*> many p

whiteSpace :: Parser ()
whiteSpace = peekChar >>= traverse_ (\c ->
    if isSpace c || c == ','
        then anyChar *> whiteSpace
        else when (c == '#') $ manyTill anyChar endOfLine *> whiteSpace)
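The deleted whiteSpace at the end of this file (skipping spaces, commas and # comments by hand with peekChar) is what the new Language.GraphQL.Lexer module, shown next, expresses through Text.Megaparsec.Char.Lexer: a single space consumer plus lexeme/symbol wrappers that make every token eat the ignored characters that follow it, so the grammar rules above never mention whitespace. A minimal standalone sketch of that discipline (sc and keyword are illustrative names, not from the diff):

    {-# LANGUAGE OverloadedStrings #-}
    module LexemeSketch where

    import Control.Applicative (empty, (<|>))
    import Data.Text (Text)
    import Data.Void (Void)
    import Text.Megaparsec (Parsec, skipSome)
    import Text.Megaparsec.Char (char, space1)
    import qualified Text.Megaparsec.Char.Lexer as Lexer

    type P = Parsec Void Text

    -- GraphQL treats spaces, commas and "#" line comments as insignificant.
    sc :: P ()
    sc = Lexer.space (space1 <|> skipSome (char ',')) (Lexer.skipLineComment "#") empty

    -- A token parser: match the text, then swallow trailing ignored
    -- characters so the next parser starts at the next token.
    keyword :: Text -> P Text
    keyword = Lexer.symbol sc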
Language/GraphQL/Lexer.hs (new file, 218 lines)
@@ -0,0 +1,218 @@
{-# LANGUAGE ExplicitForAll #-}
{-# LANGUAGE OverloadedStrings #-}
module Language.GraphQL.Lexer
    ( Parser
    , amp
    , at
    , bang
    , blockString
    , braces
    , brackets
    , colon
    , dollar
    , comment
    , equals
    , integer
    , float
    , lexeme
    , name
    , parens
    , pipe
    , spaceConsumer
    , spread
    , string
    , symbol
    ) where

import Control.Applicative ( Alternative(..)
                           , liftA2
                           )
import Data.Char ( chr
                 , digitToInt
                 , isAsciiLower
                 , isAsciiUpper
                 , ord
                 )
import Data.Foldable (foldl')
import Data.List (dropWhileEnd)
import Data.Proxy (Proxy(..))
import Data.Void (Void)
import Text.Megaparsec ( Parsec
                       , MonadParsec
                       , Token
                       , between
                       , chunk
                       , chunkToTokens
                       , lookAhead
                       , notFollowedBy
                       , oneOf
                       , option
                       , satisfy
                       , sepBy
                       , skipSome
                       , takeP
                       , takeWhile1P
                       , try
                       )
import Text.Megaparsec.Char ( char
                            , digitChar
                            , space1
                            )
import qualified Text.Megaparsec.Char.Lexer as Lexer
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
-- | Standard parser.
-- Accepts the type of the parsed token.
type Parser = Parsec Void T.Text

ignoredCharacters :: Parser ()
ignoredCharacters = space1 <|> skipSome (char ',')

spaceConsumer :: Parser ()
spaceConsumer = Lexer.space ignoredCharacters comment empty

-- | Parser for comments.
comment :: Parser ()
comment = Lexer.skipLineComment "#"

lexeme :: forall a. Parser a -> Parser a
lexeme = Lexer.lexeme spaceConsumer

symbol :: T.Text -> Parser T.Text
symbol = Lexer.symbol spaceConsumer

-- | Parser for "!".
bang :: Parser Char
bang = char '!'

-- | Parser for "$".
dollar :: Parser Char
dollar = char '$'

-- | Parser for "@".
at :: Parser Char
at = char '@'

-- | Parser for "&".
amp :: Parser T.Text
amp = symbol "&"

-- | Parser for ":".
colon :: Parser T.Text
colon = symbol ":"

-- | Parser for "=".
equals :: Parser T.Text
equals = symbol "="

-- | Parser for the spread operator (...).
spread :: Parser T.Text
spread = symbol "..."

-- | Parser for "|".
pipe :: Parser T.Text
pipe = symbol "|"

-- | Parser for an expression between "(" and ")".
parens :: forall a. Parser a -> Parser a
parens = between (symbol "(") (symbol ")")

-- | Parser for an expression between "[" and "]".
brackets :: forall a. Parser a -> Parser a
brackets = between (symbol "[") (symbol "]")

-- | Parser for an expression between "{" and "}".
braces :: forall a. Parser a -> Parser a
braces = between (symbol "{") (symbol "}")
-- | Parser for strings.
string :: Parser T.Text
string = between "\"" "\"" stringValue
  where
    stringValue = T.pack <$> many stringCharacter
    stringCharacter = satisfy isStringCharacter1
        <|> escapeSequence
    isStringCharacter1 = liftA2 (&&) isSourceCharacter isChunkDelimiter

-- | Parser for block strings.
blockString :: Parser T.Text
blockString = between "\"\"\"" "\"\"\"" stringValue
  where
    stringValue = do
        byLine <- sepBy (many blockStringCharacter) lineTerminator
        let indentSize = foldr countIndent 0 $ tail byLine
            withoutIndent = head byLine : (removeIndent indentSize <$> tail byLine)
            withoutEmptyLines = liftA2 (.) dropWhile dropWhileEnd removeEmptyLine withoutIndent

        return $ T.intercalate "\n" $ T.concat <$> withoutEmptyLines
    removeEmptyLine [] = True
    removeEmptyLine [x] = T.null x || isWhiteSpace (T.head x)
    removeEmptyLine _ = False
    blockStringCharacter
        = takeWhile1P Nothing isWhiteSpace
        <|> takeWhile1P Nothing isBlockStringCharacter1
        <|> escapeTripleQuote
        <|> try (chunk "\"" <* notFollowedBy (chunk "\"\""))
    escapeTripleQuote = chunk "\\" >>= flip option (chunk "\"\"")
    isBlockStringCharacter1 = liftA2 (&&) isSourceCharacter isChunkDelimiter
    countIndent [] acc = acc
    countIndent (x:_) acc
        | T.null x = acc
        | not (isWhiteSpace $ T.head x) = acc
        | acc == 0 = T.length x
        | otherwise = min acc $ T.length x
    removeIndent n [] = []
    removeIndent n (x:chunks) = T.drop n x : chunks
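blockString implements the block-string value rules: split the body on line terminators, compute a common indentation from the lines after the first, strip it, and drop blank lines at both ends before rejoining with "\n". A worked example of that pipeline on an illustrative input (the concrete indentation below is mine, not taken from the tests):

    module BlockStringSketch where

    -- The body of an illustrative block string, as the list of its lines.
    blockInput :: [String]
    blockInput = ["", "  spans", "    multiple", "      lines", ""]

    -- The common indent of the non-blank lines after the first is two
    -- spaces; stripping it and dropping the surrounding blank lines gives:
    blockOutput :: [String]
    blockOutput = ["spans", "  multiple", "    lines"]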
-- | Parser for integers.
integer :: Integral a => Parser a
integer = Lexer.signed (pure ()) $ lexeme Lexer.decimal

-- | Parser for floating-point numbers.
float :: Parser Double
float = Lexer.signed (pure ()) $ lexeme Lexer.float

-- | Parser for names (/[_A-Za-z][_0-9A-Za-z]*/).
name :: Parser T.Text
name = do
    firstLetter <- nameFirstLetter
    rest <- many $ nameFirstLetter <|> digitChar
    _ <- spaceConsumer
    return $ TL.toStrict $ TL.cons firstLetter $ TL.pack rest
  where
    nameFirstLetter = satisfy isAsciiUpper <|> satisfy isAsciiLower <|> char '_'

isChunkDelimiter :: Char -> Bool
isChunkDelimiter = flip notElem ['"', '\\', '\n', '\r']

isWhiteSpace :: Char -> Bool
isWhiteSpace = liftA2 (||) (== ' ') (== '\t')

lineTerminator :: Parser T.Text
lineTerminator = chunk "\r\n" <|> chunk "\n" <|> chunk "\r"

isSourceCharacter :: Char -> Bool
isSourceCharacter = isSourceCharacter' . ord
  where
    isSourceCharacter' code = code >= 0x0020
        || code == 0x0009
        || code == 0x000a
        || code == 0x000d

escapeSequence :: Parser Char
escapeSequence = do
    _ <- char '\\'
    escaped <- oneOf ['"', '\\', '/', 'b', 'f', 'n', 'r', 't', 'u']
    case escaped of
        'b' -> return '\b'
        'f' -> return '\f'
        'n' -> return '\n'
        'r' -> return '\r'
        't' -> return '\t'
        'u' -> chr . foldl' step 0
            . chunkToTokens (Proxy :: Proxy T.Text)
            <$> takeP Nothing 4
        _ -> return escaped
  where
    step accumulator = (accumulator * 16 +) . digitToInt
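In the 'u' branch, takeP Nothing 4 reads exactly four characters and foldl' step 0 interprets them as hexadecimal digits, so \u1234 becomes code point 0x1234. The arithmetic as a standalone check (the module and example names are illustrative):

    module UnicodeEscapeSketch where

    import Data.Char (chr, digitToInt)
    import Data.Foldable (foldl')

    -- The same accumulator step as in escapeSequence above.
    step :: Int -> Char -> Int
    step accumulator = (accumulator * 16 +) . digitToInt

    -- (((0*16 + 1)*16 + 2)*16 + 3)*16 + 4 == 4660 == 0x1234,
    -- and chr 4660 is 'ሴ', the first escaped character the lexer test
    -- below expects from "\u1234".
    example :: Char
    example = chr (foldl' step 0 "1234")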
@@ -4,7 +4,7 @@ cabal-version: 1.12
--
-- see: https://github.com/sol/hpack
--
-- hash: 5e8ecb58b182478576a725b6da5466c8e71db7dda7735397006e2b14406ee3ad
-- hash: 06d3fa29e37864ef5e4254215c50d95942b4a33b0ea4f4d4c930a071fdcd2872

name: graphql
version: 0.3
@@ -46,16 +46,16 @@ library
      Data.GraphQL.Execute
      Data.GraphQL.Parser
      Data.GraphQL.Schema
      Language.GraphQL.Lexer
  other-modules:
      Paths_graphql
  hs-source-dirs:
      ./.
  build-depends:
      aeson
    , attoparsec
    , base >=4.7 && <5
    , megaparsec
    , scientific
    , semigroups
    , text
    , unordered-containers
  default-language: Haskell2010
@@ -64,6 +64,7 @@ test-suite tasty
  type: exitcode-stdio-1.0
  main-is: tasty.hs
  other-modules:
      Language.GraphQL.LexerTest
      Test.StarWars.Data
      Test.StarWars.QueryTests
      Test.StarWars.Schema
@@ -73,11 +74,10 @@ test-suite tasty
  ghc-options: -threaded -rtsopts -with-rtsopts=-N
  build-depends:
      aeson
    , attoparsec
    , base >=4.7 && <5
    , graphql
    , megaparsec
    , raw-strings-qq
    , semigroups
    , tasty
    , tasty-hunit
    , text
@@ -27,9 +27,8 @@ data-files:

dependencies:
- aeson
- attoparsec
- base >= 4.7 && < 5
- semigroups
- megaparsec
- text
- unordered-containers
@@ -1,4 +1,4 @@
resolver: lts-13.25
resolver: lts-13.26
packages:
- '.'
extra-deps: []
tests/Language/GraphQL/LexerTest.hs (new file, 103 lines)
@@ -0,0 +1,103 @@
{-# LANGUAGE ExplicitForAll #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Language.GraphQL.LexerTest
    ( implementation
    , reference
    ) where

import Control.Applicative (Alternative(..))
import Language.GraphQL.Lexer
import qualified Data.Text as T
import Data.Void (Void)
import Test.Tasty ( TestTree
                  , testGroup
                  )
import Test.Tasty.HUnit ( testCase
                        , (@?=)
                        )
import Text.Megaparsec ( ParseErrorBundle
                       , parse
                       )
import Text.RawString.QQ (r)

reference :: TestTree
reference = testGroup "Lexer"
    [ testCase "lexes strings" $ do
        runParser string [r|"simple"|] @?= Right "simple"
        runParser string [r|" white space "|] @?= Right " white space "
        runParser string [r|"quote \""|] @?= Right [r|quote "|]
        runParser string [r|"escaped \n"|] @?= Right "escaped \n"
        runParser string [r|"slashes \\ \/"|] @?= Right [r|slashes \ /|]
        runParser string [r|"unicode \u1234\u5678\u90AB\uCDEF"|]
            @?= Right "unicode ሴ噸邫췯"

    , testCase "lexes block string" $ do
        runParser blockString [r|"""simple"""|] @?= Right "simple"
        runParser blockString [r|""" white space """|]
            @?= Right " white space "
        runParser blockString [r|"""contains " quote"""|]
            @?= Right [r|contains " quote|]
        runParser blockString [r|"""contains \""" triplequote"""|]
            @?= Right [r|contains """ triplequote|]
        runParser blockString "\"\"\"multi\nline\"\"\"" @?= Right "multi\nline"
        runParser blockString "\"\"\"multi\rline\r\nnormalized\"\"\""
            @?= Right "multi\nline\nnormalized"
        runParser blockString "\"\"\"multi\rline\r\nnormalized\"\"\""
            @?= Right "multi\nline\nnormalized"
        runParser blockString [r|"""unescaped \n\r\b\t\f\u1234"""|]
            @?= Right [r|unescaped \n\r\b\t\f\u1234|]
        runParser blockString [r|"""slashes \\ \/"""|]
            @?= Right [r|slashes \\ \/|]
        runParser blockString [r|"""

spans
multiple
lines

"""|] @?= Right "spans\n multiple\n lines"

    , testCase "lexes numbers" $ do
        runParser integer "4" @?= Right 4
        runParser float "4.123" @?= Right 4.123
        runParser integer "-4" @?= Right (-4)
        runParser integer "9" @?= Right 9
        runParser integer "0" @?= Right 0
        runParser float "-4.123" @?= Right (-4.123)
        runParser float "0.123" @?= Right 0.123
        runParser float "123e4" @?= Right 123e4
        runParser float "123E4" @?= Right 123E4
        runParser float "123e-4" @?= Right 123e-4
        runParser float "123e+4" @?= Right 123e+4
        runParser float "-1.123e4" @?= Right (-1.123e4)
        runParser float "-1.123E4" @?= Right (-1.123E4)
        runParser float "-1.123e-4" @?= Right (-1.123e-4)
        runParser float "-1.123e+4" @?= Right (-1.123e+4)
        runParser float "-1.123e4567" @?= Right (-1.123e4567)

    , testCase "lexes punctuation" $ do
        runParser bang "!" @?= Right '!'
        runParser dollar "$" @?= Right '$'
        runBetween parens "()" @?= Right ()
        runParser spread "..." @?= Right "..."
        runParser colon ":" @?= Right ":"
        runParser equals "=" @?= Right "="
        runParser at "@" @?= Right '@'
        runBetween brackets "[]" @?= Right ()
        runBetween braces "{}" @?= Right ()
        runParser pipe "|" @?= Right "|"
    ]

implementation :: TestTree
implementation = testGroup "Lexer"
    [ testCase "lexes empty block strings" $
        runParser blockString [r|""""""|] @?= Right ""
    , testCase "lexes ampersand" $
        runParser amp "&" @?= Right "&"
    ]

runParser :: forall a. Parser a -> T.Text -> Either (ParseErrorBundle T.Text Void) a
runParser = flip parse ""

runBetween :: (Parser () -> Parser ()) -> T.Text -> Either (ParseErrorBundle T.Text Void) ()
runBetween parser = parse (parser $ pure ()) ""
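The runParser and runBetween helpers above run one lexer at a time over the whole input. The same kind of spot check works interactively with megaparsec's parseTest, which prints either the parsed value or a pretty error report (an illustrative snippet, not part of the test suite):

    {-# LANGUAGE OverloadedStrings #-}
    module LexerPlayground where

    import Language.GraphQL.Lexer (Parser, integer, name, spread)
    import Text.Megaparsec (parseTest)

    main :: IO ()
    main = do
        parseTest name "hero"                  -- prints "hero"
        parseTest (integer :: Parser Int) "42" -- prints 42
        parseTest spread "..,"                 -- prints a parse error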
@@ -1,32 +1,37 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where

#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>), (<*>))
#endif

import Data.Attoparsec.Text (parseOnly)
import qualified Data.Text.IO as Text
import Test.Tasty (TestTree, defaultMain, testGroup)
import Test.Tasty.HUnit

import qualified Data.GraphQL.Parser as Parser
import Control.Monad.IO.Class (liftIO)
import qualified Data.GraphQL.Encoder as Encoder

import qualified Test.StarWars.QueryTests as SW
import qualified Language.GraphQL.LexerTest as LexerTest
import qualified Data.GraphQL.Parser as Parser
import qualified Data.Text.IO as T.IO
import Text.Megaparsec ( errorBundlePretty
                       , parse
                       )
import Test.Tasty ( TestTree
                  , defaultMain
                  , testGroup
                  )
import Test.Tasty.HUnit ( assertEqual
                        , assertFailure
                        , testCase
                        )
import Paths_graphql (getDataFileName)
import qualified Test.StarWars.QueryTests as SW

main :: IO ()
main = defaultMain . testGroup "Tests" . (: [SW.test]) =<< kitchenTest
main = defaultMain $ testGroup "Tests"
    [ testGroup "Reference tests" [LexerTest.reference, SW.test]
    , testGroup "Implementation tests" [LexerTest.implementation]
    , kitchenTest
    ]

kitchenTest :: IO TestTree
kitchenTest = testCase "Kitchen Sink"
    <$> (assertEqual "Encode" <$> expected <*> actual)
  where
    expected = Text.readFile
        =<< getDataFileName "tests/data/kitchen-sink.min.graphql"
kitchenTest :: TestTree
kitchenTest = testCase "Kitchen Sink" $ do
    dataFileName <- getDataFileName "tests/data/kitchen-sink.min.graphql"
    expected <- T.IO.readFile dataFileName

    actual = either (error "Parsing error!") Encoder.document
        . parseOnly Parser.document
        <$> expected
    either
        (assertFailure . errorBundlePretty)
        (assertEqual "Encode" expected . Encoder.document)
        $ parse Parser.document dataFileName expected