forked from OSS/graphql

Parse queries with megaparsec

commit 5e9bf9648d
parent ce169ecef2
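
A minimal sketch, not part of the diff, of how the reworked entry point is driven after this change; `document` comes from Data.GraphQL.Parser, `parse` and `errorBundlePretty` from megaparsec, and the surrounding `main` module is illustrative only:

    {-# LANGUAGE OverloadedStrings #-}
    module Main where

    import Data.GraphQL.Parser (document)
    import Text.Megaparsec (errorBundlePretty, parse)

    main :: IO ()
    main = case parse document "<interactive>" "{ user { name } }" of
        -- render the megaparsec error bundle as a readable message
        Left bundle -> putStrLn (errorBundlePretty bundle)
        Right _ast  -> putStrLn "parsed a GraphQL Document"
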
@@ -3,10 +3,12 @@ module Data.GraphQL where
 
 import Control.Applicative (Alternative)
 
-import Data.Text (Text)
+import qualified Data.Text as T
 
 import qualified Data.Aeson as Aeson
 
-import qualified Data.Attoparsec.Text as Attoparsec
+import Text.Megaparsec ( errorBundlePretty
+                        , parse
+                        )
 
 import Data.GraphQL.Execute
 import Data.GraphQL.Parser
@@ -19,7 +21,7 @@ import Data.GraphQL.Error
 -- executed according to the given 'Schema'.
 --
 -- Returns the response as an @Aeson.@'Aeson.Value'.
-graphql :: (Alternative m, Monad m) => Schema m -> Text -> m Aeson.Value
+graphql :: (Alternative m, Monad m) => Schema m -> T.Text -> m Aeson.Value
 graphql = flip graphqlSubs $ const Nothing
 
 -- | Takes a 'Schema', a variable substitution function and text
@@ -28,7 +30,7 @@ graphql = flip graphqlSubs $ const Nothing
 -- query and the query is then executed according to the given 'Schema'.
 --
 -- Returns the response as an @Aeson.@'Aeson.Value'.
-graphqlSubs :: (Alternative m, Monad m) => Schema m -> Subs -> Text -> m Aeson.Value
+graphqlSubs :: (Alternative m, Monad m) => Schema m -> Subs -> T.Text -> m Aeson.Value
 graphqlSubs schema f =
-    either parseError (execute schema f)
-    . Attoparsec.parseOnly document
+    either (parseError . errorBundlePretty) (execute schema f)
+    . parse document ""

@@ -1,4 +1,5 @@
 {-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TupleSections #-}
 module Data.GraphQL.Error (
     parseError,
     CollectErrsT,
@@ -31,7 +32,7 @@ joinErrs = fmap $ fmap fst &&& concatMap snd
 
 -- | Wraps the given 'Applicative' to handle errors
 errWrap :: Functor f => f a -> f (a, [Aeson.Value])
-errWrap = fmap (flip (,) [])
+errWrap = fmap (, [])
 
 -- | Adds an error to the list of errors.
 addErr :: Functor f => Aeson.Value -> CollectErrsT f a -> CollectErrsT f a

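The new `TupleSections` pragma is what lets `errWrap` be written as `fmap (, [])`. A tiny standalone sketch of that sugar (module and function names here are made up for illustration):

    {-# LANGUAGE TupleSections #-}
    module TupleSectionsSketch where

    -- (, []) is shorthand for \x -> (x, []): pair a value with an empty error list.
    pairWithNoErrors :: a -> (a, [e])
    pairWithNoErrors = (, [])

    main :: IO ()
    main = print (pairWithNoErrors 'x' :: (Char, [String]))
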
@@ -1,50 +1,22 @@
-{-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE LambdaCase #-}
--- | This module defines a parser for @GraphQL@ request documents.
+{-# LANGUAGE OverloadedStrings #-}
 module Data.GraphQL.Parser where
 
-import Prelude hiding (takeWhile)
-
-import Control.Applicative ((<|>), Alternative, empty, many, optional)
-import Control.Monad (when)
-import Data.Char (isDigit, isSpace)
-import Data.Foldable (traverse_)
-import Data.Monoid ((<>))
-import Data.List.NonEmpty (NonEmpty((:|)))
-import Data.Scientific (floatingOrInteger, scientific, toBoundedInteger)
-
-import Data.Text (Text, append)
-import Data.Attoparsec.Combinator (lookAhead)
-import Data.Attoparsec.Text
-  ( Parser
-  , (<?>)
-  , anyChar
-  , endOfLine
-  , inClass
-  , many1
-  , manyTill
-  , option
-  , peekChar
-  , takeWhile
-  , takeWhile1
-  )
-import qualified Data.Attoparsec.Text as Attoparsec (scientific)
-
+import Control.Applicative ( Alternative(..)
+                           , optional
+                           )
 import Data.GraphQL.AST
-
--- * Name
-
-name :: Parser Name
-name = tok $ append <$> takeWhile1 isA_z
-    <*> takeWhile ((||) <$> isDigit <*> isA_z)
-  where
-    -- `isAlpha` handles many more Unicode Chars
-    isA_z = inClass $ '_' : ['A'..'Z'] <> ['a'..'z']
-
--- * Document
+import Language.GraphQL.Lexer
+import Data.List.NonEmpty (NonEmpty(..))
+import qualified Data.Text as T
+import Text.Megaparsec ( lookAhead
+                       , option
+                       , try
+                       , (<?>)
+                       )
 
 document :: Parser Document
-document = whiteSpace *> manyNE definition
+document = spaceConsumer >> lexeme (manyNE definition)
 
 definition :: Parser Definition
 definition = DefinitionOperation <$> operationDefinition
@@ -61,9 +33,9 @@ operationDefinition = OperationSelectionSet <$> selectionSet
     <?> "operationDefinition error"
 
 operationType :: Parser OperationType
-operationType = Query <$ tok "query"
-    <|> Mutation <$ tok "mutation"
+operationType = Query <$ symbol "query"
+    <|> Mutation <$ symbol "mutation"
     <?> "operationType error"
 
 -- * SelectionSet
 
@@ -71,11 +43,11 @@ selectionSet :: Parser SelectionSet
 selectionSet = braces $ manyNE selection
 
 selectionSetOpt :: Parser SelectionSetOpt
-selectionSetOpt = braces $ many1 selection
+selectionSetOpt = braces $ some selection
 
 selection :: Parser Selection
 selection = SelectionField <$> field
-    <|> SelectionFragmentSpread <$> fragmentSpread
+    <|> try (SelectionFragmentSpread <$> fragmentSpread)
     <|> SelectionInlineFragment <$> inlineFragment
     <?> "selection error!"
 
@@ -89,160 +61,124 @@ field = Field <$> optional alias
     <*> opt selectionSetOpt
 
 alias :: Parser Alias
-alias = name <* tok ":"
+alias = try $ name <* colon
 
 -- * Arguments
 
 arguments :: Parser Arguments
-arguments = parens $ many1 argument
+arguments = parens $ some argument
 
 argument :: Parser Argument
-argument = Argument <$> name <* tok ":" <*> value
+argument = Argument <$> name <* colon <*> value
 
 -- * Fragments
 
 fragmentSpread :: Parser FragmentSpread
-fragmentSpread = FragmentSpread <$ tok "..."
+fragmentSpread = FragmentSpread <$ spread
     <*> fragmentName
    <*> opt directives
 
 inlineFragment :: Parser InlineFragment
-inlineFragment = InlineFragment <$ tok "..."
+inlineFragment = InlineFragment <$ spread
    <*> optional typeCondition
    <*> opt directives
    <*> selectionSet
 
 fragmentDefinition :: Parser FragmentDefinition
 fragmentDefinition = FragmentDefinition
-    <$ tok "fragment"
+    <$ symbol "fragment"
    <*> name
    <*> typeCondition
    <*> opt directives
    <*> selectionSet
 
 fragmentName :: Parser FragmentName
-fragmentName = but (tok "on") *> name
+fragmentName = but (symbol "on") *> name
 
 typeCondition :: Parser TypeCondition
-typeCondition = tok "on" *> name
+typeCondition = symbol "on" *> name
 
 -- * Input Values
 
 value :: Parser Value
 value = ValueVariable <$> variable
-    <|> tok floatOrInt32Value
+    <|> ValueFloat <$> try float
+    <|> ValueInt <$> integer
    <|> ValueBoolean <$> booleanValue
-    <|> ValueNull <$ tok "null"
-    <|> ValueString <$> stringValue
-    <|> ValueEnum <$> enumValue
+    <|> ValueNull <$ symbol "null"
+    <|> ValueString <$> string
+    <|> ValueString <$> blockString
+    <|> ValueEnum <$> try enumValue
    <|> ValueList <$> listValue
    <|> ValueObject <$> objectValue
    <?> "value error!"
   where
    booleanValue :: Parser Bool
-    booleanValue = True <$ tok "true"
-        <|> False <$ tok "false"
+    booleanValue = True <$ symbol "true"
+        <|> False <$ symbol "false"
 
-    floatOrInt32Value :: Parser Value
-    floatOrInt32Value =
-        Attoparsec.scientific >>=
-            either (pure . ValueFloat)
-                   (maybe (fail "Integer value is out of range.")
-                          (pure . ValueInt)
-                   . toBoundedInteger . (`scientific` 0))
-            . floatingOrInteger
-
-    -- TODO: Escape characters. Look at `jsstring_` in aeson package.
-    stringValue :: Parser Text
-    stringValue = quotes (takeWhile (/= '"'))
-
    enumValue :: Parser Name
-    enumValue = but (tok "true") *> but (tok "false") *> but (tok "null") *> name
+    enumValue = but (symbol "true") *> but (symbol "false") *> but (symbol "null") *> name
 
    listValue :: Parser [Value]
-    listValue = brackets $ many1 value
+    listValue = brackets $ some value
 
    objectValue :: Parser [ObjectField]
-    objectValue = braces $ many1 objectField
+    objectValue = braces $ some objectField
 
 objectField :: Parser ObjectField
-objectField = ObjectField <$> name <* tok ":" <*> value
+objectField = ObjectField <$> name <* symbol ":" <*> value
 
 -- * Variables
 
 variableDefinitions :: Parser VariableDefinitions
-variableDefinitions = parens $ many1 variableDefinition
+variableDefinitions = parens $ some variableDefinition
 
 variableDefinition :: Parser VariableDefinition
 variableDefinition = VariableDefinition <$> variable
-    <* tok ":"
+    <* colon
    <*> type_
    <*> optional defaultValue
 
 variable :: Parser Variable
-variable = tok "$" *> name
+variable = dollar *> name
 
 defaultValue :: Parser DefaultValue
-defaultValue = tok "=" *> value
+defaultValue = equals *> value
 
 -- * Input Types
 
 type_ :: Parser Type
-type_ = TypeNamed <$> name <* but "!"
+type_ = try (TypeNamed <$> name <* but "!")
    <|> TypeList <$> brackets type_
    <|> TypeNonNull <$> nonNullType
    <?> "type_ error!"
 
 nonNullType :: Parser NonNullType
-nonNullType = NonNullTypeNamed <$> name <* tok "!"
-    <|> NonNullTypeList <$> brackets type_ <* tok "!"
+nonNullType = NonNullTypeNamed <$> name <* bang
+    <|> NonNullTypeList <$> brackets type_ <* bang
    <?> "nonNullType error!"
 
 -- * Directives
 
 directives :: Parser Directives
-directives = many1 directive
+directives = some directive
 
 directive :: Parser Directive
 directive = Directive
-    <$ tok "@"
+    <$ at
    <*> name
    <*> opt arguments
 
 -- * Internal
 
-tok :: Parser a -> Parser a
-tok p = p <* whiteSpace
-
-parens :: Parser a -> Parser a
-parens = between "(" ")"
-
-braces :: Parser a -> Parser a
-braces = between "{" "}"
-
-quotes :: Parser a -> Parser a
-quotes = between "\"" "\""
-
-brackets :: Parser a -> Parser a
-brackets = between "[" "]"
-
-between :: Parser Text -> Parser Text -> Parser a -> Parser a
-between open close p = tok open *> p <* tok close
-
 opt :: Monoid a => Parser a -> Parser a
 opt = option mempty
 
 -- Hack to reverse parser success
 but :: Parser a -> Parser ()
 but pn = False <$ lookAhead pn <|> pure True >>= \case
    False -> empty
    True -> pure ()
 
 manyNE :: Alternative f => f a -> f (NonEmpty a)
 manyNE p = (:|) <$> p <*> many p
-
-whiteSpace :: Parser ()
-whiteSpace = peekChar >>= traverse_ (\c ->
-    if isSpace c || c == ','
-        then anyChar *> whiteSpace
-        else when (c == '#') $ manyTill anyChar endOfLine *> whiteSpace)

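Unlike attoparsec, megaparsec's `<|>` does not backtrack once a branch has consumed input, which is why the rewritten parser wraps prefix-sharing alternatives (`alias`, the fragment-spread selection, `float`, `enumValue`, the named `type_` case) in `try`. A self-contained sketch of the effect, using made-up names that are not part of the diff:

    {-# LANGUAGE OverloadedStrings #-}
    module TrySketch where

    import Control.Applicative ((<|>))
    import Data.Text (Text)
    import Data.Void (Void)
    import Text.Megaparsec (Parsec, parseMaybe, try)
    import qualified Text.Megaparsec.Char.Lexer as Lexer

    type P = Parsec Void Text

    -- "12" is a valid prefix of a float, so Lexer.float consumes it before
    -- failing at the missing fraction; without `try`, the integer branch
    -- would never be attempted.
    number :: P (Either Double Integer)
    number = Left <$> try Lexer.float <|> Right <$> Lexer.decimal

    main :: IO ()
    main = do
        print (parseMaybe number "12.5") -- Just (Left 12.5)
        print (parseMaybe number "12")   -- Just (Right 12), thanks to `try`
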
Language/GraphQL/Lexer.hs (new file, 218 lines)
@@ -0,0 +1,218 @@
+{-# LANGUAGE ExplicitForAll #-}
+{-# LANGUAGE OverloadedStrings #-}
+module Language.GraphQL.Lexer
+    ( Parser
+    , amp
+    , at
+    , bang
+    , blockString
+    , braces
+    , brackets
+    , colon
+    , dollar
+    , comment
+    , equals
+    , integer
+    , float
+    , lexeme
+    , name
+    , parens
+    , pipe
+    , spaceConsumer
+    , spread
+    , string
+    , symbol
+    ) where
+
+import Control.Applicative ( Alternative(..)
+                           , liftA2
+                           )
+import Data.Char ( chr
+                 , digitToInt
+                 , isAsciiLower
+                 , isAsciiUpper
+                 , ord
+                 )
+import Data.Foldable (foldl')
+import Data.List (dropWhileEnd)
+import Data.Proxy (Proxy(..))
+import Data.Void (Void)
+import Text.Megaparsec ( Parsec
+                       , MonadParsec
+                       , Token
+                       , between
+                       , chunk
+                       , chunkToTokens
+                       , lookAhead
+                       , notFollowedBy
+                       , oneOf
+                       , option
+                       , satisfy
+                       , sepBy
+                       , skipSome
+                       , takeP
+                       , takeWhile1P
+                       , try
+                       )
+import Text.Megaparsec.Char ( char
+                            , digitChar
+                            , space1
+                            )
+import qualified Text.Megaparsec.Char.Lexer as Lexer
+import qualified Data.Text as T
+import qualified Data.Text.Lazy as TL
+
+-- | Standard parser.
+-- Accepts the type of the parsed token.
+type Parser = Parsec Void T.Text
+
+ignoredCharacters :: Parser ()
+ignoredCharacters = space1 <|> skipSome (char ',')
+
+spaceConsumer :: Parser ()
+spaceConsumer = Lexer.space ignoredCharacters comment empty
+
+-- | Parser for comments.
+comment :: Parser ()
+comment = Lexer.skipLineComment "#"
+
+lexeme :: forall a. Parser a -> Parser a
+lexeme = Lexer.lexeme spaceConsumer
+
+symbol :: T.Text -> Parser T.Text
+symbol = Lexer.symbol spaceConsumer
+
+-- | Parser for "!".
+bang :: Parser Char
+bang = char '!'
+
+-- | Parser for "$".
+dollar :: Parser Char
+dollar = char '$'
+
+-- | Parser for "@".
+at :: Parser Char
+at = char '@'
+
+-- | Parser for "&".
+amp :: Parser T.Text
+amp = symbol "&"
+
+-- | Parser for ":".
+colon :: Parser T.Text
+colon = symbol ":"
+
+-- | Parser for "=".
+equals :: Parser T.Text
+equals = symbol "="
+
+-- | Parser for the spread operator (...).
+spread :: Parser T.Text
+spread = symbol "..."
+
+-- | Parser for "|".
+pipe :: Parser T.Text
+pipe = symbol "|"
+
+-- | Parser for an expression between "(" and ")".
+parens :: forall a. Parser a -> Parser a
+parens = between (symbol "(") (symbol ")")
+
+-- | Parser for an expression between "[" and "]".
+brackets :: forall a. Parser a -> Parser a
+brackets = between (symbol "[") (symbol "]")
+
+-- | Parser for an expression between "{" and "}".
+braces :: forall a. Parser a -> Parser a
+braces = between (symbol "{") (symbol "}")
+
+-- | Parser for strings.
+string :: Parser T.Text
+string = between "\"" "\"" stringValue
+  where
+    stringValue = T.pack <$> many stringCharacter
+    stringCharacter = satisfy isStringCharacter1
+        <|> escapeSequence
+    isStringCharacter1 = liftA2 (&&) isSourceCharacter isChunkDelimiter
+
+-- | Parser for block strings.
+blockString :: Parser T.Text
+blockString = between "\"\"\"" "\"\"\"" stringValue
+  where
+    stringValue = do
+        byLine <- sepBy (many blockStringCharacter) lineTerminator
+        let indentSize = foldr countIndent 0 $ tail byLine
+            withoutIndent = head byLine : (removeIndent indentSize <$> tail byLine)
+            withoutEmptyLines = liftA2 (.) dropWhile dropWhileEnd removeEmptyLine withoutIndent
+
+        return $ T.intercalate "\n" $ T.concat <$> withoutEmptyLines
+    removeEmptyLine [] = True
+    removeEmptyLine [x] = T.null x || isWhiteSpace (T.head x)
+    removeEmptyLine _ = False
+    blockStringCharacter
+        = takeWhile1P Nothing isWhiteSpace
+        <|> takeWhile1P Nothing isBlockStringCharacter1
+        <|> escapeTripleQuote
+        <|> try (chunk "\"" <* notFollowedBy (chunk "\"\""))
+    escapeTripleQuote = chunk "\\" >>= flip option (chunk "\"\"")
+    isBlockStringCharacter1 = liftA2 (&&) isSourceCharacter isChunkDelimiter
+    countIndent [] acc = acc
+    countIndent (x:_) acc
+        | T.null x = acc
+        | not (isWhiteSpace $ T.head x) = acc
+        | acc == 0 = T.length x
+        | otherwise = min acc $ T.length x
+    removeIndent n [] = []
+    removeIndent n (x:chunks) = T.drop n x : chunks
+
+-- | Parser for integers.
+integer :: Integral a => Parser a
+integer = Lexer.signed (pure ()) $ lexeme Lexer.decimal
+
+-- | Parser for floating-point numbers.
+float :: Parser Double
+float = Lexer.signed (pure ()) $ lexeme Lexer.float
+
+-- | Parser for names (/[_A-Za-z][_0-9A-Za-z]*/).
+name :: Parser T.Text
+name = do
+    firstLetter <- nameFirstLetter
+    rest <- many $ nameFirstLetter <|> digitChar
+    _ <- spaceConsumer
+    return $ TL.toStrict $ TL.cons firstLetter $ TL.pack rest
+  where
+    nameFirstLetter = satisfy isAsciiUpper <|> satisfy isAsciiLower <|> char '_'
+
+isChunkDelimiter :: Char -> Bool
+isChunkDelimiter = flip notElem ['"', '\\', '\n', '\r']
+
+isWhiteSpace :: Char -> Bool
+isWhiteSpace = liftA2 (||) (== ' ') (== '\t')
+
+lineTerminator :: Parser T.Text
+lineTerminator = chunk "\r\n" <|> chunk "\n" <|> chunk "\r"
+
+isSourceCharacter :: Char -> Bool
+isSourceCharacter = isSourceCharacter' . ord
+  where
+    isSourceCharacter' code = code >= 0x0020
+        || code == 0x0009
+        || code == 0x000a
+        || code == 0x000d
+
+escapeSequence :: Parser Char
+escapeSequence = do
+    _ <- char '\\'
+    escaped <- oneOf ['"', '\\', '/', 'b', 'f', 'n', 'r', 't', 'u']
+    case escaped of
+        'b' -> return '\b'
+        'f' -> return '\f'
+        'n' -> return '\n'
+        'r' -> return '\r'
+        't' -> return '\t'
+        'u' -> chr . foldl' step 0
+            . chunkToTokens (Proxy :: Proxy T.Text)
+            <$> takeP Nothing 4
+        _ -> return escaped
+  where
+    step accumulator = (accumulator * 16 +) . digitToInt

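The new lexer follows megaparsec's standard lexeme discipline: every token parser consumes the ignored characters that follow it (white space, commas, and "#" line comments, via `spaceConsumer`), so the grammar in Data.GraphQL.Parser never has to mention them. A small usage sketch, not part of the diff (the `pair` parser and the input are made up):

    {-# LANGUAGE OverloadedStrings #-}
    module LexerSketch where

    import qualified Data.Text as T
    import Language.GraphQL.Lexer (Parser, colon, name, spaceConsumer)
    import Text.Megaparsec (parseMaybe)

    -- One "key : value" pair; all surrounding noise is eaten by the lexer.
    pair :: Parser (T.Text, T.Text)
    pair = (,) <$> name <* colon <*> name

    main :: IO ()
    main = print $ parseMaybe (spaceConsumer *> pair)
        " # a comment\n  hero : name ,,, "
    -- prints Just ("hero","name")
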
@@ -4,7 +4,7 @@ cabal-version: 1.12
 --
 -- see: https://github.com/sol/hpack
 --
--- hash: 5e8ecb58b182478576a725b6da5466c8e71db7dda7735397006e2b14406ee3ad
+-- hash: 06d3fa29e37864ef5e4254215c50d95942b4a33b0ea4f4d4c930a071fdcd2872
 
 name: graphql
 version: 0.3
@@ -46,16 +46,16 @@ library
       Data.GraphQL.Execute
       Data.GraphQL.Parser
       Data.GraphQL.Schema
+      Language.GraphQL.Lexer
   other-modules:
       Paths_graphql
   hs-source-dirs:
       ./.
   build-depends:
       aeson
-    , attoparsec
     , base >=4.7 && <5
+    , megaparsec
     , scientific
-    , semigroups
     , text
     , unordered-containers
   default-language: Haskell2010
@@ -64,6 +64,7 @@ test-suite tasty
   type: exitcode-stdio-1.0
   main-is: tasty.hs
   other-modules:
+      Language.GraphQL.LexerTest
       Test.StarWars.Data
      Test.StarWars.QueryTests
      Test.StarWars.Schema
@@ -73,11 +74,10 @@ test-suite tasty
   ghc-options: -threaded -rtsopts -with-rtsopts=-N
   build-depends:
       aeson
-    , attoparsec
     , base >=4.7 && <5
     , graphql
+    , megaparsec
     , raw-strings-qq
-    , semigroups
     , tasty
     , tasty-hunit
     , text

@@ -27,9 +27,8 @@ data-files:
 
 dependencies:
 - aeson
-- attoparsec
 - base >= 4.7 && < 5
-- semigroups
+- megaparsec
 - text
 - unordered-containers
 

@@ -1,4 +1,4 @@
-resolver: lts-13.25
+resolver: lts-13.26
 packages:
 - '.'
 extra-deps: []

tests/Language/GraphQL/LexerTest.hs (new file, 103 lines)
@@ -0,0 +1,103 @@
+{-# LANGUAGE ExplicitForAll #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+module Language.GraphQL.LexerTest
+    ( implementation
+    , reference
+    ) where
+
+import Control.Applicative (Alternative(..))
+import Language.GraphQL.Lexer
+import qualified Data.Text as T
+import Data.Void (Void)
+import Test.Tasty ( TestTree
+                  , testGroup
+                  )
+import Test.Tasty.HUnit ( testCase
+                        , (@?=)
+                        )
+import Text.Megaparsec ( ParseErrorBundle
+                       , parse
+                       )
+import Text.RawString.QQ (r)
+
+reference :: TestTree
+reference = testGroup "Lexer"
+    [ testCase "lexes strings" $ do
+        runParser string [r|"simple"|] @?= Right "simple"
+        runParser string [r|" white space "|] @?= Right " white space "
+        runParser string [r|"quote \""|] @?= Right [r|quote "|]
+        runParser string [r|"escaped \n"|] @?= Right "escaped \n"
+        runParser string [r|"slashes \\ \/"|] @?= Right [r|slashes \ /|]
+        runParser string [r|"unicode \u1234\u5678\u90AB\uCDEF"|]
+            @?= Right "unicode ሴ噸邫췯"
+
+    , testCase "lexes block string" $ do
+        runParser blockString [r|"""simple"""|] @?= Right "simple"
+        runParser blockString [r|""" white space """|]
+            @?= Right " white space "
+        runParser blockString [r|"""contains " quote"""|]
+            @?= Right [r|contains " quote|]
+        runParser blockString [r|"""contains \""" triplequote"""|]
+            @?= Right [r|contains """ triplequote|]
+        runParser blockString "\"\"\"multi\nline\"\"\"" @?= Right "multi\nline"
+        runParser blockString "\"\"\"multi\rline\r\nnormalized\"\"\""
+            @?= Right "multi\nline\nnormalized"
+        runParser blockString "\"\"\"multi\rline\r\nnormalized\"\"\""
+            @?= Right "multi\nline\nnormalized"
+        runParser blockString [r|"""unescaped \n\r\b\t\f\u1234"""|]
+            @?= Right [r|unescaped \n\r\b\t\f\u1234|]
+        runParser blockString [r|"""slashes \\ \/"""|]
+            @?= Right [r|slashes \\ \/|]
+        runParser blockString [r|"""
+
+            spans
+            multiple
+            lines
+
+            """|] @?= Right "spans\n multiple\n lines"
+
+    , testCase "lexes numbers" $ do
+        runParser integer "4" @?= Right 4
+        runParser float "4.123" @?= Right 4.123
+        runParser integer "-4" @?= Right (-4)
+        runParser integer "9" @?= Right 9
+        runParser integer "0" @?= Right 0
+        runParser float "-4.123" @?= Right (-4.123)
+        runParser float "0.123" @?= Right 0.123
+        runParser float "123e4" @?= Right 123e4
+        runParser float "123E4" @?= Right 123E4
+        runParser float "123e-4" @?= Right 123e-4
+        runParser float "123e+4" @?= Right 123e+4
+        runParser float "-1.123e4" @?= Right (-1.123e4)
+        runParser float "-1.123E4" @?= Right (-1.123E4)
+        runParser float "-1.123e-4" @?= Right (-1.123e-4)
+        runParser float "-1.123e+4" @?= Right (-1.123e+4)
+        runParser float "-1.123e4567" @?= Right (-1.123e4567)
+
+    , testCase "lexes punctuation" $ do
+        runParser bang "!" @?= Right '!'
+        runParser dollar "$" @?= Right '$'
+        runBetween parens "()" @?= Right ()
+        runParser spread "..." @?= Right "..."
+        runParser colon ":" @?= Right ":"
+        runParser equals "=" @?= Right "="
+        runParser at "@" @?= Right '@'
+        runBetween brackets "[]" @?= Right ()
+        runBetween braces "{}" @?= Right ()
+        runParser pipe "|" @?= Right "|"
+    ]
+
+implementation :: TestTree
+implementation = testGroup "Lexer"
+    [ testCase "lexes empty block strings" $
+        runParser blockString [r|""""""|] @?= Right ""
+    , testCase "lexes ampersand" $
+        runParser amp "&" @?= Right "&"
+    ]
+
+runParser :: forall a. Parser a -> T.Text -> Either (ParseErrorBundle T.Text Void) a
+runParser = flip parse ""
+
+runBetween :: (Parser () -> Parser ()) -> T.Text -> Either (ParseErrorBundle T.Text Void) ()
+runBetween parser = parse (parser $ pure ()) ""

@@ -1,32 +1,37 @@
-{-# LANGUAGE CPP #-}
-{-# LANGUAGE OverloadedStrings #-}
 module Main where
 
-#if !MIN_VERSION_base(4,8,0)
-import Control.Applicative ((<$>), (<*>))
-#endif
-
-import Data.Attoparsec.Text (parseOnly)
-import qualified Data.Text.IO as Text
-import Test.Tasty (TestTree, defaultMain, testGroup)
-import Test.Tasty.HUnit
-
-import qualified Data.GraphQL.Parser as Parser
+import Control.Monad.IO.Class (liftIO)
 import qualified Data.GraphQL.Encoder as Encoder
-import qualified Test.StarWars.QueryTests as SW
+import qualified Language.GraphQL.LexerTest as LexerTest
+import qualified Data.GraphQL.Parser as Parser
+import qualified Data.Text.IO as T.IO
+import Text.Megaparsec ( errorBundlePretty
+                        , parse
+                        )
+import Test.Tasty ( TestTree
+                  , defaultMain
+                  , testGroup
+                  )
+import Test.Tasty.HUnit ( assertEqual
+                        , assertFailure
+                        , testCase
+                        )
 import Paths_graphql (getDataFileName)
+import qualified Test.StarWars.QueryTests as SW
 
 main :: IO ()
-main = defaultMain . testGroup "Tests" . (: [SW.test]) =<< kitchenTest
+main = defaultMain $ testGroup "Tests"
+    [ testGroup "Reference tests" [LexerTest.reference, SW.test]
+    , testGroup "Implementation tests" [LexerTest.implementation]
+    , kitchenTest
+    ]
 
-kitchenTest :: IO TestTree
-kitchenTest = testCase "Kitchen Sink"
-    <$> (assertEqual "Encode" <$> expected <*> actual)
-  where
-    expected = Text.readFile
-        =<< getDataFileName "tests/data/kitchen-sink.min.graphql"
-
-    actual = either (error "Parsing error!") Encoder.document
-        . parseOnly Parser.document
-        <$> expected
+kitchenTest :: TestTree
+kitchenTest = testCase "Kitchen Sink" $ do
+    dataFileName <- getDataFileName "tests/data/kitchen-sink.min.graphql"
+    expected <- T.IO.readFile dataFileName
+
+    either
+        (assertFailure . errorBundlePretty)
+        (assertEqual "Encode" expected . Encoder.document)
+        $ parse Parser.document dataFileName expected