package parser

import (
"fmt"
"github.com/graphql-go/graphql/gqlerrors"
"github.com/graphql-go/graphql/language/ast"
"github.com/graphql-go/graphql/language/lexer"
"github.com/graphql-go/graphql/language/source"
)
type parseFn func(parser *Parser) (interface{}, error)
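// ParseOptions controls optional parser behaviour: NoLocation omits
// Location nodes from the resulting AST, and NoSource omits the Source
// reference from any Location nodes that are recorded.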
type ParseOptions struct {
NoLocation bool
NoSource bool
}
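// ParseParams bundles the source to parse (either a *source.Source or a
// plain string body) with the ParseOptions to apply.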
type ParseParams struct {
Source interface{}
Options ParseOptions
}
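// Parser holds the lexing function, the source being parsed, and the
// current and previous token positions used by the parse functions below.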
type Parser struct {
LexToken lexer.Lexer
Source *source.Source
Options ParseOptions
PrevEnd int
Token lexer.Token
}
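// Parse parses a GraphQL source into an *ast.Document. The Source field of
// p may be either a *source.Source or a raw query string.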
func Parse(p ParseParams) (*ast.Document, error) {
var sourceObj *source.Source
switch src := p.Source.(type) {
case *source.Source:
sourceObj = src
default:
body, _ := src.(string)
sourceObj = source.NewSource(&source.Source{Body: body})
}
parser, err := makeParser(sourceObj, p.Options)
if err != nil {
return nil, err
}
doc, err := parseDocument(parser)
if err != nil {
return nil, err
}
return doc, nil
}
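// exampleParseUsage is an editor-added sketch (not part of the upstream
// file) showing a typical call to Parse with a raw query string; the query
// text and option values are illustrative only.
func exampleParseUsage() (*ast.Document, error) {
    return Parse(ParseParams{
        Source:  "query Hero { hero { name friends { name } } }",
        Options: ParseOptions{NoSource: true},
    })
}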
// TODO: test and expose parseValue as a public API.
func parseValue(p ParseParams) (ast.Value, error) {
var value ast.Value
var sourceObj *source.Source
switch src := p.Source.(type) {
case *source.Source:
sourceObj = src
default:
body, _ := src.(string)
sourceObj = source.NewSource(&source.Source{Body: body})
}
parser, err := makeParser(sourceObj, p.Options)
if err != nil {
return value, err
}
value, err = parseValueLiteral(parser, false)
if err != nil {
return value, err
}
return value, nil
}
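// exampleParseValueUsage is an editor-added sketch (not part of the
// upstream file) showing how parseValue (see the TODO above) turns a bare
// value literal into an ast.Value node; the literal below is illustrative.
func exampleParseValueUsage() (ast.Value, error) {
    return parseValue(ParseParams{Source: `["label", 42, 3.14, true, RED]`})
}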
// Converts a name lex token into a name parse node.
func parseName(parser *Parser) (*ast.Name, error) {
token, err := expect(parser, lexer.TokenKind[lexer.NAME])
if err != nil {
return nil, err
}
return ast.NewName(&ast.Name{
Value: token.Value,
Loc: loc(parser, token.Start),
}), nil
}
func makeParser(s *source.Source, opts ParseOptions) (*Parser, error) {
lexToken := lexer.Lex(s)
token, err := lexToken(0)
if err != nil {
return &Parser{}, err
}
return &Parser{
LexToken: lexToken,
Source: s,
Options: opts,
PrevEnd: 0,
Token: token,
}, nil
}
/* Implements the parsing rules in the Document section. */
func parseDocument(parser *Parser) (*ast.Document, error) {
start := parser.Token.Start
var nodes []ast.Node
for {
if skp, err := skip(parser, lexer.TokenKind[lexer.EOF]); err != nil {
return nil, err
} else if skp {
break
}
if peek(parser, lexer.TokenKind[lexer.BRACE_L]) {
node, err := parseOperationDefinition(parser)
if err != nil {
return nil, err
}
nodes = append(nodes, node)
} else if peek(parser, lexer.TokenKind[lexer.NAME]) {
switch parser.Token.Value {
case "query":
fallthrough
case "mutation":
fallthrough
case "subscription": // Note: subscription is an experimental non-spec addition.
node, err := parseOperationDefinition(parser)
if err != nil {
return nil, err
}
nodes = append(nodes, node)
case "fragment":
node, err := parseFragmentDefinition(parser)
if err != nil {
return nil, err
}
nodes = append(nodes, node)
case "type":
node, err := parseObjectTypeDefinition(parser)
if err != nil {
return nil, err
}
nodes = append(nodes, node)
case "interface":
node, err := parseInterfaceTypeDefinition(parser)
if err != nil {
return nil, err
}
nodes = append(nodes, node)
case "union":
node, err := parseUnionTypeDefinition(parser)
if err != nil {
return nil, err
}
nodes = append(nodes, node)
case "scalar":
node, err := parseScalarTypeDefinition(parser)
if err != nil {
return nil, err
}
nodes = append(nodes, node)
case "enum":
node, err := parseEnumTypeDefinition(parser)
if err != nil {
return nil, err
}
nodes = append(nodes, node)
case "input":
node, err := parseInputObjectTypeDefinition(parser)
if err != nil {
return nil, err
}
nodes = append(nodes, node)
case "extend":
node, err := parseTypeExtensionDefinition(parser)
if err != nil {
return nil, err
}
nodes = append(nodes, node)
default:
if err := unexpected(parser, lexer.Token{}); err != nil {
return nil, err
}
}
} else {
if err := unexpected(parser, lexer.Token{}); err != nil {
return nil, err
}
}
}
return ast.NewDocument(&ast.Document{
Loc: loc(parser, start),
Definitions: nodes,
}), nil
}
/* Implements the parsing rules in the Operations section. */
/**
* OperationDefinition :
* - SelectionSet
* - OperationType Name? VariableDefinitions? Directives? SelectionSet
*
 * OperationType : one of query mutation subscription
*/
func parseOperationDefinition(parser *Parser) (*ast.OperationDefinition, error) {
start := parser.Token.Start
if peek(parser, lexer.TokenKind[lexer.BRACE_L]) {
selectionSet, err := parseSelectionSet(parser)
if err != nil {
return nil, err
}
return ast.NewOperationDefinition(&ast.OperationDefinition{
Operation: "query",
Directives: []*ast.Directive{},
SelectionSet: selectionSet,
Loc: loc(parser, start),
}), nil
}
operationToken, err := expect(parser, lexer.TokenKind[lexer.NAME])
if err != nil {
return nil, err
}
operation := ""
switch operationToken.Value {
case "mutation":
fallthrough
case "subscription":
fallthrough
case "query":
operation = operationToken.Value
default:
return nil, unexpected(parser, operationToken)
}
var name *ast.Name
if peek(parser, lexer.TokenKind[lexer.NAME]) {
name, err = parseName(parser)
if err != nil {
return nil, err
}
}
variableDefinitions, err := parseVariableDefinitions(parser)
if err != nil {
return nil, err
}
directives, err := parseDirectives(parser)
if err != nil {
return nil, err
}
selectionSet, err := parseSelectionSet(parser)
if err != nil {
return nil, err
}
return ast.NewOperationDefinition(&ast.OperationDefinition{
Operation: operation,
Name: name,
VariableDefinitions: variableDefinitions,
Directives: directives,
SelectionSet: selectionSet,
Loc: loc(parser, start),
}), nil
}
/**
* VariableDefinitions : ( VariableDefinition+ )
*/
func parseVariableDefinitions(parser *Parser) ([]*ast.VariableDefinition, error) {
variableDefinitions := []*ast.VariableDefinition{}
if peek(parser, lexer.TokenKind[lexer.PAREN_L]) {
vdefs, err := many(parser, lexer.TokenKind[lexer.PAREN_L], parseVariableDefinition, lexer.TokenKind[lexer.PAREN_R])
if err != nil {
return variableDefinitions, err
}
for _, vdef := range vdefs {
if vdef != nil {
variableDefinitions = append(variableDefinitions, vdef.(*ast.VariableDefinition))
}
}
return variableDefinitions, nil
}
return variableDefinitions, nil
}
/**
* VariableDefinition : Variable : Type DefaultValue?
*/
func parseVariableDefinition(parser *Parser) (interface{}, error) {
start := parser.Token.Start
variable, err := parseVariable(parser)
if err != nil {
return nil, err
}
_, err = expect(parser, lexer.TokenKind[lexer.COLON])
if err != nil {
return nil, err
}
ttype, err := parseType(parser)
if err != nil {
return nil, err
}
var defaultValue ast.Value
if skp, err := skip(parser, lexer.TokenKind[lexer.EQUALS]); err != nil {
return nil, err
} else if skp {
dv, err := parseValueLiteral(parser, true)
if err != nil {
return nil, err
}
defaultValue = dv
}
return ast.NewVariableDefinition(&ast.VariableDefinition{
Variable: variable,
Type: ttype,
DefaultValue: defaultValue,
Loc: loc(parser, start),
}), nil
}
/**
* Variable : $ Name
*/
func parseVariable(parser *Parser) (*ast.Variable, error) {
start := parser.Token.Start
_, err := expect(parser, lexer.TokenKind[lexer.DOLLAR])
if err != nil {
return nil, err
}
name, err := parseName(parser)
if err != nil {
return nil, err
}
return ast.NewVariable(&ast.Variable{
Name: name,
Loc: loc(parser, start),
}), nil
}
/**
* SelectionSet : { Selection+ }
*/
func parseSelectionSet(parser *Parser) (*ast.SelectionSet, error) {
start := parser.Token.Start
iSelections, err := many(parser, lexer.TokenKind[lexer.BRACE_L], parseSelection, lexer.TokenKind[lexer.BRACE_R])
if err != nil {
return nil, err
}
selections := []ast.Selection{}
for _, iSelection := range iSelections {
if iSelection != nil {
// type assert interface{} into Selection interface
selections = append(selections, iSelection.(ast.Selection))
}
}
return ast.NewSelectionSet(&ast.SelectionSet{
Selections: selections,
Loc: loc(parser, start),
}), nil
}
/**
* Selection :
* - Field
* - FragmentSpread
* - InlineFragment
*/
func parseSelection(parser *Parser) (interface{}, error) {
if peek(parser, lexer.TokenKind[lexer.SPREAD]) {
return parseFragment(parser)
}
return parseField(parser)
}
/**
* Field : Alias? Name Arguments? Directives? SelectionSet?
*
* Alias : Name :
*/
func parseField(parser *Parser) (*ast.Field, error) {
start := parser.Token.Start
nameOrAlias, err := parseName(parser)
if err != nil {
return nil, err
}
var (
name *ast.Name
alias *ast.Name
)
if skp, err := skip(parser, lexer.TokenKind[lexer.COLON]); err != nil {
return nil, err
} else if skp {
alias = nameOrAlias
n, err := parseName(parser)
if err != nil {
return nil, err
}
name = n
} else {
name = nameOrAlias
}
arguments, err := parseArguments(parser)
if err != nil {
return nil, err
}
directives, err := parseDirectives(parser)
if err != nil {
return nil, err
}
var selectionSet *ast.SelectionSet
if peek(parser, lexer.TokenKind[lexer.BRACE_L]) {
sSet, err := parseSelectionSet(parser)
if err != nil {
return nil, err
}
selectionSet = sSet
}
return ast.NewField(&ast.Field{
Alias: alias,
Name: name,
Arguments: arguments,
Directives: directives,
SelectionSet: selectionSet,
Loc: loc(parser, start),
}), nil
}
/**
* Arguments : ( Argument+ )
*/
func parseArguments(parser *Parser) ([]*ast.Argument, error) {
arguments := []*ast.Argument{}
if peek(parser, lexer.TokenKind[lexer.PAREN_L]) {
iArguments, err := many(parser, lexer.TokenKind[lexer.PAREN_L], parseArgument, lexer.TokenKind[lexer.PAREN_R])
if err != nil {
return arguments, err
}
for _, iArgument := range iArguments {
if iArgument != nil {
arguments = append(arguments, iArgument.(*ast.Argument))
}
}
return arguments, nil
}
return arguments, nil
}
/**
* Argument : Name : Value
*/
func parseArgument(parser *Parser) (interface{}, error) {
start := parser.Token.Start
name, err := parseName(parser)
if err != nil {
return nil, err
}
_, err = expect(parser, lexer.TokenKind[lexer.COLON])
if err != nil {
return nil, err
}
value, err := parseValueLiteral(parser, false)
if err != nil {
return nil, err
}
return ast.NewArgument(&ast.Argument{
Name: name,
Value: value,
Loc: loc(parser, start),
}), nil
}
/* Implements the parsing rules in the Fragments section. */
/**
* Corresponds to both FragmentSpread and InlineFragment in the spec.
*
* FragmentSpread : ... FragmentName Directives?
*
* InlineFragment : ... TypeCondition? Directives? SelectionSet
*/
func parseFragment(parser *Parser) (interface{}, error) {
start := parser.Token.Start
_, err := expect(parser, lexer.TokenKind[lexer.SPREAD])
if err != nil {
return nil, err
}
if peek(parser, lexer.TokenKind[lexer.NAME]) && parser.Token.Value != "on" {
name, err := parseFragmentName(parser)
if err != nil {
return nil, err
}
directives, err := parseDirectives(parser)
if err != nil {
return nil, err
}
return ast.NewFragmentSpread(&ast.FragmentSpread{
Name: name,
Directives: directives,
Loc: loc(parser, start),
}), nil
}
var typeCondition *ast.Named
if parser.Token.Value == "on" {
if err := advance(parser); err != nil {
return nil, err
}
name, err := parseNamed(parser)
if err != nil {
return nil, err
}
typeCondition = name
}
directives, err := parseDirectives(parser)
if err != nil {
return nil, err
}
selectionSet, err := parseSelectionSet(parser)
if err != nil {
return nil, err
}
return ast.NewInlineFragment(&ast.InlineFragment{
TypeCondition: typeCondition,
Directives: directives,
SelectionSet: selectionSet,
Loc: loc(parser, start),
}), nil
}
/**
* FragmentDefinition :
* - fragment FragmentName on TypeCondition Directives? SelectionSet
*
* TypeCondition : NamedType
*/
func parseFragmentDefinition(parser *Parser) (*ast.FragmentDefinition, error) {
start := parser.Token.Start
_, err := expectKeyWord(parser, "fragment")
if err != nil {
return nil, err
}
name, err := parseFragmentName(parser)
if err != nil {
return nil, err
}
_, err = expectKeyWord(parser, "on")
if err != nil {
return nil, err
}
typeCondition, err := parseNamed(parser)
if err != nil {
return nil, err
}
directives, err := parseDirectives(parser)
if err != nil {
return nil, err
}
selectionSet, err := parseSelectionSet(parser)
if err != nil {
return nil, err
}
return ast.NewFragmentDefinition(&ast.FragmentDefinition{
Name: name,
TypeCondition: typeCondition,
Directives: directives,
SelectionSet: selectionSet,
Loc: loc(parser, start),
}), nil
}
/**
* FragmentName : Name but not `on`
*/
func parseFragmentName(parser *Parser) (*ast.Name, error) {
if parser.Token.Value == "on" {
return nil, unexpected(parser, lexer.Token{})
}
return parseName(parser)
}
/* Implements the parsing rules in the Values section. */
/**
* Value[Const] :
* - [~Const] Variable
* - IntValue
* - FloatValue
* - StringValue
* - BooleanValue
* - EnumValue
* - ListValue[?Const]
* - ObjectValue[?Const]
*
* BooleanValue : one of `true` `false`
*
* EnumValue : Name but not `true`, `false` or `null`
*/
func parseValueLiteral(parser *Parser, isConst bool) (ast.Value, error) {
token := parser.Token
switch token.Kind {
case lexer.TokenKind[lexer.BRACKET_L]:
return parseList(parser, isConst)
case lexer.TokenKind[lexer.BRACE_L]:
return parseObject(parser, isConst)
case lexer.TokenKind[lexer.INT]:
if err := advance(parser); err != nil {
return nil, err
}
return ast.NewIntValue(&ast.IntValue{
Value: token.Value,
Loc: loc(parser, token.Start),
}), nil
case lexer.TokenKind[lexer.FLOAT]:
if err := advance(parser); err != nil {
return nil, err
}
return ast.NewFloatValue(&ast.FloatValue{
Value: token.Value,
Loc: loc(parser, token.Start),
}), nil
case lexer.TokenKind[lexer.STRING]:
if err := advance(parser); err != nil {
return nil, err
}
return ast.NewStringValue(&ast.StringValue{
Value: token.Value,
Loc: loc(parser, token.Start),
}), nil
case lexer.TokenKind[lexer.NAME]:
if token.Value == "true" || token.Value == "false" {
if err := advance(parser); err != nil {
return nil, err
}
value := true
if token.Value == "false" {
value = false
}
return ast.NewBooleanValue(&ast.BooleanValue{
Value: value,
Loc: loc(parser, token.Start),
}), nil
} else if token.Value != "null" {
if err := advance(parser); err != nil {
return nil, err
}
return ast.NewEnumValue(&ast.EnumValue{
Value: token.Value,
Loc: loc(parser, token.Start),
}), nil
}
case lexer.TokenKind[lexer.DOLLAR]:
if !isConst {
return parseVariable(parser)
}
}
if err := unexpected(parser, lexer.Token{}); err != nil {
return nil, err
}
return nil, nil
}
func parseConstValue(parser *Parser) (interface{}, error) {
return parseValueLiteral(parser, true)
}
func parseValueValue(parser *Parser) (interface{}, error) {
return parseValueLiteral(parser, false)
}
/**
* ListValue[Const] :
* - [ ]
* - [ Value[?Const]+ ]
*/
func parseList(parser *Parser, isConst bool) (*ast.ListValue, error) {
start := parser.Token.Start
var item parseFn
if isConst {
item = parseConstValue
} else {
item = parseValueValue
}
iValues, err := any(parser, lexer.TokenKind[lexer.BRACKET_L], item, lexer.TokenKind[lexer.BRACKET_R])
if err != nil {
return nil, err
}
values := []ast.Value{}
for _, iValue := range iValues {
values = append(values, iValue.(ast.Value))
}
return ast.NewListValue(&ast.ListValue{
Values: values,
Loc: loc(parser, start),
}), nil
}
/**
* ObjectValue[Const] :
* - { }
* - { ObjectField[?Const]+ }
*/
func parseObject(parser *Parser, isConst bool) (*ast.ObjectValue, error) {
start := parser.Token.Start
_, err := expect(parser, lexer.TokenKind[lexer.BRACE_L])
if err != nil {
return nil, err
}
fields := []*ast.ObjectField{}
for {
if skp, err := skip(parser, lexer.TokenKind[lexer.BRACE_R]); err != nil {
return nil, err
} else if skp {
break
}
field, err := parseObjectField(parser, isConst)
if err != nil {
return nil, err
}
fields = append(fields, field)
}
return ast.NewObjectValue(&ast.ObjectValue{
Fields: fields,
Loc: loc(parser, start),
}), nil
}
/**
* ObjectField[Const] : Name : Value[?Const]
*/
func parseObjectField(parser *Parser, isConst bool) (*ast.ObjectField, error) {
start := parser.Token.Start
name, err := parseName(parser)
if err != nil {
return nil, err
}
_, err = expect(parser, lexer.TokenKind[lexer.COLON])
if err != nil {
return nil, err
}
value, err := parseValueLiteral(parser, isConst)
if err != nil {
return nil, err
}
return ast.NewObjectField(&ast.ObjectField{
Name: name,
Value: value,
Loc: loc(parser, start),
}), nil
}
/* Implements the parsing rules in the Directives section. */
/**
* Directives : Directive+
*/
func parseDirectives(parser *Parser) ([]*ast.Directive, error) {
directives := []*ast.Directive{}
for {
if !peek(parser, lexer.TokenKind[lexer.AT]) {
break
}
directive, err := parseDirective(parser)
if err != nil {
return directives, err
}
directives = append(directives, directive)
}
return directives, nil
}
/**
* Directive : @ Name Arguments?
*/
func parseDirective(parser *Parser) (*ast.Directive, error) {
start := parser.Token.Start
_, err := expect(parser, lexer.TokenKind[lexer.AT])
if err != nil {
return nil, err
}
name, err := parseName(parser)
if err != nil {
return nil, err
}
args, err := parseArguments(parser)
if err != nil {
return nil, err
}
return ast.NewDirective(&ast.Directive{
Name: name,
Arguments: args,
Loc: loc(parser, start),
}), nil
}
/* Implements the parsing rules in the Types section. */
/**
* Type :
* - NamedType
* - ListType
* - NonNullType
*/
func parseType(parser *Parser) (ast.Type, error) {
start := parser.Token.Start
var ttype ast.Type
if skp, err := skip(parser, lexer.TokenKind[lexer.BRACKET_L]); err != nil {
return nil, err
} else if skp {
t, err := parseType(parser)
if err != nil {
return t, err
}
ttype = t
_, err = expect(parser, lexer.TokenKind[lexer.BRACKET_R])
if err != nil {
return ttype, err
}
ttype = ast.NewList(&ast.List{
Type: ttype,
Loc: loc(parser, start),
})
} else {
name, err := parseNamed(parser)
if err != nil {
return ttype, err
}
ttype = name
}
if skp, err := skip(parser, lexer.TokenKind[lexer.BANG]); err != nil {
return nil, err
} else if skp {
ttype = ast.NewNonNull(&ast.NonNull{
Type: ttype,
Loc: loc(parser, start),
})
return ttype, nil
}
return ttype, nil
}
/**
* NamedType : Name
*/
func parseNamed(parser *Parser) (*ast.Named, error) {
start := parser.Token.Start
name, err := parseName(parser)
if err != nil {
return nil, err
}
return ast.NewNamed(&ast.Named{
Name: name,
Loc: loc(parser, start),
}), nil
}
/* Implements the parsing rules in the Type Definition section. */
/**
* ObjectTypeDefinition : type Name ImplementsInterfaces? { FieldDefinition+ }
*/
func parseObjectTypeDefinition(parser *Parser) (*ast.ObjectDefinition, error) {
start := parser.Token.Start
_, err := expectKeyWord(parser, "type")
if err != nil {
return nil, err
}
name, err := parseName(parser)
if err != nil {
return nil, err
}
interfaces, err := parseImplementsInterfaces(parser)
if err != nil {
return nil, err
}
iFields, err := any(parser, lexer.TokenKind[lexer.BRACE_L], parseFieldDefinition, lexer.TokenKind[lexer.BRACE_R])
if err != nil {
return nil, err
}
fields := []*ast.FieldDefinition{}
for _, iField := range iFields {
if iField != nil {
fields = append(fields, iField.(*ast.FieldDefinition))
}
}
return ast.NewObjectDefinition(&ast.ObjectDefinition{
Name: name,
Loc: loc(parser, start),
Interfaces: interfaces,
Fields: fields,
}), nil
}
/**
* ImplementsInterfaces : implements NamedType+
*/
func parseImplementsInterfaces(parser *Parser) ([]*ast.Named, error) {
types := []*ast.Named{}
if parser.Token.Value == "implements" {
if err := advance(parser); err != nil {
return nil, err
}
for {
ttype, err := parseNamed(parser)
if err != nil {
return types, err
}
types = append(types, ttype)
if peek(parser, lexer.TokenKind[lexer.BRACE_L]) {
break
}
}
}
return types, nil
}
/**
* FieldDefinition : Name ArgumentsDefinition? : Type
*/
func parseFieldDefinition(parser *Parser) (interface{}, error) {
start := parser.Token.Start
name, err := parseName(parser)
if err != nil {
return nil, err
}
args, err := parseArgumentDefs(parser)
if err != nil {
return nil, err
}
_, err = expect(parser, lexer.TokenKind[lexer.COLON])
if err != nil {
return nil, err
}
ttype, err := parseType(parser)
if err != nil {
return nil, err
}
return ast.NewFieldDefinition(&ast.FieldDefinition{
Name: name,
Arguments: args,
Type: ttype,
Loc: loc(parser, start),
}), nil
}
/**
* ArgumentsDefinition : ( InputValueDefinition+ )
*/
func parseArgumentDefs(parser *Parser) ([]*ast.InputValueDefinition, error) {
inputValueDefinitions := []*ast.InputValueDefinition{}
if !peek(parser, lexer.TokenKind[lexer.PAREN_L]) {
return inputValueDefinitions, nil
}
iInputValueDefinitions, err := many(parser, lexer.TokenKind[lexer.PAREN_L], parseInputValueDef, lexer.TokenKind[lexer.PAREN_R])
if err != nil {
return inputValueDefinitions, err
}
for _, iInputValueDefinition := range iInputValueDefinitions {
if iInputValueDefinition != nil {
inputValueDefinitions = append(inputValueDefinitions, iInputValueDefinition.(*ast.InputValueDefinition))
}
}
return inputValueDefinitions, nil
}
/**
* InputValueDefinition : Name : Type DefaultValue?
*/
func parseInputValueDef(parser *Parser) (interface{}, error) {
start := parser.Token.Start
name, err := parseName(parser)
if err != nil {
return nil, err
}
_, err = expect(parser, lexer.TokenKind[lexer.COLON])
if err != nil {
return nil, err
}
ttype, err := parseType(parser)
if err != nil {
return nil, err
}
var defaultValue ast.Value
if skp, err := skip(parser, lexer.TokenKind[lexer.EQUALS]); err != nil {
return nil, err
} else if skp {
val, err := parseConstValue(parser)
if err != nil {
return nil, err
}
if val, ok := val.(ast.Value); ok {
defaultValue = val
}
}
return ast.NewInputValueDefinition(&ast.InputValueDefinition{
Name: name,
Type: ttype,
DefaultValue: defaultValue,
Loc: loc(parser, start),
}), nil
}
/**
* InterfaceTypeDefinition : interface Name { FieldDefinition+ }
*/
func parseInterfaceTypeDefinition(parser *Parser) (*ast.InterfaceDefinition, error) {
start := parser.Token.Start
_, err := expectKeyWord(parser, "interface")
if err != nil {
return nil, err
}
name, err := parseName(parser)
if err != nil {
return nil, err
}
iFields, err := any(parser, lexer.TokenKind[lexer.BRACE_L], parseFieldDefinition, lexer.TokenKind[lexer.BRACE_R])
if err != nil {
return nil, err
}
fields := []*ast.FieldDefinition{}
for _, iField := range iFields {
if iField != nil {
fields = append(fields, iField.(*ast.FieldDefinition))
}
}
return ast.NewInterfaceDefinition(&ast.InterfaceDefinition{
Name: name,
Loc: loc(parser, start),
Fields: fields,
}), nil
}
/**
* UnionTypeDefinition : union Name = UnionMembers
*/
func parseUnionTypeDefinition(parser *Parser) (*ast.UnionDefinition, error) {
start := parser.Token.Start
_, err := expectKeyWord(parser, "union")
if err != nil {
return nil, err
}
name, err := parseName(parser)
if err != nil {
return nil, err
}
_, err = expect(parser, lexer.TokenKind[lexer.EQUALS])
if err != nil {
return nil, err
}
types, err := parseUnionMembers(parser)
if err != nil {
return nil, err
}
return ast.NewUnionDefinition(&ast.UnionDefinition{
Name: name,
Loc: loc(parser, start),
Types: types,
}), nil
}
/**
* UnionMembers :
* - NamedType
* - UnionMembers | NamedType
*/
func parseUnionMembers(parser *Parser) ([]*ast.Named, error) {
members := []*ast.Named{}
for {
member, err := parseNamed(parser)
if err != nil {
return members, err
}
members = append(members, member)
if skp, err := skip(parser, lexer.TokenKind[lexer.PIPE]); err != nil {
return nil, err
} else if !skp {
break
}
}
return members, nil
}
/**
* ScalarTypeDefinition : scalar Name
*/
func parseScalarTypeDefinition(parser *Parser) (*ast.ScalarDefinition, error) {
start := parser.Token.Start
_, err := expectKeyWord(parser, "scalar")
if err != nil {
return nil, err
}
name, err := parseName(parser)
if err != nil {
return nil, err
}
def := ast.NewScalarDefinition(&ast.ScalarDefinition{
Name: name,
Loc: loc(parser, start),
})
return def, nil
}
/**
* EnumTypeDefinition : enum Name { EnumValueDefinition+ }
*/
func parseEnumTypeDefinition(parser *Parser) (*ast.EnumDefinition, error) {
start := parser.Token.Start
_, err := expectKeyWord(parser, "enum")
if err != nil {
return nil, err
}
name, err := parseName(parser)
if err != nil {
return nil, err
}
iEnumValueDefs, err := any(parser, lexer.TokenKind[lexer.BRACE_L], parseEnumValueDefinition, lexer.TokenKind[lexer.BRACE_R])
if err != nil {
return nil, err
}
values := []*ast.EnumValueDefinition{}
for _, iEnumValueDef := range iEnumValueDefs {
if iEnumValueDef != nil {
values = append(values, iEnumValueDef.(*ast.EnumValueDefinition))
}
}
return ast.NewEnumDefinition(&ast.EnumDefinition{
Name: name,
Loc: loc(parser, start),
Values: values,
}), nil
}
/**
* EnumValueDefinition : EnumValue
*
* EnumValue : Name
*/
func parseEnumValueDefinition(parser *Parser) (interface{}, error) {
start := parser.Token.Start
name, err := parseName(parser)
if err != nil {
return nil, err
}
return ast.NewEnumValueDefinition(&ast.EnumValueDefinition{
Name: name,
Loc: loc(parser, start),
}), nil
}
/**
* InputObjectTypeDefinition : input Name { InputValueDefinition+ }
*/
func parseInputObjectTypeDefinition(parser *Parser) (*ast.InputObjectDefinition, error) {
start := parser.Token.Start
_, err := expectKeyWord(parser, "input")
if err != nil {
return nil, err
}
name, err := parseName(parser)
if err != nil {
return nil, err
}
iInputValueDefinitions, err := any(parser, lexer.TokenKind[lexer.BRACE_L], parseInputValueDef, lexer.TokenKind[lexer.BRACE_R])
if err != nil {
return nil, err
}
fields := []*ast.InputValueDefinition{}
for _, iInputValueDefinition := range iInputValueDefinitions {
if iInputValueDefinition != nil {
fields = append(fields, iInputValueDefinition.(*ast.InputValueDefinition))
}
}
return ast.NewInputObjectDefinition(&ast.InputObjectDefinition{
Name: name,
Loc: loc(parser, start),
Fields: fields,
}), nil
}
/**
* TypeExtensionDefinition : extend ObjectTypeDefinition
*/
func parseTypeExtensionDefinition(parser *Parser) (*ast.TypeExtensionDefinition, error) {
start := parser.Token.Start
_, err := expectKeyWord(parser, "extend")
if err != nil {
return nil, err
}
definition, err := parseObjectTypeDefinition(parser)
if err != nil {
return nil, err
}
return ast.NewTypeExtensionDefinition(&ast.TypeExtensionDefinition{
Loc: loc(parser, start),
Definition: definition,
}), nil
}
/* Core parsing utility functions */
// Returns a location object, used to identify the place in
// the source that created a given parsed object.
func loc(parser *Parser, start int) *ast.Location {
if parser.Options.NoLocation {
return nil
}
if parser.Options.NoSource {
return ast.NewLocation(&ast.Location{
Start: start,
End: parser.PrevEnd,
})
}
return ast.NewLocation(&ast.Location{
Start: start,
End: parser.PrevEnd,
Source: parser.Source,
})
}
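// exampleLocationOptions is an editor-added sketch (not part of the
// upstream file) illustrating how ParseOptions affect the Location nodes
// produced by loc: NoLocation suppresses them entirely, while NoSource
// keeps the Start/End offsets but drops the Source back-reference.
func exampleLocationOptions() {
    src := source.NewSource(&source.Source{Body: "{ field }"})
    if p, err := makeParser(src, ParseOptions{NoSource: true}); err == nil {
        _ = loc(p, 0) // *ast.Location with Start/End set but no Source
    }
    if p, err := makeParser(src, ParseOptions{NoLocation: true}); err == nil {
        _ = loc(p, 0) // nil: location tracking is disabled
    }
}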
// Moves the internal parser object to the next lexed token.
func advance(parser *Parser) error {
prevEnd := parser.Token.End
parser.PrevEnd = prevEnd
token, err := parser.LexToken(prevEnd)
if err != nil {
return err
}
parser.Token = token
return nil
}
// Determines if the next token is of a given kind
func peek(parser *Parser, kind int) bool {
return parser.Token.Kind == kind
}
// If the next token is of the given kind, return true after advancing
// the parser. Otherwise, do not change the parser state and return false.
func skip(parser *Parser, kind int) (bool, error) {
if parser.Token.Kind == kind {
err := advance(parser)
return true, err
}
return false, nil
}
// If the next token is of the given kind, return that token after advancing
// the parser. Otherwise, do not change the parser state and return an error.
func expect(parser *Parser, kind int) (lexer.Token, error) {
token := parser.Token
if token.Kind == kind {
err := advance(parser)
return token, err
}
descp := fmt.Sprintf("Expected %s, found %s", lexer.GetTokenKindDesc(kind), lexer.GetTokenDesc(token))
return token, gqlerrors.NewSyntaxError(parser.Source, token.Start, descp)
}
// If the next token is a keyword with the given value, return that token after
// advancing the parser. Otherwise, do not change the parser state and return an error.
func expectKeyWord(parser *Parser, value string) (lexer.Token, error) {
token := parser.Token
if token.Kind == lexer.TokenKind[lexer.NAME] && token.Value == value {
err := advance(parser)
return token, err
}
descp := fmt.Sprintf("Expected \"%s\", found %s", value, lexer.GetTokenDesc(token))
return token, gqlerrors.NewSyntaxError(parser.Source, token.Start, descp)
}
// Helper function for creating an error when an unexpected lexed token
// is encountered.
func unexpected(parser *Parser, atToken lexer.Token) error {
token := atToken
if (atToken == lexer.Token{}) {
token = parser.Token
}
description := fmt.Sprintf("Unexpected %v", lexer.GetTokenDesc(token))
return gqlerrors.NewSyntaxError(parser.Source, token.Start, description)
}
// Returns a possibly empty list of parse nodes, determined by
// the parseFn. This list begins with a lex token of openKind
// and ends with a lex token of closeKind. Advances the parser
// to the next lex token after the closing token.
func any(parser *Parser, openKind int, parseFn parseFn, closeKind int) ([]interface{}, error) {
var nodes []interface{}
_, err := expect(parser, openKind)
if err != nil {
return nodes, err
}
for {
if skp, err := skip(parser, closeKind); err != nil {
return nil, err
} else if skp {
break
}
n, err := parseFn(parser)
if err != nil {
return nodes, err
}
nodes = append(nodes, n)
}
return nodes, nil
}
// Returns a non-empty list of parse nodes, determined by
// the parseFn. This list begins with a lex token of openKind
// and ends with a lex token of closeKind. Advances the parser
// to the next lex token after the closing token.
func many(parser *Parser, openKind int, parseFn parseFn, closeKind int) ([]interface{}, error) {
_, err := expect(parser, openKind)
if err != nil {
return nil, err
}
var nodes []interface{}
node, err := parseFn(parser)
if err != nil {
return nodes, err
}
nodes = append(nodes, node)
for {
if skp, err := skip(parser, closeKind); err != nil {
return nil, err
} else if skp {
break
}
node, err := parseFn(parser)
if err != nil {
return nodes, err
}
nodes = append(nodes, node)
}
return nodes, nil
}
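// exampleDelimitedList is an editor-added sketch (not part of the upstream
// file) of the delimiter-driven pattern used throughout this parser: many
// requires at least one item between openKind and closeKind, while any also
// accepts an empty list. The token kinds and item parser chosen here mirror
// how parseArguments consumes "( name: value ... )".
func exampleDelimitedList(parser *Parser) ([]interface{}, error) {
    return many(parser, lexer.TokenKind[lexer.PAREN_L], parseArgument, lexer.TokenKind[lexer.PAREN_R])
}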