diff --git a/lib/graphql/language/lexer.rb b/lib/graphql/language/lexer.rb index c7ef5ae31b..a08cf2b3b1 100644 --- a/lib/graphql/language/lexer.rb +++ b/lib/graphql/language/lexer.rb @@ -1,39 +1,228 @@ # frozen_string_literal: true - -require "strscan" - module GraphQL module Language + class Lexer - IDENTIFIER = /[_A-Za-z][_0-9A-Za-z]*/ - NEWLINE = /[\c\r\n]/ - BLANK = /[, \t]+/ - COMMENT = /#[^\n\r]*/ - INT = /[-]?(?:[0]|[1-9][0-9]*)/ - FLOAT_DECIMAL = /[.][0-9]+/ - FLOAT_EXP = /[eE][+-]?[0-9]+/ - FLOAT = /#{INT}(#{FLOAT_DECIMAL}#{FLOAT_EXP}|#{FLOAT_DECIMAL}|#{FLOAT_EXP})/ - - module Literals - ON = /on\b/ - FRAGMENT = /fragment\b/ - TRUE = /true\b/ - FALSE = /false\b/ - NULL = /null\b/ - QUERY = /query\b/ - MUTATION = /mutation\b/ - SUBSCRIPTION = /subscription\b/ - SCHEMA = /schema\b/ - SCALAR = /scalar\b/ - TYPE = /type\b/ - EXTEND = /extend\b/ - IMPLEMENTS = /implements\b/ - INTERFACE = /interface\b/ - UNION = /union\b/ - ENUM = /enum\b/ - INPUT = /input\b/ - DIRECTIVE = /directive\b/ - REPEATABLE = /repeatable\b/ + def initialize(graphql_str, filename: nil) + if !(graphql_str.encoding == Encoding::UTF_8 || graphql_str.ascii_only?) + graphql_str = graphql_str.dup.force_encoding(Encoding::UTF_8) + end + @string = graphql_str + @filename = filename + @scanner = StringScanner.new(graphql_str) + @pos = nil + end + + def eos? + @scanner.eos? + end + + attr_reader :pos + + def advance + @scanner.skip(IGNORE_REGEXP) + return false if @scanner.eos? + @pos = @scanner.pos + next_byte = @string.getbyte(@pos) + next_byte_is_for = FIRST_BYTES[next_byte] + case next_byte_is_for + when ByteFor::PUNCTUATION + @scanner.pos += 1 + PUNCTUATION_NAME_FOR_BYTE[next_byte] + when ByteFor::NAME + if len = @scanner.skip(KEYWORD_REGEXP) + case len + when 2 + :ON + when 12 + :SUBSCRIPTION + else + pos = @pos + + # Use bytes 2 and 3 as a unique identifier for this keyword + bytes = (@string.getbyte(pos + 2) << 8) | @string.getbyte(pos + 1) + KEYWORD_BY_TWO_BYTES[_hash(bytes)] + end + else + @scanner.skip(IDENTIFIER_REGEXP) + :IDENTIFIER + end + when ByteFor::IDENTIFIER + @scanner.skip(IDENTIFIER_REGEXP) + :IDENTIFIER + when ByteFor::NUMBER + @scanner.skip(NUMERIC_REGEXP) + # Check for a matched decimal: + @scanner[1] ? :FLOAT : :INT + when ByteFor::ELLIPSIS + if @string.getbyte(@pos + 1) != 46 || @string.getbyte(@pos + 2) != 46 + raise_parse_error("Expected `...`, actual: #{@string[@pos..@pos + 2].inspect}") + end + @scanner.pos += 3 + :ELLIPSIS + when ByteFor::STRING + if @scanner.skip(BLOCK_STRING_REGEXP) || @scanner.skip(QUOTED_STRING_REGEXP) + :STRING + else + raise_parse_error("Expected string or block string, but it was malformed") + end + else + @scanner.pos += 1 + :UNKNOWN_CHAR + end + rescue ArgumentError => err + if err.message == "invalid byte sequence in UTF-8" + raise_parse_error("Parse error on bad Unicode escape sequence", nil, nil) + end + end + + def token_value + @string.byteslice(@scanner.pos - @scanner.matched_size, @scanner.matched_size) + rescue StandardError => err + raise GraphQL::Error, "(token_value failed: #{err.class}: #{err.message})" + end + + def debug_token_value(token_name) + if token_name && Lexer::Punctuation.const_defined?(token_name) + Lexer::Punctuation.const_get(token_name) + elsif token_name == :ELLIPSIS + "..." 
+ elsif token_name == :STRING + string_value + else + token_value + end + end + + ESCAPES = /\\["\\\/bfnrt]/ + ESCAPES_REPLACE = { + '\\"' => '"', + "\\\\" => "\\", + "\\/" => '/', + "\\b" => "\b", + "\\f" => "\f", + "\\n" => "\n", + "\\r" => "\r", + "\\t" => "\t", + } + UTF_8 = /\\u(?:([\dAa-f]{4})|\{([\da-f]{4,})\})(?:\\u([\dAa-f]{4}))?/i + VALID_STRING = /\A(?:[^\\]|#{ESCAPES}|#{UTF_8})*\z/o + + def string_value + str = token_value + is_block = str.start_with?('"""') + if is_block + str.gsub!(/\A"""|"""\z/, '') + else + str.gsub!(/\A"|"\z/, '') + end + + if is_block + str = Language::BlockString.trim_whitespace(str) + end + + if !str.valid_encoding? || !str.match?(VALID_STRING) + raise_parse_error("Bad unicode escape in #{str.inspect}") + else + Lexer.replace_escaped_characters_in_place(str) + + if !str.valid_encoding? + raise_parse_error("Bad unicode escape in #{str.inspect}") + else + str + end + end + end + + def line_number + @scanner.string[0..@pos].count("\n") + 1 + end + + def column_number + @scanner.string[0..@pos].split("\n").last.length + end + + def raise_parse_error(message, line = line_number, col = column_number) + raise GraphQL::ParseError.new(message, line, col, @string, filename: @filename) + end + + IGNORE_REGEXP = %r{ + (?: + [, \c\r\n\t]+ | + \#.*$ + )* + }x + IDENTIFIER_REGEXP = /[_A-Za-z][_0-9A-Za-z]*/ + INT_REGEXP = /-?(?:[0]|[1-9][0-9]*)/ + FLOAT_DECIMAL_REGEXP = /[.][0-9]+/ + FLOAT_EXP_REGEXP = /[eE][+-]?[0-9]+/ + NUMERIC_REGEXP = /#{INT_REGEXP}(#{FLOAT_DECIMAL_REGEXP}#{FLOAT_EXP_REGEXP}|#{FLOAT_DECIMAL_REGEXP}|#{FLOAT_EXP_REGEXP})?/ + + KEYWORDS = [ + "on", + "fragment", + "true", + "false", + "null", + "query", + "mutation", + "subscription", + "schema", + "scalar", + "type", + "extend", + "implements", + "interface", + "union", + "enum", + "input", + "directive", + "repeatable" + ].freeze + + KEYWORD_REGEXP = /#{Regexp.union(KEYWORDS.sort)}\b/ + KEYWORD_BY_TWO_BYTES = [ + :INTERFACE, + :MUTATION, + :EXTEND, + :FALSE, + :ENUM, + :TRUE, + :NULL, + nil, + nil, + nil, + nil, + nil, + nil, + nil, + :QUERY, + nil, + nil, + :REPEATABLE, + :IMPLEMENTS, + :INPUT, + :TYPE, + :SCHEMA, + nil, + nil, + nil, + :DIRECTIVE, + :UNION, + nil, + nil, + :SCALAR, + nil, + :FRAGMENT + ] + + # This produces a unique integer for bytes 2 and 3 of each keyword string + # See https://tenderlovemaking.com/2023/09/02/fast-tokenizers-with-stringscanner.html + def _hash key + (key * 18592990) >> 27 & 0x1f + end + + module Punctuation LCURLY = '{' RCURLY = '}' LPAREN = '(' @@ -43,36 +232,31 @@ module Literals COLON = ':' VAR_SIGN = '$' DIR_SIGN = '@' - ELLIPSIS = '...' EQUALS = '=' BANG = '!' 
PIPE = '|' AMP = '&' end - include Literals + # A sparse array mapping the bytes for each punctuation + # to a symbol name for that punctuation + PUNCTUATION_NAME_FOR_BYTE = Punctuation.constants.each_with_object([]) { |name, arr| + punct = Punctuation.const_get(name) + arr[punct.ord] = name + } QUOTE = '"' UNICODE_DIGIT = /[0-9A-Za-z]/ FOUR_DIGIT_UNICODE = /#{UNICODE_DIGIT}{4}/ - N_DIGIT_UNICODE = %r{#{LCURLY}#{UNICODE_DIGIT}{4,}#{RCURLY}}x + N_DIGIT_UNICODE = %r{#{Punctuation::LCURLY}#{UNICODE_DIGIT}{4,}#{Punctuation::RCURLY}}x UNICODE_ESCAPE = %r{\\u(?:#{FOUR_DIGIT_UNICODE}|#{N_DIGIT_UNICODE})} - # # https://graphql.github.io/graphql-spec/June2018/#sec-String-Value + # # https://graphql.github.io/graphql-spec/June2018/#sec-String-Value STRING_ESCAPE = %r{[\\][\\/bfnrt]} BLOCK_QUOTE = '"""' ESCAPED_QUOTE = /\\"/; STRING_CHAR = /#{ESCAPED_QUOTE}|[^"\\]|#{UNICODE_ESCAPE}|#{STRING_ESCAPE}/ - - LIT_NAME_LUT = Literals.constants.each_with_object({}) { |n, o| - key = Literals.const_get(n) - key = key.is_a?(Regexp) ? key.source.gsub(/(\\b|\\)/, '') : key - o[key] = n - } - - LIT = Regexp.union(Literals.constants.map { |n| Literals.const_get(n) }) - - QUOTED_STRING = %r{#{QUOTE} (?:#{STRING_CHAR})* #{QUOTE}}x - BLOCK_STRING = %r{ + QUOTED_STRING_REGEXP = %r{#{QUOTE} (?:#{STRING_CHAR})* #{QUOTE}}x + BLOCK_STRING_REGEXP = %r{ #{BLOCK_QUOTE} (?: [^"\\] | # Any characters that aren't a quote or slash (? '"', - "\\\\" => "\\", - "\\/" => '/', - "\\b" => "\b", - "\\f" => "\f", - "\\n" => "\n", - "\\r" => "\r", - "\\t" => "\t", - } - UTF_8 = /\\u(?:([\dAa-f]{4})|\{([\da-f]{4,})\})(?:\\u([\dAa-f]{4}))?/i - VALID_STRING = /\A(?:[^\\]|#{ESCAPES}|#{UTF_8})*\z/o - - def emit_block(ts, te, value) - line_incr = value.count("\n") - value = GraphQL::Language::BlockString.trim_whitespace(value) - tok = emit_string(ts, te, value) - @line += line_incr - tok - end - - def emit_string(ts, te, value) - if !value.valid_encoding? || !value.match?(VALID_STRING) - emit(:BAD_UNICODE_ESCAPE, ts, te, value) - else - self.class.replace_escaped_characters_in_place(value) - - if !value.valid_encoding? - emit(:BAD_UNICODE_ESCAPE, ts, te, value) - else - emit(:STRING, ts, te, value) - end + # This is not used during parsing because the parser + # doesn't actually need tokens. + def self.tokenize(string) + lexer = GraphQL::Language::Lexer.new(string) + tokens = [] + prev_token = nil + while (token_name = lexer.advance) + new_token = [ + token_name, + lexer.line_number, + lexer.column_number, + lexer.debug_token_value(token_name), + prev_token, + ] + tokens << new_token + prev_token = new_token end + tokens end - - private - - def scanner(value) - StringScanner.new value - end - end end end diff --git a/lib/graphql/language/nodes.rb b/lib/graphql/language/nodes.rb index 2d1ca407c4..893ccc2dbb 100644 --- a/lib/graphql/language/nodes.rb +++ b/lib/graphql/language/nodes.rb @@ -16,30 +16,30 @@ module DefinitionNode # @return [Integer] The first line of the definition (not the description) attr_reader :definition_line - def initialize(options = {}) - @definition_line = options.delete(:definition_line) - super(options) + def initialize(definition_line: nil, **_rest) + @definition_line = definition_line + super(**_rest) end end - attr_reader :line, :col, :filename + attr_reader :filename - # Initialize a node by extracting its position, - # then calling the class's `initialize_node` method. 
- # @param options [Hash] Initial attributes for this node - def initialize(options = {}) - if options.key?(:position_source) - position_source = options.delete(:position_source) - @line = position_source[1] - @col = position_source[2] - else - @line = options.delete(:line) - @col = options.delete(:col) - end + def line + @line ||= (@source_string && @pos) ? @source_string[0..@pos].count("\n") + 1 : nil + end - @filename = options.delete(:filename) + def col + @col ||= if @source_string && @pos + if @pos == 0 + 1 + else + @source_string[0..@pos].split("\n").last.length + end + end + end - initialize_node(**options) + def definition_line + @definition_line ||= (@source_string && @definition_pos) ? @source_string[0..@definition_pos].count("\n") + 1 : nil end # Value equality @@ -196,8 +196,8 @@ def children_methods(children_of_type) module_eval <<-RUBY, __FILE__, __LINE__ # Singular method: create a node with these options # and return a new `self` which includes that node in this list. - def merge_#{method_name.to_s.sub(/s$/, "")}(node_opts) - merge(#{method_name}: #{method_name} + [#{node_type.name}.new(node_opts)]) + def merge_#{method_name.to_s.sub(/s$/, "")}(**node_opts) + merge(#{method_name}: #{method_name} + [#{node_type.name}.new(**node_opts)]) end RUBY end @@ -226,13 +226,14 @@ def children end if defined?(@scalar_methods) - if !method_defined?(:initialize_node) - generate_initialize_node + if !@initialize_was_generated + @initialize_was_generated = true + generate_initialize else # This method was defined manually end else - raise "Can't generate_initialize_node because scalar_methods wasn't called; call it before children_methods" + raise "Can't generate_initialize because scalar_methods wasn't called; call it before children_methods" end end @@ -261,7 +262,15 @@ def scalars end end - def generate_initialize_node + DEFAULT_INITIALIZE_OPTIONS = [ + "line: nil", + "col: nil", + "pos: nil", + "filename: nil", + "source_string: nil", + ] + + def generate_initialize scalar_method_names = @scalar_methods # TODO: These probably should be scalar methods, but `types` returns an array [:types, :description].each do |extra_method| @@ -277,16 +286,27 @@ def generate_initialize_node return else arguments = scalar_method_names.map { |m| "#{m}: nil"} + - @children_methods.keys.map { |m| "#{m}: NO_CHILDREN" } + @children_methods.keys.map { |m| "#{m}: NO_CHILDREN" } + + DEFAULT_INITIALIZE_OPTIONS assignments = scalar_method_names.map { |m| "@#{m} = #{m}"} + @children_methods.keys.map { |m| "@#{m} = #{m}.freeze" } + if name.end_with?("Definition") && name != "FragmentDefinition" + arguments << "definition_pos: nil" + assignments << "@definition_pos = definition_pos" + end + keywords = scalar_method_names.map { |m| "#{m}: #{m}"} + @children_methods.keys.map { |m| "#{m}: #{m}" } module_eval <<-RUBY, __FILE__, __LINE__ - def initialize_node #{arguments.join(", ")} + def initialize(#{arguments.join(", ")}) + @line = line + @col = col + @pos = pos + @filename = filename + @source_string = source_string #{assignments.join("\n")} end @@ -336,7 +356,6 @@ class DirectiveLocation < NameOnlyNode end class DirectiveDefinition < AbstractNode - include DefinitionNode attr_reader :description scalar_methods :name, :repeatable children_methods( @@ -365,17 +384,22 @@ class Field < AbstractNode # @!attribute selections # @return [Array] Selections on this object (or empty array if this is a scalar field) - def initialize_node(attributes) - @name = attributes[:name] - @arguments = attributes[:arguments] || NONE - 
@directives = attributes[:directives] || NONE - @selections = attributes[:selections] || NONE + def initialize(name: nil, arguments: NONE, directives: NONE, selections: NONE, field_alias: nil, line: nil, col: nil, pos: nil, filename: nil, source_string: nil) + @name = name + @arguments = arguments || NONE + @directives = directives || NONE + @selections = selections || NONE # oops, alias is a keyword: - @alias = attributes[:alias] + @alias = field_alias + @line = line + @col = col + @pos = pos + @filename = filename + @source_string = source_string end - def self.from_a(filename, line, col, graphql_alias, name, arguments, directives, selections) # rubocop:disable Metrics/ParameterLists - self.new(filename: filename, line: line, col: col, alias: graphql_alias, name: name, arguments: arguments, directives: directives, selections: selections) + def self.from_a(filename, line, col, field_alias, name, arguments, directives, selections) # rubocop:disable Metrics/ParameterLists + self.new(filename: filename, line: line, col: col, field_alias: field_alias, name: name, arguments: arguments, directives: directives, selections: selections) end # Override this because default is `:fields` @@ -384,29 +408,33 @@ def self.from_a(filename, line, col, graphql_alias, name, arguments, directives, # A reusable fragment, defined at document-level. class FragmentDefinition < AbstractNode + scalar_methods :name, :type + children_methods({ + selections: GraphQL::Language::Nodes::Field, + directives: GraphQL::Language::Nodes::Directive, + }) + + self.children_method_name = :definitions # @!attribute name # @return [String] the identifier for this fragment, which may be applied with `...#{name}` # @!attribute type # @return [String] the type condition for this fragment (name of type which it may apply to) - def initialize_node(name: nil, type: nil, directives: [], selections: []) + def initialize(name: nil, type: nil, directives: NONE, selections: NONE, filename: nil, pos: nil, source_string: nil, line: nil, col: nil) @name = name @type = type @directives = directives @selections = selections + @filename = filename + @pos = pos + @source_string = source_string + @line = line + @col = col end def self.from_a(filename, line, col, name, type, directives, selections) self.new(filename: filename, line: line, col: col, name: name, type: type, directives: directives, selections: selections) end - - scalar_methods :name, :type - children_methods({ - selections: GraphQL::Language::Nodes::Field, - directives: GraphQL::Language::Nodes::Directive, - }) - - self.children_method_name = :definitions end # Application of a named fragment in a selection @@ -562,7 +590,6 @@ class VariableIdentifier < NameOnlyNode end class SchemaDefinition < AbstractNode - include DefinitionNode scalar_methods :query, :mutation, :subscription children_methods({ directives: GraphQL::Language::Nodes::Directive, @@ -579,7 +606,6 @@ class SchemaExtension < AbstractNode end class ScalarTypeDefinition < AbstractNode - include DefinitionNode attr_reader :description scalar_methods :name children_methods({ @@ -597,7 +623,6 @@ class ScalarTypeExtension < AbstractNode end class InputValueDefinition < AbstractNode - include DefinitionNode attr_reader :description scalar_methods :name, :type, :default_value children_methods({ @@ -607,7 +632,6 @@ class InputValueDefinition < AbstractNode end class FieldDefinition < AbstractNode - include DefinitionNode attr_reader :description scalar_methods :name, :type children_methods({ @@ -628,7 +652,6 @@ def 
merge(new_options) end class ObjectTypeDefinition < AbstractNode - include DefinitionNode attr_reader :description scalar_methods :name, :interfaces children_methods({ @@ -648,7 +671,6 @@ class ObjectTypeExtension < AbstractNode end class InterfaceTypeDefinition < AbstractNode - include DefinitionNode attr_reader :description scalar_methods :name children_methods({ @@ -670,7 +692,6 @@ class InterfaceTypeExtension < AbstractNode end class UnionTypeDefinition < AbstractNode - include DefinitionNode attr_reader :description, :types scalar_methods :name children_methods({ @@ -689,7 +710,6 @@ class UnionTypeExtension < AbstractNode end class EnumValueDefinition < AbstractNode - include DefinitionNode attr_reader :description scalar_methods :name children_methods({ @@ -699,7 +719,6 @@ class EnumValueDefinition < AbstractNode end class EnumTypeDefinition < AbstractNode - include DefinitionNode attr_reader :description scalar_methods :name children_methods({ @@ -719,7 +738,6 @@ class EnumTypeExtension < AbstractNode end class InputObjectTypeDefinition < AbstractNode - include DefinitionNode attr_reader :description scalar_methods :name children_methods({ diff --git a/lib/graphql/language/parser.rb b/lib/graphql/language/parser.rb index c7ff65fb76..048fc5888d 100644 --- a/lib/graphql/language/parser.rb +++ b/lib/graphql/language/parser.rb @@ -1,2031 +1,731 @@ -# -# DO NOT MODIFY!!!! -# This file is automatically generated by Racc 1.6.2 -# from Racc grammar file "". -# - -require 'racc/parser.rb' - +# frozen_string_literal: true +require "strscan" +require "graphql/language/nodes" module GraphQL module Language - class Parser < Racc::Parser - -module_eval(<<'...end parser.y/module_eval...', 'parser.y', 453) - -EMPTY_ARRAY = [].freeze - -def initialize(query_string, filename:, trace: Tracing::NullTrace) - raise GraphQL::ParseError.new("No query string was present", nil, nil, query_string) if query_string.nil? - @query_string = query_string - @filename = filename - @trace = trace - @reused_next_token = [nil, nil] -end - -def parse_document - @document ||= begin - # Break the string into tokens - @trace.lex(query_string: @query_string) do - @tokens ||= GraphQL::Language::Lexer.tokenize(@query_string) - end - # From the tokens, build an AST - @trace.parse(query_string: @query_string) do - if @tokens.empty? - raise GraphQL::ParseError.new("Unexpected end of document", nil, nil, @query_string) - else - do_parse + class Parser + include GraphQL::Language::Nodes + include EmptyObjects + + class << self + attr_accessor :cache + + def parse(graphql_str, filename: nil, trace: Tracing::NullTrace) + self.new(graphql_str, filename: filename, trace: trace).parse + end + + def parse_file(filename, trace: Tracing::NullTrace) + if cache + cache.fetch(filename) do + parse(File.read(filename), filename: filename, trace: trace) + end + else + parse(File.read(filename), filename: filename, trace: trace) + end + end end - end - end -end - -class << self - attr_accessor :cache - def parse(query_string, filename: nil, trace: GraphQL::Tracing::NullTrace) - new(query_string, filename: filename, trace: trace).parse_document - end - - def parse_file(filename, trace: GraphQL::Tracing::NullTrace) - if cache - cache.fetch(filename) do - parse(File.read(filename), filename: filename, trace: trace) + def initialize(graphql_str, filename: nil, trace: Tracing::NullTrace) + if graphql_str.nil? 
+ raise GraphQL::ParseError.new("No query string was present", nil, nil, nil) + end + @lexer = Lexer.new(graphql_str, filename: filename) + @graphql_str = graphql_str + @filename = filename + @trace = trace end - else - parse(File.read(filename), filename: filename, trace: trace) - end - end -end - -private - -def next_token - lexer_token = @tokens.shift - if lexer_token.nil? - nil - else - @reused_next_token[0] = lexer_token[0] - @reused_next_token[1] = lexer_token - @reused_next_token - end -end - -def get_description(token) - comments = [] - - loop do - prev_token = token - token = token[4] - - break if token.nil? - break if token[0] != :COMMENT - break if prev_token[1] != token[1] + 1 - comments.unshift(token[3].sub(/^#\s*/, "")) - end - - return nil if comments.empty? - - comments.join("\n") -end - -def on_error(parser_token_id, lexer_token, vstack) - if lexer_token == "$" || lexer_token == nil - raise GraphQL::ParseError.new("Unexpected end of document", nil, nil, @query_string, filename: @filename) - else - parser_token_name = token_to_str(parser_token_id) - if parser_token_name.nil? - raise GraphQL::ParseError.new("Parse Error on unknown token: {token_id: #{parser_token_id}, lexer_token: #{lexer_token}} from #{@query_string}", nil, nil, @query_string, filename: @filename) - else - line = lexer_token[1] - col = lexer_token[2] - if lexer_token[0] == :BAD_UNICODE_ESCAPE - raise GraphQL::ParseError.new("Parse error on bad Unicode escape sequence: #{lexer_token[3].inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename) - else - raise GraphQL::ParseError.new("Parse error on #{lexer_token[3].inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename) + def parse + @document ||= begin + @trace.parse(query_string: @graphql_str) do + document + end + rescue SystemStackError + raise GraphQL::ParseError.new("This query is too large to execute.", nil, nil, @query_str, filename: @filename) + end end - end - end -end - -def make_node(node_name, assigns) - assigns.each do |key, value| - if key != :position_source && value.is_a?(Array) && value[0].is_a?(Symbol) - assigns[key] = value[3] - end - end - - assigns[:filename] = @filename - - GraphQL::Language::Nodes.const_get(node_name).new(assigns) -end -...end parser.y/module_eval... 
[… the long run of deleted Racc-generated output is omitted here for readability: the racc_* state-transition tables (action, check, pointer, default, goto, reduce, token), Racc_token_to_s_table, and the first block of module_eval'd _reduce_* grammar actions, all of which this change deletes along with the rest of the generated parser …]
- end -.,., - -module_eval(<<'.,.,', 'parser.y', 42) - def _reduce_12(val, _values, result) - result = make_node( - :OperationDefinition, { - operation_type: "query", - selections: [], - position_source: val[0], - } - ) - - result - end -.,., - -# reduce 13 omitted - -# reduce 14 omitted - -# reduce 15 omitted - -module_eval(<<'.,.,', 'parser.y', 57) - def _reduce_16(val, _values, result) - result = nil - result - end -.,., - -# reduce 17 omitted - -module_eval(<<'.,.,', 'parser.y', 61) - def _reduce_18(val, _values, result) - result = EMPTY_ARRAY - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 62) - def _reduce_19(val, _values, result) - result = val[1] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 65) - def _reduce_20(val, _values, result) - result = [val[0]] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 66) - def _reduce_21(val, _values, result) - val[0] << val[1] - result - end -.,., -module_eval(<<'.,.,', 'parser.y', 70) - def _reduce_22(val, _values, result) - result = make_node(:VariableDefinition, { - name: val[1], - type: val[3], - default_value: val[4], - position_source: val[0], - }) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 79) - def _reduce_23(val, _values, result) - result = val[0] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 80) - def _reduce_24(val, _values, result) - result = make_node(:NonNullType, of_type: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 83) - def _reduce_25(val, _values, result) - result = make_node(:TypeName, name: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 84) - def _reduce_26(val, _values, result) - result = make_node(:ListType, of_type: val[1]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 87) - def _reduce_27(val, _values, result) - result = nil - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 88) - def _reduce_28(val, _values, result) - result = val[1] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 91) - def _reduce_29(val, _values, result) - result = val[1] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 94) - def _reduce_30(val, _values, result) - result = EMPTY_ARRAY - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 95) - def _reduce_31(val, _values, result) - result = val[0] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 98) - def _reduce_32(val, _values, result) - result = [result] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 99) - def _reduce_33(val, _values, result) - val[0] << val[1] - result - end -.,., - -# reduce 34 omitted + private -# reduce 35 omitted + attr_reader :token_name -# reduce 36 omitted + def advance_token + @token_name = @lexer.advance + end -module_eval(<<'.,.,', 'parser.y', 108) - def _reduce_37(val, _values, result) - result = make_node( - :Field, { - name: val[0], - arguments: val[1], - directives: val[2], - selections: val[3], - position_source: val[0], - } - ) + def pos + @lexer.pos + end - result - end -.,., + def document + any_tokens = advance_token + if !any_tokens + # Only ignored characters is not a valid document + raise GraphQL::ParseError.new("Unexpected end of document", nil, nil, @graphql_str) + end + defns = [] + while !@lexer.eos? 
+ defns << definition + end + Document.new(pos: 0, definitions: defns, filename: @filename, source_string: @graphql_str) + end -module_eval(<<'.,.,', 'parser.y', 119) - def _reduce_38(val, _values, result) - result = make_node( - :Field, { - alias: val[0], - name: val[2], - arguments: val[3], - directives: val[4], - selections: val[5], - position_source: val[0], - } + def definition + case token_name + when :FRAGMENT + loc = pos + expect_token :FRAGMENT + f_name = if !at?(:ON) + parse_name + end + expect_token :ON + f_type = parse_type_name + directives = parse_directives + selections = selection_set + Nodes::FragmentDefinition.new( + pos: loc, + name: f_name, + type: f_type, + directives: directives, + selections: selections, + filename: @filename, + source_string: @graphql_str + ) + when :QUERY, :MUTATION, :SUBSCRIPTION, :LCURLY + op_loc = pos + op_type = case token_name + when :LCURLY + "query" + else + parse_operation_type + end + + op_name = at?(:IDENTIFIER) ? parse_name : nil + + variable_definitions = if at?(:LPAREN) + expect_token(:LPAREN) + defs = [] + while !at?(:RPAREN) + loc = pos + expect_token(:VAR_SIGN) + var_name = parse_name + expect_token(:COLON) + var_type = self.type + default_value = if at?(:EQUALS) + advance_token + value + end + + defs << Nodes::VariableDefinition.new(pos: loc, name: var_name, type: var_type, default_value: default_value, filename: @filename, source_string: @graphql_str) + end + expect_token(:RPAREN) + defs + else + EmptyObjects::EMPTY_ARRAY + end + + directives = parse_directives + + OperationDefinition.new( + pos: op_loc, + operation_type: op_type, + name: op_name, + variables: variable_definitions, + directives: directives, + selections: selection_set, + filename: @filename, + source_string: @graphql_str + ) + when :EXTEND + loc = pos + advance_token + case token_name + when :SCALAR + advance_token + name = parse_name + directives = parse_directives + ScalarTypeExtension.new(pos: loc, name: name, directives: directives, filename: @filename, source_string: @graphql_str) + when :TYPE + advance_token + name = parse_name + implements_interfaces = parse_implements + directives = parse_directives + field_defns = at?(:LCURLY) ? parse_field_definitions : EMPTY_ARRAY + + ObjectTypeExtension.new(pos: loc, name: name, interfaces: implements_interfaces, directives: directives, fields: field_defns, filename: @filename, source_string: @graphql_str) + when :INTERFACE + advance_token + name = parse_name + directives = parse_directives + interfaces = parse_implements + fields_definition = at?(:LCURLY) ? 
parse_field_definitions : EMPTY_ARRAY + InterfaceTypeExtension.new(pos: loc, name: name, directives: directives, fields: fields_definition, interfaces: interfaces, filename: @filename, source_string: @graphql_str) + when :UNION + advance_token + name = parse_name + directives = parse_directives + union_member_types = parse_union_members + UnionTypeExtension.new(pos: loc, name: name, directives: directives, types: union_member_types, filename: @filename, source_string: @graphql_str) + when :ENUM + advance_token + name = parse_name + directives = parse_directives + enum_values_definition = parse_enum_value_definitions + Nodes::EnumTypeExtension.new(pos: loc, name: name, directives: directives, values: enum_values_definition, filename: @filename, source_string: @graphql_str) + when :INPUT + advance_token + name = parse_name + directives = parse_directives + input_fields_definition = parse_input_object_field_definitions + InputObjectTypeExtension.new(pos: loc, name: name, directives: directives, fields: input_fields_definition, filename: @filename, source_string: @graphql_str) + when :SCHEMA + advance_token + directives = parse_directives + query = mutation = subscription = nil + if at?(:LCURLY) + advance_token + while !at?(:RCURLY) + if at?(:QUERY) + advance_token + expect_token(:COLON) + query = parse_name + elsif at?(:MUTATION) + advance_token + expect_token(:COLON) + mutation = parse_name + elsif at?(:SUBSCRIPTION) + advance_token + expect_token(:COLON) + subscription = parse_name + else + expect_one_of([:QUERY, :MUTATION, :SUBSCRIPTION]) + end + end + expect_token :RCURLY + end + SchemaExtension.new( + subscription: subscription, + mutation: mutation, + query: query, + directives: directives, + pos: loc, + filename: @filename, + source_string: @graphql_str, ) + else + expect_one_of([:SCHEMA, :SCALAR, :TYPE, :ENUM, :INPUT, :UNION, :INTERFACE]) + end + else + loc = pos + desc = at?(:STRING) ? 
string_value : nil + defn_loc = pos + case token_name + when :SCHEMA + advance_token + directives = parse_directives + query = mutation = subscription = nil + expect_token :LCURLY + while !at?(:RCURLY) + if at?(:QUERY) + advance_token + expect_token(:COLON) + query = parse_name + elsif at?(:MUTATION) + advance_token + expect_token(:COLON) + mutation = parse_name + elsif at?(:SUBSCRIPTION) + advance_token + expect_token(:COLON) + subscription = parse_name + else + expect_one_of([:QUERY, :MUTATION, :SUBSCRIPTION]) + end + end + expect_token :RCURLY + SchemaDefinition.new(pos: loc, definition_pos: defn_loc, query: query, mutation: mutation, subscription: subscription, directives: directives, filename: @filename, source_string: @graphql_str) + when :DIRECTIVE + advance_token + expect_token :DIR_SIGN + name = parse_name + arguments_definition = parse_argument_definitions + repeatable = if at?(:REPEATABLE) + advance_token + true + else + false + end + expect_token :ON + directive_locations = [DirectiveLocation.new(pos: pos, name: parse_name, filename: @filename, source_string: @graphql_str)] + while at?(:PIPE) + advance_token + directive_locations << DirectiveLocation.new(pos: pos, name: parse_name, filename: @filename, source_string: @graphql_str) + end + DirectiveDefinition.new(pos: loc, definition_pos: defn_loc, description: desc, name: name, arguments: arguments_definition, locations: directive_locations, repeatable: repeatable, filename: @filename, source_string: @graphql_str) + when :TYPE + advance_token + name = parse_name + implements_interfaces = parse_implements + directives = parse_directives + field_defns = at?(:LCURLY) ? parse_field_definitions : EMPTY_ARRAY + + ObjectTypeDefinition.new(pos: loc, definition_pos: defn_loc, description: desc, name: name, interfaces: implements_interfaces, directives: directives, fields: field_defns, filename: @filename, source_string: @graphql_str) + when :INTERFACE + advance_token + name = parse_name + directives = parse_directives + interfaces = parse_implements + fields_definition = parse_field_definitions + InterfaceTypeDefinition.new(pos: loc, definition_pos: defn_loc, description: desc, name: name, directives: directives, fields: fields_definition, interfaces: interfaces, filename: @filename, source_string: @graphql_str) + when :UNION + advance_token + name = parse_name + directives = parse_directives + union_member_types = parse_union_members + UnionTypeDefinition.new(pos: loc, definition_pos: defn_loc, description: desc, name: name, directives: directives, types: union_member_types, filename: @filename, source_string: @graphql_str) + when :SCALAR + advance_token + name = parse_name + directives = parse_directives + ScalarTypeDefinition.new(pos: loc, definition_pos: defn_loc, description: desc, name: name, directives: directives, filename: @filename, source_string: @graphql_str) + when :ENUM + advance_token + name = parse_name + directives = parse_directives + enum_values_definition = parse_enum_value_definitions + Nodes::EnumTypeDefinition.new(pos: loc, definition_pos: defn_loc, description: desc, name: name, directives: directives, values: enum_values_definition, filename: @filename, source_string: @graphql_str) + when :INPUT + advance_token + name = parse_name + directives = parse_directives + input_fields_definition = parse_input_object_field_definitions + InputObjectTypeDefinition.new(pos: loc, definition_pos: defn_loc, description: desc, name: name, directives: directives, fields: input_fields_definition, filename: @filename, 
source_string: @graphql_str) + else + expect_one_of([:SCHEMA, :SCALAR, :TYPE, :ENUM, :INPUT, :UNION, :INTERFACE]) + end + end + end - result - end -.,., - -# reduce 39 omitted - -# reduce 40 omitted - -# reduce 41 omitted - -# reduce 42 omitted - -# reduce 43 omitted - -# reduce 44 omitted - -# reduce 45 omitted - -# reduce 46 omitted - -# reduce 47 omitted - -# reduce 48 omitted - -# reduce 49 omitted - -# reduce 50 omitted - -# reduce 51 omitted - -# reduce 52 omitted - -# reduce 53 omitted - -# reduce 54 omitted - -# reduce 55 omitted - -# reduce 56 omitted - -# reduce 57 omitted - -# reduce 58 omitted - -# reduce 59 omitted - -# reduce 60 omitted - -# reduce 61 omitted - -# reduce 62 omitted - -# reduce 63 omitted - -module_eval(<<'.,.,', 'parser.y', 165) - def _reduce_64(val, _values, result) - result = make_node(:EnumValueDefinition, name: val[1], directives: val[2], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 168) - def _reduce_65(val, _values, result) - result = [val[0]] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 169) - def _reduce_66(val, _values, result) - result = val[0] << val[1] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 172) - def _reduce_67(val, _values, result) - result = EMPTY_ARRAY - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 173) - def _reduce_68(val, _values, result) - result = val[1] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 176) - def _reduce_69(val, _values, result) - result = [val[0]] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 177) - def _reduce_70(val, _values, result) - val[0] << val[1] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 180) - def _reduce_71(val, _values, result) - result = make_node(:Argument, name: val[0], value: val[2], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 183) - def _reduce_72(val, _values, result) - result = val[0][3].to_f - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 184) - def _reduce_73(val, _values, result) - result = val[0][3].to_i - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 185) - def _reduce_74(val, _values, result) - result = val[0][3] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 186) - def _reduce_75(val, _values, result) - result = true - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 187) - def _reduce_76(val, _values, result) - result = false - result - end -.,., - -# reduce 77 omitted - -# reduce 78 omitted - -# reduce 79 omitted - -# reduce 80 omitted - -# reduce 81 omitted - -# reduce 82 omitted - -# reduce 83 omitted - -module_eval(<<'.,.,', 'parser.y', 198) - def _reduce_84(val, _values, result) - result = make_node(:NullValue, name: val[0], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 199) - def _reduce_85(val, _values, result) - result = make_node(:VariableIdentifier, name: val[1], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 202) - def _reduce_86(val, _values, result) - result = EMPTY_ARRAY - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 203) - def _reduce_87(val, _values, result) - result = val[1] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 206) - def _reduce_88(val, _values, result) - result = [val[0]] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 207) - def _reduce_89(val, _values, result) - val[0] << val[1] - 
result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 210) - def _reduce_90(val, _values, result) - result = make_node(:InputObject, arguments: [], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 211) - def _reduce_91(val, _values, result) - result = make_node(:InputObject, arguments: val[1], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 214) - def _reduce_92(val, _values, result) - result = [val[0]] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 215) - def _reduce_93(val, _values, result) - val[0] << val[1] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 218) - def _reduce_94(val, _values, result) - result = make_node(:Argument, name: val[0], value: val[2], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 222) - def _reduce_95(val, _values, result) - result = make_node(:InputObject, arguments: [], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 223) - def _reduce_96(val, _values, result) - result = make_node(:InputObject, arguments: val[1], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 226) - def _reduce_97(val, _values, result) - result = [val[0]] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 227) - def _reduce_98(val, _values, result) - val[0] << val[1] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 230) - def _reduce_99(val, _values, result) - result = make_node(:Argument, name: val[0], value: val[2], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 232) - def _reduce_100(val, _values, result) - result = make_node(:Enum, name: val[0], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 235) - def _reduce_101(val, _values, result) - result = EMPTY_ARRAY - result - end -.,., - -# reduce 102 omitted - -module_eval(<<'.,.,', 'parser.y', 239) - def _reduce_103(val, _values, result) - result = [val[0]] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 240) - def _reduce_104(val, _values, result) - val[0] << val[1] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 242) - def _reduce_105(val, _values, result) - result = make_node(:Directive, name: val[1], arguments: val[2], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 245) - def _reduce_106(val, _values, result) - result = make_node(:FragmentSpread, name: val[1], directives: val[2], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 249) - def _reduce_107(val, _values, result) - result = make_node(:InlineFragment, { - type: val[2], - directives: val[3], - selections: val[4], - position_source: val[0] - }) - - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 257) - def _reduce_108(val, _values, result) - result = make_node(:InlineFragment, { - type: nil, - directives: val[1], - selections: val[2], - position_source: val[0] - }) - - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 267) - def _reduce_109(val, _values, result) - result = make_node(:FragmentDefinition, { - name: val[1], - type: val[3], - directives: val[4], - selections: val[5], - position_source: val[0], - } - ) - - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 278) - def _reduce_110(val, _values, result) - result = nil - result - end -.,., - -# reduce 111 omitted - -# reduce 112 omitted - -# reduce 113 omitted - -# reduce 114 omitted - -module_eval(<<'.,.,', 'parser.y', 287) - def 
_reduce_115(val, _values, result) - result = make_node(:SchemaDefinition, position_source: val[0], definition_line: val[0][1], directives: val[1], **val[2]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 290) - def _reduce_116(val, _values, result) - result = {} - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 291) - def _reduce_117(val, _values, result) - result = val[1] - result - end -.,., - -# reduce 118 omitted - -module_eval(<<'.,.,', 'parser.y', 295) - def _reduce_119(val, _values, result) - result = val[0].merge(val[1]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 298) - def _reduce_120(val, _values, result) - result = { val[0][3].to_sym => val[2] } - result - end -.,., - -# reduce 121 omitted - -# reduce 122 omitted - -# reduce 123 omitted - -# reduce 124 omitted - -# reduce 125 omitted - -# reduce 126 omitted - -# reduce 127 omitted - -# reduce 128 omitted - -module_eval(<<'.,.,', 'parser.y', 313) - def _reduce_129(val, _values, result) - result = make_node(:SchemaExtension, position_source: val[0], directives: val[2], **val[4]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 314) - def _reduce_130(val, _values, result) - result = make_node(:SchemaExtension, position_source: val[0], directives: val[2]) - result - end -.,., - -# reduce 131 omitted - -# reduce 132 omitted - -# reduce 133 omitted - -# reduce 134 omitted - -# reduce 135 omitted - -# reduce 136 omitted - -module_eval(<<'.,.,', 'parser.y', 324) - def _reduce_137(val, _values, result) - result = make_node(:ScalarTypeExtension, name: val[2], directives: val[3], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 328) - def _reduce_138(val, _values, result) - result = make_node(:ObjectTypeExtension, name: val[2], interfaces: val[3], directives: [], fields: val[4], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 329) - def _reduce_139(val, _values, result) - result = make_node(:ObjectTypeExtension, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 330) - def _reduce_140(val, _values, result) - result = make_node(:ObjectTypeExtension, name: val[2], interfaces: val[3], directives: val[4], fields: [], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 331) - def _reduce_141(val, _values, result) - result = make_node(:ObjectTypeExtension, name: val[2], interfaces: val[3], directives: [], fields: [], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 334) - def _reduce_142(val, _values, result) - result = make_node(:InterfaceTypeExtension, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 335) - def _reduce_143(val, _values, result) - result = make_node(:InterfaceTypeExtension, name: val[2], interfaces: val[3], directives: val[4], fields: [], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 336) - def _reduce_144(val, _values, result) - result = make_node(:InterfaceTypeExtension, name: val[2], interfaces: val[3], directives: [], fields: [], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 339) - def _reduce_145(val, _values, result) - result = make_node(:UnionTypeExtension, name: val[2], directives: val[3], types: val[5], position_source: val[0]) - result - end -.,., - 
-module_eval(<<'.,.,', 'parser.y', 340) - def _reduce_146(val, _values, result) - result = make_node(:UnionTypeExtension, name: val[2], directives: val[3], types: [], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 343) - def _reduce_147(val, _values, result) - result = make_node(:EnumTypeExtension, name: val[2], directives: val[3], values: val[5], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 344) - def _reduce_148(val, _values, result) - result = make_node(:EnumTypeExtension, name: val[2], directives: val[3], values: [], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 347) - def _reduce_149(val, _values, result) - result = make_node(:InputObjectTypeExtension, name: val[2], directives: val[3], fields: val[5], position_source: val[0]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 348) - def _reduce_150(val, _values, result) - result = make_node(:InputObjectTypeExtension, name: val[2], directives: val[3], fields: [], position_source: val[0]) - result - end -.,., - -# reduce 151 omitted - -# reduce 152 omitted - -# reduce 153 omitted - -module_eval(<<'.,.,', 'parser.y', 358) - def _reduce_154(val, _values, result) - result = make_node(:ScalarTypeDefinition, name: val[2], directives: val[3], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) - - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 363) - def _reduce_155(val, _values, result) - result = make_node(:ObjectTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) - - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 367) - def _reduce_156(val, _values, result) - result = EMPTY_ARRAY - result - end -.,., - -# reduce 157 omitted - -module_eval(<<'.,.,', 'parser.y', 371) - def _reduce_158(val, _values, result) - result = val[2] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 372) - def _reduce_159(val, _values, result) - result = val[1] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 373) - def _reduce_160(val, _values, result) - result = val[1] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 376) - def _reduce_161(val, _values, result) - result = [make_node(:TypeName, name: val[0], position_source: val[0])] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 377) - def _reduce_162(val, _values, result) - val[0] << make_node(:TypeName, name: val[2], position_source: val[2]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 380) - def _reduce_163(val, _values, result) - result = [make_node(:TypeName, name: val[0], position_source: val[0])] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 381) - def _reduce_164(val, _values, result) - val[0] << make_node(:TypeName, name: val[1], position_source: val[1]) - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 385) - def _reduce_165(val, _values, result) - result = make_node(:InputValueDefinition, name: val[1], type: val[3], default_value: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) - - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 389) - def _reduce_166(val, _values, result) - result = [val[0]] - result - end -.,., - -module_eval(<<'.,.,', 'parser.y', 390) - def _reduce_167(val, _values, result) - val[0] << 
val[1] - result - end -.,., + def parse_input_object_field_definitions + if at?(:LCURLY) + expect_token :LCURLY + list = [] + while !at?(:RCURLY) + list << parse_input_value_definition + end + expect_token :RCURLY + list + else + EMPTY_ARRAY + end + end -module_eval(<<'.,.,', 'parser.y', 393) - def _reduce_168(val, _values, result) - result = EMPTY_ARRAY - result - end -.,., + def parse_enum_value_definitions + if at?(:LCURLY) + expect_token :LCURLY + list = [] + while !at?(:RCURLY) + v_loc = pos + description = if at?(:STRING); string_value; end + defn_loc = pos + enum_value = expect_token_value(:IDENTIFIER) + v_directives = parse_directives + list << EnumValueDefinition.new(pos: v_loc, definition_pos: defn_loc, description: description, name: enum_value, directives: v_directives, filename: @filename, source_string: @graphql_str) + end + expect_token :RCURLY + list + else + EMPTY_ARRAY + end + end -module_eval(<<'.,.,', 'parser.y', 394) - def _reduce_169(val, _values, result) - result = val[1] - result - end -.,., + def parse_union_members + if at?(:EQUALS) + expect_token :EQUALS + list = [parse_type_name] + while at?(:PIPE) + advance_token + list << parse_type_name + end + list + else + EMPTY_ARRAY + end + end -module_eval(<<'.,.,', 'parser.y', 398) - def _reduce_170(val, _values, result) - result = make_node(:FieldDefinition, name: val[1], arguments: val[2], type: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) + def parse_implements + if at?(:IMPLEMENTS) + advance_token + list = [] + while true + advance_token if at?(:AMP) + break unless at?(:IDENTIFIER) + list << parse_type_name + end + list + else + EMPTY_ARRAY + end + end - result - end -.,., + def parse_field_definitions + expect_token :LCURLY + list = [] + while !at?(:RCURLY) + loc = pos + description = if at?(:STRING); string_value; end + defn_loc = pos + name = parse_name + arguments_definition = parse_argument_definitions + expect_token :COLON + type = self.type + directives = parse_directives + + list << FieldDefinition.new(pos: loc, definition_pos: defn_loc, description: description, name: name, arguments: arguments_definition, type: type, directives: directives, filename: @filename, source_string: @graphql_str) + end + expect_token :RCURLY + list + end -module_eval(<<'.,.,', 'parser.y', 402) - def _reduce_171(val, _values, result) - result = EMPTY_ARRAY - result - end -.,., + def parse_argument_definitions + if at?(:LPAREN) + advance_token + list = [] + while !at?(:RPAREN) + list << parse_input_value_definition + end + expect_token :RPAREN + list + else + EMPTY_ARRAY + end + end -module_eval(<<'.,.,', 'parser.y', 403) - def _reduce_172(val, _values, result) - result = val[1] - result - end -.,., + def parse_input_value_definition + loc = pos + description = if at?(:STRING); string_value; end + defn_loc = pos + name = parse_name + expect_token :COLON + type = self.type + default_value = if at?(:EQUALS) + advance_token + value + else + nil + end + directives = parse_directives + InputValueDefinition.new(pos: loc, definition_pos: defn_loc, description: description, name: name, type: type, default_value: default_value, directives: directives, filename: @filename, source_string: @graphql_str) + end -module_eval(<<'.,.,', 'parser.y', 406) - def _reduce_173(val, _values, result) - result = EMPTY_ARRAY - result - end -.,., + def type + type = case token_name + when :IDENTIFIER + parse_type_name + when :LBRACKET + list_type + end + + if 
at?(:BANG) + type = Nodes::NonNullType.new(pos: pos, of_type: type) + expect_token(:BANG) + end + type + end -module_eval(<<'.,.,', 'parser.y', 407) - def _reduce_174(val, _values, result) - result = [val[0]] - result - end -.,., + def list_type + loc = pos + expect_token(:LBRACKET) + type = Nodes::ListType.new(pos: loc, of_type: self.type) + expect_token(:RBRACKET) + type + end -module_eval(<<'.,.,', 'parser.y', 408) - def _reduce_175(val, _values, result) - val[0] << val[1] - result - end -.,., + def parse_operation_type + val = if at?(:QUERY) + "query" + elsif at?(:MUTATION) + "mutation" + elsif at?(:SUBSCRIPTION) + "subscription" + else + expect_one_of([:QUERY, :MUTATION, :SUBSCRIPTION]) + end + advance_token + val + end -module_eval(<<'.,.,', 'parser.y', 412) - def _reduce_176(val, _values, result) - result = make_node(:InterfaceTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) + def selection_set + expect_token(:LCURLY) + selections = [] + while @token_name != :RCURLY + selections << if at?(:ELLIPSIS) + loc = pos + advance_token + case token_name + when :ON, :DIR_SIGN, :LCURLY + if_type = if at?(:ON) + advance_token + parse_type_name + else + nil + end + + directives = parse_directives + + Nodes::InlineFragment.new(pos: loc, type: if_type, directives: directives, selections: selection_set, filename: @filename, source_string: @graphql_str) + else + name = parse_name_without_on + directives = parse_directives + + # Can this ever happen? + # expect_token(:IDENTIFIER) if at?(:ON) + + FragmentSpread.new(pos: loc, name: name, directives: directives, filename: @filename, source_string: @graphql_str) + end + else + loc = pos + name = parse_name + + field_alias = nil + + if at?(:COLON) + advance_token + field_alias = name + name = parse_name + end + + arguments = at?(:LPAREN) ? parse_arguments : nil + directives = at?(:DIR_SIGN) ? parse_directives : nil + selection_set = at?(:LCURLY) ? 
self.selection_set : nil + + Nodes::Field.new(pos: loc, field_alias: field_alias, name: name, arguments: arguments, directives: directives, selections: selection_set, filename: @filename, source_string: @graphql_str) + end + end + expect_token(:RCURLY) + selections + end - result - end -.,., + def parse_name + case token_name + when :IDENTIFIER + expect_token_value(:IDENTIFIER) + when :SCHEMA + advance_token + "schema" + when :SCALAR + advance_token + "scalar" + when :IMPLEMENTS + advance_token + "implements" + when :INTERFACE + advance_token + "interface" + when :UNION + advance_token + "union" + when :ENUM + advance_token + "enum" + when :INPUT + advance_token + "input" + when :DIRECTIVE + advance_token + "directive" + when :TYPE + advance_token + "type" + when :QUERY + advance_token + "query" + when :MUTATION + advance_token + "mutation" + when :SUBSCRIPTION + advance_token + "subscription" + when :TRUE + advance_token + "true" + when :FALSE + advance_token + "false" + when :FRAGMENT + advance_token + "fragment" + when :REPEATABLE + advance_token + "repeatable" + when :NULL + advance_token + "null" + else + expect_token(:NAME) + end + end -module_eval(<<'.,.,', 'parser.y', 416) - def _reduce_177(val, _values, result) - result = [make_node(:TypeName, name: val[0], position_source: val[0])] - result - end -.,., + def parse_name_without_on + if at?(:ON) + expect_token(:IDENTIFIER) + else + parse_name + end + end -module_eval(<<'.,.,', 'parser.y', 417) - def _reduce_178(val, _values, result) - val[0] << make_node(:TypeName, name: val[2], position_source: val[2]) - result - end -.,., + # Any identifier, but not true, false, or null + def parse_enum_name + if at?(:TRUE) || at?(:FALSE) || at?(:NULL) + expect_token(:IDENTIFIER) + else + parse_name + end + end -module_eval(<<'.,.,', 'parser.y', 421) - def _reduce_179(val, _values, result) - result = make_node(:UnionTypeDefinition, name: val[2], directives: val[3], types: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) + def parse_type_name + TypeName.new(pos: pos, name: parse_name, filename: @filename, source_string: @graphql_str) + end - result - end -.,., + def parse_directives + if at?(:DIR_SIGN) + dirs = [] + while at?(:DIR_SIGN) + loc = pos + advance_token + name = parse_name + arguments = parse_arguments + + dirs << Nodes::Directive.new(pos: loc, name: name, arguments: arguments, filename: @filename, source_string: @graphql_str) + end + dirs + else + EMPTY_ARRAY + end + end -module_eval(<<'.,.,', 'parser.y', 426) - def _reduce_180(val, _values, result) - result = make_node(:EnumTypeDefinition, name: val[2], directives: val[3], values: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) + def parse_arguments + if at?(:LPAREN) + advance_token + args = [] + while !at?(:RPAREN) + loc = pos + name = parse_name + expect_token(:COLON) + args << Nodes::Argument.new(pos: loc, name: name, value: value, filename: @filename, source_string: @graphql_str) + end + if args.empty? 
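+            # The lexer never produces an :ARGUMENT_NAME token, so this always fails
+            # with a parse error; empty argument lists like `f()` aren't valid GraphQL.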
+ expect_token(:ARGUMENT_NAME) # At least one argument is required + end + expect_token(:RPAREN) + args + else + EMPTY_ARRAY + end + end - result - end -.,., + def string_value + token_value = @lexer.string_value + expect_token :STRING + token_value + end -module_eval(<<'.,.,', 'parser.y', 431) - def _reduce_181(val, _values, result) - result = make_node(:InputObjectTypeDefinition, name: val[2], directives: val[3], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) + def value + case token_name + when :INT + expect_token_value(:INT).to_i + when :FLOAT + expect_token_value(:FLOAT).to_f + when :STRING + string_value + when :TRUE + advance_token + true + when :FALSE + advance_token + false + when :NULL + advance_token + NullValue.new(pos: pos, name: "null", filename: @filename, source_string: @graphql_str) + when :IDENTIFIER + Nodes::Enum.new(pos: pos, name: expect_token_value(:IDENTIFIER), filename: @filename, source_string: @graphql_str) + when :LBRACKET + advance_token + list = [] + while !at?(:RBRACKET) + list << value + end + expect_token(:RBRACKET) + list + when :LCURLY + start = pos + advance_token + args = [] + while !at?(:RCURLY) + loc = pos + n = parse_name + expect_token(:COLON) + args << Argument.new(pos: loc, name: n, value: value, filename: @filename, source_string: @graphql_str) + end + expect_token(:RCURLY) + InputObject.new(pos: start, arguments: args, filename: @filename, source_string: @graphql_str) + when :VAR_SIGN + loc = pos + advance_token + VariableIdentifier.new(pos: loc, name: parse_name, filename: @filename, source_string: @graphql_str) + else + expect_token(:VALUE) + end + end - result - end -.,., + def at?(expected_token_name) + @token_name == expected_token_name + end -module_eval(<<'.,.,', 'parser.y', 436) - def _reduce_182(val, _values, result) - result = make_node(:DirectiveDefinition, name: val[3], arguments: val[4], locations: val[7], repeatable: !!val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) + def expect_token(expected_token_name) + unless @token_name == expected_token_name + raise_parse_error("Expected #{expected_token_name}, actual: #{token_name || "(none)"} (#{debug_token_value.inspect})") + end + advance_token + end - result - end -.,., + def expect_one_of(token_names) + raise_parse_error("Expected one of #{token_names.join(", ")}, actual: #{token_name || "NOTHING"} (#{debug_token_value.inspect})") + end -# reduce 183 omitted + def raise_parse_error(message) + message += " at [#{@lexer.line_number}, #{@lexer.column_number}]" + raise GraphQL::ParseError.new( + message, + @lexer.line_number, + @lexer.column_number, + @graphql_str, + filename: @filename, + ) -# reduce 184 omitted + end -module_eval(<<'.,.,', 'parser.y', 444) - def _reduce_185(val, _values, result) - result = [make_node(:DirectiveLocation, name: val[0][3], position_source: val[0])] - result - end -.,., + # Only use when we care about the expected token's value + def expect_token_value(tok) + token_value = @lexer.token_value + expect_token(tok) + token_value + end -module_eval(<<'.,.,', 'parser.y', 445) - def _reduce_186(val, _values, result) - val[0] << make_node(:DirectiveLocation, name: val[2][3], position_source: val[2]) - result + # token_value works for when the scanner matched something + # which is usually fine and it's good for it to be fast at that. 
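+      # Punctuation and `...` advance the scanner position directly (no regexp match),
+      # so `token_value` can't recover their text; when building error messages this
+      # helper falls back to the punctuation constant or a literal "..." instead.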
+ def debug_token_value + if token_name && Lexer::Punctuation.const_defined?(token_name) + Lexer::Punctuation.const_get(token_name) + elsif token_name == :ELLIPSIS + "..." + else + @lexer.token_value + end + end + end end -.,., - -def _reduce_none(val, _values, result) - val[0] end - - end # class Parser - end # module Language -end # module GraphQL diff --git a/lib/graphql/language/parser.y b/lib/graphql/language/parser.y deleted file mode 100644 index 25630e6402..0000000000 --- a/lib/graphql/language/parser.y +++ /dev/null @@ -1,560 +0,0 @@ -class GraphQL::Language::Parser -rule - target: document - - document: definitions_list { result = make_node(:Document, definitions: val[0])} - - definitions_list: - definition { result = [val[0]]} - | definitions_list definition { val[0] << val[1] } - - definition: - executable_definition - | type_system_definition - | type_system_extension - - executable_definition: - operation_definition - | fragment_definition - - operation_definition: - operation_type operation_name_opt variable_definitions_opt directives_list_opt selection_set { - result = make_node( - :OperationDefinition, { - operation_type: val[0], - name: val[1], - variables: val[2], - directives: val[3], - selections: val[4], - position_source: val[0], - } - ) - } - | LCURLY selection_list RCURLY { - result = make_node( - :OperationDefinition, { - operation_type: "query", - selections: val[1], - position_source: val[0], - } - ) - } - | LCURLY RCURLY { - result = make_node( - :OperationDefinition, { - operation_type: "query", - selections: [], - position_source: val[0], - } - ) - } - - operation_type: - QUERY - | MUTATION - | SUBSCRIPTION - - operation_name_opt: - /* none */ { result = nil } - | name - - variable_definitions_opt: - /* none */ { result = EMPTY_ARRAY } - | LPAREN variable_definitions_list RPAREN { result = val[1] } - - variable_definitions_list: - variable_definition { result = [val[0]] } - | variable_definitions_list variable_definition { val[0] << val[1] } - - variable_definition: - VAR_SIGN name COLON type default_value_opt { - result = make_node(:VariableDefinition, { - name: val[1], - type: val[3], - default_value: val[4], - position_source: val[0], - }) - } - - type: - nullable_type { result = val[0] } - | nullable_type BANG { result = make_node(:NonNullType, of_type: val[0]) } - - nullable_type: - name { result = make_node(:TypeName, name: val[0])} - | LBRACKET type RBRACKET { result = make_node(:ListType, of_type: val[1]) } - - default_value_opt: - /* none */ { result = nil } - | EQUALS literal_value { result = val[1] } - - selection_set: - LCURLY selection_list RCURLY { result = val[1] } - - selection_set_opt: - /* none */ { result = EMPTY_ARRAY } - | selection_set { result = val[0] } - - selection_list: - selection { result = [result] } - | selection_list selection { val[0] << val[1] } - - selection: - field - | fragment_spread - | inline_fragment - - field: - name arguments_opt directives_list_opt selection_set_opt { - result = make_node( - :Field, { - name: val[0], - arguments: val[1], - directives: val[2], - selections: val[3], - position_source: val[0], - } - ) - } - | name COLON name arguments_opt directives_list_opt selection_set_opt { - result = make_node( - :Field, { - alias: val[0], - name: val[2], - arguments: val[3], - directives: val[4], - selections: val[5], - position_source: val[0], - } - ) - } - - name: - name_without_on - | ON - - schema_keyword: - SCHEMA - | SCALAR - | TYPE - | IMPLEMENTS - | INTERFACE - | UNION - | ENUM - | INPUT - | DIRECTIVE - - 
name_without_on: - IDENTIFIER - | FRAGMENT - | REPEATABLE - | TRUE - | FALSE - | NULL - | operation_type - | schema_keyword - - enum_name: /* any identifier, but not "true", "false" or "null" */ - IDENTIFIER - | FRAGMENT - | REPEATABLE - | ON - | operation_type - | schema_keyword - - enum_value_definition: - description_opt enum_name directives_list_opt { result = make_node(:EnumValueDefinition, name: val[1], directives: val[2], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) } - - enum_value_definitions: - enum_value_definition { result = [val[0]] } - | enum_value_definitions enum_value_definition { result = val[0] << val[1] } - - arguments_opt: - /* none */ { result = EMPTY_ARRAY } - | LPAREN arguments_list RPAREN { result = val[1] } - - arguments_list: - argument { result = [val[0]] } - | arguments_list argument { val[0] << val[1] } - - argument: - name COLON input_value { result = make_node(:Argument, name: val[0], value: val[2], position_source: val[0])} - - literal_value: - FLOAT { result = val[0][3].to_f } - | INT { result = val[0][3].to_i } - | STRING { result = val[0][3] } - | TRUE { result = true } - | FALSE { result = false } - | null_value - | enum_value - | list_value - | object_literal_value - - input_value: - literal_value - | variable - | object_value - - null_value: NULL { result = make_node(:NullValue, name: val[0], position_source: val[0]) } - variable: VAR_SIGN name { result = make_node(:VariableIdentifier, name: val[1], position_source: val[0]) } - - list_value: - LBRACKET RBRACKET { result = EMPTY_ARRAY } - | LBRACKET list_value_list RBRACKET { result = val[1] } - - list_value_list: - input_value { result = [val[0]] } - | list_value_list input_value { val[0] << val[1] } - - object_value: - LCURLY RCURLY { result = make_node(:InputObject, arguments: [], position_source: val[0])} - | LCURLY object_value_list RCURLY { result = make_node(:InputObject, arguments: val[1], position_source: val[0])} - - object_value_list: - object_value_field { result = [val[0]] } - | object_value_list object_value_field { val[0] << val[1] } - - object_value_field: - name COLON input_value { result = make_node(:Argument, name: val[0], value: val[2], position_source: val[0])} - - /* like the previous, but with literals only: */ - object_literal_value: - LCURLY RCURLY { result = make_node(:InputObject, arguments: [], position_source: val[0])} - | LCURLY object_literal_value_list RCURLY { result = make_node(:InputObject, arguments: val[1], position_source: val[0])} - - object_literal_value_list: - object_literal_value_field { result = [val[0]] } - | object_literal_value_list object_literal_value_field { val[0] << val[1] } - - object_literal_value_field: - name COLON literal_value { result = make_node(:Argument, name: val[0], value: val[2], position_source: val[0])} - - enum_value: enum_name { result = make_node(:Enum, name: val[0], position_source: val[0]) } - - directives_list_opt: - /* none */ { result = EMPTY_ARRAY } - | directives_list - - directives_list: - directive { result = [val[0]] } - | directives_list directive { val[0] << val[1] } - - directive: DIR_SIGN name arguments_opt { result = make_node(:Directive, name: val[1], arguments: val[2], position_source: val[0]) } - - fragment_spread: - ELLIPSIS name_without_on directives_list_opt { result = make_node(:FragmentSpread, name: val[1], directives: val[2], position_source: val[0]) } - - inline_fragment: - ELLIPSIS ON type directives_list_opt selection_set { - result = 
make_node(:InlineFragment, { - type: val[2], - directives: val[3], - selections: val[4], - position_source: val[0] - }) - } - | ELLIPSIS directives_list_opt selection_set { - result = make_node(:InlineFragment, { - type: nil, - directives: val[1], - selections: val[2], - position_source: val[0] - }) - } - - fragment_definition: - FRAGMENT fragment_name_opt ON type directives_list_opt selection_set { - result = make_node(:FragmentDefinition, { - name: val[1], - type: val[3], - directives: val[4], - selections: val[5], - position_source: val[0], - } - ) - } - - fragment_name_opt: - /* none */ { result = nil } - | name_without_on - - type_system_definition: - schema_definition - | type_definition - | directive_definition - - schema_definition: - SCHEMA directives_list_opt operation_type_definition_list_opt { result = make_node(:SchemaDefinition, position_source: val[0], definition_line: val[0][1], directives: val[1], **val[2]) } - - operation_type_definition_list_opt: - /* none */ { result = {} } - | LCURLY operation_type_definition_list RCURLY { result = val[1] } - - operation_type_definition_list: - operation_type_definition - | operation_type_definition_list operation_type_definition { result = val[0].merge(val[1]) } - - operation_type_definition: - operation_type COLON name { result = { val[0][3].to_sym => val[2] } } - - type_definition: - scalar_type_definition - | object_type_definition - | interface_type_definition - | union_type_definition - | enum_type_definition - | input_object_type_definition - - type_system_extension: - schema_extension - | type_extension - - schema_extension: - EXTEND SCHEMA directives_list_opt LCURLY operation_type_definition_list RCURLY { result = make_node(:SchemaExtension, position_source: val[0], directives: val[2], **val[4]) } - | EXTEND SCHEMA directives_list { result = make_node(:SchemaExtension, position_source: val[0], directives: val[2]) } - - type_extension: - scalar_type_extension - | object_type_extension - | interface_type_extension - | union_type_extension - | enum_type_extension - | input_object_type_extension - - scalar_type_extension: EXTEND SCALAR name directives_list { result = make_node(:ScalarTypeExtension, name: val[2], directives: val[3], position_source: val[0]) } - - object_type_extension: - /* TODO - This first one shouldn't be necessary but parser is getting confused */ - EXTEND TYPE name implements field_definition_list_opt { result = make_node(:ObjectTypeExtension, name: val[2], interfaces: val[3], directives: [], fields: val[4], position_source: val[0]) } - | EXTEND TYPE name implements_opt directives_list_opt field_definition_list_opt { result = make_node(:ObjectTypeExtension, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], position_source: val[0]) } - | EXTEND TYPE name implements_opt directives_list { result = make_node(:ObjectTypeExtension, name: val[2], interfaces: val[3], directives: val[4], fields: [], position_source: val[0]) } - | EXTEND TYPE name implements { result = make_node(:ObjectTypeExtension, name: val[2], interfaces: val[3], directives: [], fields: [], position_source: val[0]) } - - interface_type_extension: - EXTEND INTERFACE name implements_opt directives_list_opt field_definition_list_opt { result = make_node(:InterfaceTypeExtension, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], position_source: val[0]) } - | EXTEND INTERFACE name implements_opt directives_list { result = make_node(:InterfaceTypeExtension, name: val[2], interfaces: val[3], directives: val[4], 
fields: [], position_source: val[0]) } - | EXTEND INTERFACE name implements { result = make_node(:InterfaceTypeExtension, name: val[2], interfaces: val[3], directives: [], fields: [], position_source: val[0]) } - - union_type_extension: - EXTEND UNION name directives_list_opt EQUALS union_members { result = make_node(:UnionTypeExtension, name: val[2], directives: val[3], types: val[5], position_source: val[0]) } - | EXTEND UNION name directives_list { result = make_node(:UnionTypeExtension, name: val[2], directives: val[3], types: [], position_source: val[0]) } - - enum_type_extension: - EXTEND ENUM name directives_list_opt LCURLY enum_value_definitions RCURLY { result = make_node(:EnumTypeExtension, name: val[2], directives: val[3], values: val[5], position_source: val[0]) } - | EXTEND ENUM name directives_list { result = make_node(:EnumTypeExtension, name: val[2], directives: val[3], values: [], position_source: val[0]) } - - input_object_type_extension: - EXTEND INPUT name directives_list_opt LCURLY input_value_definition_list RCURLY { result = make_node(:InputObjectTypeExtension, name: val[2], directives: val[3], fields: val[5], position_source: val[0]) } - | EXTEND INPUT name directives_list { result = make_node(:InputObjectTypeExtension, name: val[2], directives: val[3], fields: [], position_source: val[0]) } - - description: STRING - - description_opt: - /* none */ - | description - - scalar_type_definition: - description_opt SCALAR name directives_list_opt { - result = make_node(:ScalarTypeDefinition, name: val[2], directives: val[3], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) - } - - object_type_definition: - description_opt TYPE name implements_opt directives_list_opt field_definition_list_opt { - result = make_node(:ObjectTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) - } - - implements_opt: - /* none */ { result = EMPTY_ARRAY } - | implements - - implements: - IMPLEMENTS AMP interfaces_list { result = val[2] } - | IMPLEMENTS interfaces_list { result = val[1] } - | IMPLEMENTS legacy_interfaces_list { result = val[1] } - - interfaces_list: - name { result = [make_node(:TypeName, name: val[0], position_source: val[0])] } - | interfaces_list AMP name { val[0] << make_node(:TypeName, name: val[2], position_source: val[2]) } - - legacy_interfaces_list: - name { result = [make_node(:TypeName, name: val[0], position_source: val[0])] } - | legacy_interfaces_list name { val[0] << make_node(:TypeName, name: val[1], position_source: val[1]) } - - input_value_definition: - description_opt name COLON type default_value_opt directives_list_opt { - result = make_node(:InputValueDefinition, name: val[1], type: val[3], default_value: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) - } - - input_value_definition_list: - input_value_definition { result = [val[0]] } - | input_value_definition_list input_value_definition { val[0] << val[1] } - - arguments_definitions_opt: - /* none */ { result = EMPTY_ARRAY } - | LPAREN input_value_definition_list RPAREN { result = val[1] } - - field_definition: - description_opt name arguments_definitions_opt COLON type directives_list_opt { - result = make_node(:FieldDefinition, name: val[1], arguments: val[2], type: val[4], directives: val[5], description: 
val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) - } - - field_definition_list_opt: - /* none */ { result = EMPTY_ARRAY } - | LCURLY field_definition_list RCURLY { result = val[1] } - - field_definition_list: - /* none - this is not actually valid but graphql-ruby used to print this */ { result = EMPTY_ARRAY } - | field_definition { result = [val[0]] } - | field_definition_list field_definition { val[0] << val[1] } - - interface_type_definition: - description_opt INTERFACE name implements_opt directives_list_opt field_definition_list_opt { - result = make_node(:InterfaceTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) - } - - union_members: - name { result = [make_node(:TypeName, name: val[0], position_source: val[0])]} - | union_members PIPE name { val[0] << make_node(:TypeName, name: val[2], position_source: val[2]) } - - union_type_definition: - description_opt UNION name directives_list_opt EQUALS union_members { - result = make_node(:UnionTypeDefinition, name: val[2], directives: val[3], types: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) - } - - enum_type_definition: - description_opt ENUM name directives_list_opt LCURLY enum_value_definitions RCURLY { - result = make_node(:EnumTypeDefinition, name: val[2], directives: val[3], values: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) - } - - input_object_type_definition: - description_opt INPUT name directives_list_opt LCURLY input_value_definition_list RCURLY { - result = make_node(:InputObjectTypeDefinition, name: val[2], directives: val[3], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) - } - - directive_definition: - description_opt DIRECTIVE DIR_SIGN name arguments_definitions_opt directive_repeatable_opt ON directive_locations { - result = make_node(:DirectiveDefinition, name: val[3], arguments: val[4], locations: val[7], repeatable: !!val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) - } - - directive_repeatable_opt: - /* nothing */ - | REPEATABLE - - directive_locations: - name { result = [make_node(:DirectiveLocation, name: val[0][3], position_source: val[0])] } - | directive_locations PIPE name { val[0] << make_node(:DirectiveLocation, name: val[2][3], position_source: val[2]) } -end - ----- header ---- - - ----- inner ---- - -EMPTY_ARRAY = [].freeze - -def initialize(query_string, filename:, trace: Tracing::NullTrace) - raise GraphQL::ParseError.new("No query string was present", nil, nil, query_string) if query_string.nil? - @query_string = query_string - @filename = filename - @trace = trace - @reused_next_token = [nil, nil] -end - -def parse_document - @document ||= begin - # Break the string into tokens - @trace.lex(query_string: @query_string) do - @tokens ||= GraphQL::Language::Lexer.tokenize(@query_string) - end - # From the tokens, build an AST - @trace.parse(query_string: @query_string) do - if @tokens.empty? 
- raise GraphQL::ParseError.new("Unexpected end of document", nil, nil, @query_string) - else - do_parse - end - end - end -end - -class << self - attr_accessor :cache - - def parse(query_string, filename: nil, trace: GraphQL::Tracing::NullTrace) - new(query_string, filename: filename, trace: trace).parse_document - end - - def parse_file(filename, trace: GraphQL::Tracing::NullTrace) - if cache - cache.fetch(filename) do - parse(File.read(filename), filename: filename, trace: trace) - end - else - parse(File.read(filename), filename: filename, trace: trace) - end - end -end - -private - -def next_token - lexer_token = @tokens.shift - if lexer_token.nil? - nil - else - @reused_next_token[0] = lexer_token[0] - @reused_next_token[1] = lexer_token - @reused_next_token - end -end - -def get_description(token) - comments = [] - - loop do - prev_token = token - token = token[4] - - break if token.nil? - break if token[0] != :COMMENT - break if prev_token[1] != token[1] + 1 - - comments.unshift(token[3].sub(/^#\s*/, "")) - end - - return nil if comments.empty? - - comments.join("\n") -end - -def on_error(parser_token_id, lexer_token, vstack) - if lexer_token == "$" || lexer_token == nil - raise GraphQL::ParseError.new("Unexpected end of document", nil, nil, @query_string, filename: @filename) - else - parser_token_name = token_to_str(parser_token_id) - if parser_token_name.nil? - raise GraphQL::ParseError.new("Parse Error on unknown token: {token_id: #{parser_token_id}, lexer_token: #{lexer_token}} from #{@query_string}", nil, nil, @query_string, filename: @filename) - else - line = lexer_token[1] - col = lexer_token[2] - if lexer_token[0] == :BAD_UNICODE_ESCAPE - raise GraphQL::ParseError.new("Parse error on bad Unicode escape sequence: #{lexer_token[3].inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename) - else - raise GraphQL::ParseError.new("Parse error on #{lexer_token[3].inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename) - end - end - end -end - -def make_node(node_name, assigns) - assigns.each do |key, value| - if key != :position_source && value.is_a?(Array) && value[0].is_a?(Symbol) - assigns[key] = value[3] - end - end - - assigns[:filename] = @filename - - GraphQL::Language::Nodes.const_get(node_name).new(assigns) -end diff --git a/lib/graphql/language/printer.rb b/lib/graphql/language/printer.rb index a3bf122c87..44fb005bfe 100644 --- a/lib/graphql/language/printer.rb +++ b/lib/graphql/language/printer.rb @@ -337,14 +337,16 @@ def print_union_type_definition(union_type, extension: false) print_string("union ") print_string(union_type.name) print_directives(union_type.directives) - print_string(" = ") - i = 0 - union_type.types.each do |t| - if i > 0 - print_string(" | ") + if union_type.types.any? + print_string(" = ") + i = 0 + union_type.types.each do |t| + if i > 0 + print_string(" | ") + end + print_string(t.name) + i += 1 end - print_string(t.name) - i += 1 end end @@ -353,12 +355,14 @@ def print_enum_type_definition(enum_type, extension: false) print_string("enum ") print_string(enum_type.name) print_directives(enum_type.directives) - print_string(" {\n") - enum_type.values.each.with_index do |value, i| - print_description(value, indent: " ", first_in_block: i == 0) - print_enum_value_definition(value) + if enum_type.values.any? 
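+        # Only print the `{ ... }` block when the enum has values; previously an
+        # empty block was printed, which isn't valid SDL.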
+ print_string(" {\n") + enum_type.values.each.with_index do |value, i| + print_description(value, indent: " ", first_in_block: i == 0) + print_enum_value_definition(value) + end + print_string("}") end - print_string("}") end def print_enum_value_definition(enum_value) diff --git a/lib/graphql/tracing/trace.rb b/lib/graphql/tracing/trace.rb index 718a691f88..8834c2d59c 100644 --- a/lib/graphql/tracing/trace.rb +++ b/lib/graphql/tracing/trace.rb @@ -15,6 +15,7 @@ def initialize(multiplex: nil, query: nil, **_options) @query = query end + # The Ruby parser doesn't call this method (`graphql/c_parser` does.) def lex(query_string:) yield end diff --git a/spec/graphql/analysis/ast_spec.rb b/spec/graphql/analysis/ast_spec.rb index c9dbd1d16e..0d14142c3a 100644 --- a/spec/graphql/analysis/ast_spec.rb +++ b/spec/graphql/analysis/ast_spec.rb @@ -299,7 +299,11 @@ def f1(arg:) end # The query_trace is on the list _first_ because it finished first - _lex, _parse, _validate, query_trace, multiplex_trace, *_rest = traces + if USING_C_PARSER + _lex, _parse, _validate, query_trace, multiplex_trace, *_rest = traces + else + _parse, _validate, query_trace, multiplex_trace, *_rest = traces + end assert_equal "analyze_multiplex", multiplex_trace[:key] assert_instance_of GraphQL::Execution::Multiplex, multiplex_trace[:multiplex] diff --git a/spec/graphql/language/clexer_spec.rb b/spec/graphql/language/clexer_spec.rb index a0e083b30d..998d9d54e9 100644 --- a/spec/graphql/language/clexer_spec.rb +++ b/spec/graphql/language/clexer_spec.rb @@ -6,6 +6,10 @@ describe GraphQL::CParser::Lexer do subject { GraphQL::CParser::Lexer } + def assert_bad_unicode(string, _message = nil) + assert_equal :BAD_UNICODE_ESCAPE, subject.tokenize(string).first[0] + end + it "makes tokens like the other lexer" do str = "{ f1(type: \"str\") ...F2 }\nfragment F2 on SomeType { f2 }" # Don't include prev_token here diff --git a/spec/graphql/language/lexer_examples.rb b/spec/graphql/language/lexer_examples.rb index bd1004a677..38267a8990 100644 --- a/spec/graphql/language/lexer_examples.rb +++ b/spec/graphql/language/lexer_examples.rb @@ -55,7 +55,7 @@ def self.included(child_mod) it "force encodes to utf-8" do # string that will be invalid utf-8 once force encoded string = "vandflyver \xC5rhus".dup.force_encoding("ASCII-8BIT") - assert_equal :BAD_UNICODE_ESCAPE, subject.tokenize(string).first.name + assert_bad_unicode(string) end it "makes utf-8 arguments named type" do @@ -64,12 +64,6 @@ def self.included(child_mod) assert_equal Encoding::UTF_8, tokens[2].value.encoding end - it "makes utf-8 comments" do - tokens = subject.tokenize("# 不要!\n{") - comment_token = tokens.first.prev_token - assert_equal "# 不要!", comment_token.to_s - end - it "keeps track of previous_token" do assert_equal tokens[0], tokens[1].prev_token end @@ -153,19 +147,22 @@ def self.included(child_mod) end it "rejects bad unicode, even when there's good unicode in the string" do - assert_equal :BAD_UNICODE_ESCAPE, subject.tokenize('"\\u0XXF \\u0009"').first.name + assert_bad_unicode('"\\u0XXF \\u0009"', "Bad unicode escape in \"\\\\u0XXF \\\\u0009\"") end it "rejects truly invalid UTF-8 bytes" do error_filename = "spec/support/parser/filename_example_invalid_utf8.graphql" - assert_equal :BAD_UNICODE_ESCAPE, subject.tokenize(File.read(error_filename)).first.name + text = File.read(error_filename) + assert_bad_unicode(text) end it "rejects unicode that's well-formed but results in invalidly-encoded strings" do # when the string here gets tokenized into an actual 
diff --git a/spec/graphql/language/lexer_examples.rb b/spec/graphql/language/lexer_examples.rb
index bd1004a677..38267a8990 100644
--- a/spec/graphql/language/lexer_examples.rb
+++ b/spec/graphql/language/lexer_examples.rb
@@ -55,7 +55,7 @@ def self.included(child_mod)
       it "force encodes to utf-8" do
         # string that will be invalid utf-8 once force encoded
         string = "vandflyver \xC5rhus".dup.force_encoding("ASCII-8BIT")
-        assert_equal :BAD_UNICODE_ESCAPE, subject.tokenize(string).first.name
+        assert_bad_unicode(string)
       end

       it "makes utf-8 arguments named type" do
@@ -64,12 +64,6 @@ def self.included(child_mod)
         assert_equal Encoding::UTF_8, tokens[2].value.encoding
       end

-      it "makes utf-8 comments" do
-        tokens = subject.tokenize("# 不要!\n{")
-        comment_token = tokens.first.prev_token
-        assert_equal "# 不要!", comment_token.to_s
-      end
-
       it "keeps track of previous_token" do
         assert_equal tokens[0], tokens[1].prev_token
       end
@@ -153,19 +147,22 @@ def self.included(child_mod)
       end

       it "rejects bad unicode, even when there's good unicode in the string" do
-        assert_equal :BAD_UNICODE_ESCAPE, subject.tokenize('"\\u0XXF \\u0009"').first.name
+        assert_bad_unicode('"\\u0XXF \\u0009"', "Bad unicode escape in \"\\\\u0XXF \\\\u0009\"")
       end

       it "rejects truly invalid UTF-8 bytes" do
         error_filename = "spec/support/parser/filename_example_invalid_utf8.graphql"
-        assert_equal :BAD_UNICODE_ESCAPE, subject.tokenize(File.read(error_filename)).first.name
+        text = File.read(error_filename)
+        assert_bad_unicode(text)
       end

       it "rejects unicode that's well-formed but results in invalidly-encoded strings" do
         # when the string here gets tokenized into an actual `:STRING`, it results in `valid_encoding?` being false for
         # the ruby string so application code usually blows up trying to manipulate it
-        assert_equal :BAD_UNICODE_ESCAPE, subject.tokenize('"\\udc00\\udf2c"').first.name
-        assert_equal :BAD_UNICODE_ESCAPE, subject.tokenize('"\\u{dc00}\\u{df2c}"').first.name
+        text1 = '"\\udc00\\udf2c"'
+        assert_bad_unicode(text1, 'Bad unicode escape in "\\xED\\xB0\\x80\\xED\\xBC\\xAC"')
+        text2 = '"\\u{dc00}\\u{df2c}"'
+        assert_bad_unicode(text2, 'Bad unicode escape in "\\xED\\xB0\\x80\\xED\\xBC\\xAC"')
       end

       it "clears the previous_token between runs" do
diff --git a/spec/graphql/language/lexer_spec.rb b/spec/graphql/language/lexer_spec.rb
index fe5aa19b06..dffb128674 100644
--- a/spec/graphql/language/lexer_spec.rb
+++ b/spec/graphql/language/lexer_spec.rb
@@ -4,4 +4,11 @@
 describe GraphQL::Language::Lexer do
   subject { GraphQL::Language::Lexer }
   include LexerExamples
+
+  def assert_bad_unicode(string, expected_err_message = "Parse error on bad Unicode escape sequence")
+    err = assert_raises(GraphQL::ParseError) do
+      subject.tokenize(string)
+    end
+    assert_equal expected_err_message, err.message
+  end
 end
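The shared examples now funnel every bad-escape assertion through `assert_bad_unicode`, and each spec file defines that helper to match its lexer's failure mode: the C lexer returns a `:BAD_UNICODE_ESCAPE` token, while the Ruby lexer raises `GraphQL::ParseError` during tokenizing. A sketch of how a new shared example can stay lexer-agnostic (the escape sequence here is just an illustration):

    it "rejects a lone low surrogate escape" do
      # Works under either lexer: the helper in clexer_spec.rb checks the token
      # name; the helper in lexer_spec.rb checks the raised ParseError message.
      assert_bad_unicode('"\\udead"')
    end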
diff --git a/spec/graphql/language/parser_spec.rb b/spec/graphql/language/parser_spec.rb
index 6cf54acc21..d7a0438016 100644
--- a/spec/graphql/language/parser_spec.rb
+++ b/spec/graphql/language/parser_spec.rb
@@ -8,7 +8,11 @@
     err = assert_raises GraphQL::ParseError do
       subject.parse("{ foo(query: \"\xBF\") }")
     end

-    expected_message = 'Parse error on bad Unicode escape sequence: "{ foo(query: \"\xBF\") }" (error) at [1, 1]'
+    expected_message = if USING_C_PARSER
+      'Parse error on bad Unicode escape sequence: "{ foo(query: \"\xBF\") }" (error) at [1, 1]'
+    else
+      'Parse error on bad Unicode escape sequence'
+    end
     assert_equal expected_message, err.message
   end

@@ -31,7 +35,7 @@
     expected_msg = if USING_C_PARSER
       "syntax error, unexpected invalid token (\"\\xF0\"), expecting LCURLY at [1, 7]"
     else
-      "Parse error on \"😘\" (error) at [1, 7]"
+      "Expected LCURLY, actual: UNKNOWN_CHAR (\"\\xF0\") at [1, 7]"
     end

     assert_equal expected_msg, err.message
@@ -355,12 +359,21 @@
     query = GraphQL::Query.new(schema, "{ t: __typename }")
     subject.parse("{ t: __typename }", trace: query.current_trace)
     traces = TestTracing.traces
-    assert_equal 2, traces.length
+    expected_traces = if USING_C_PARSER
+      2
+    else
+      1
+    end
+    assert_equal expected_traces, traces.length
     lex_trace, parse_trace = traces

-    assert_equal "{ t: __typename }", lex_trace[:query_string]
-    assert_equal "lex", lex_trace[:key]
-    assert_instance_of Array, lex_trace[:result]
+    if USING_C_PARSER
+      assert_equal "{ t: __typename }", lex_trace[:query_string]
+      assert_equal "lex", lex_trace[:key]
+      assert_instance_of Array, lex_trace[:result]
+    else
+      parse_trace = lex_trace
+    end

     assert_equal "{ t: __typename }", parse_trace[:query_string]
     assert_equal "parse", parse_trace[:key]
diff --git a/spec/graphql/query_spec.rb b/spec/graphql/query_spec.rb
index a2536bfe74..821658bac1 100644
--- a/spec/graphql/query_spec.rb
+++ b/spec/graphql/query_spec.rb
@@ -656,11 +656,10 @@ def self.after_query(q)
       assert_equal 1, res["errors"].length
       if USING_C_PARSER
         expected_err = "syntax error, unexpected end of file at [1, 2]"
-        expected_locations = [{"line" => 1, "column" => 2}]
       else
-        expected_err = "Unexpected end of document"
-        expected_locations = []
+        expected_err = "Expected NAME, actual: (none) (\" \") at [1, 2]"
       end
+      expected_locations = [{"line" => 1, "column" => 2}]
       assert_equal expected_err, res["errors"][0]["message"]
       assert_equal expected_locations, res["errors"][0]["locations"]

@@ -669,7 +668,7 @@ def self.after_query(q)
       expected_error = if USING_C_PARSER
         "syntax error, unexpected INT (\"1\") at [4, 26]"
       else
-        %|Parse error on "1" (INT) at [4, 26]|
+        %|Expected NAME, actual: INT ("1") at [4, 26]|
       end
       assert_equal expected_error, res["errors"][0]["message"]
       assert_equal({"line" => 4, "column" => 26}, res["errors"][0]["locations"][0])
diff --git a/spec/graphql/schema/non_null_spec.rb b/spec/graphql/schema/non_null_spec.rb
index d7f6b67544..5435853d0e 100644
--- a/spec/graphql/schema/non_null_spec.rb
+++ b/spec/graphql/schema/non_null_spec.rb
@@ -48,7 +48,7 @@
     expected_err = if USING_C_PARSER
       "syntax error, unexpected BANG (\"!\"), expecting RPAREN or VAR_SIGN at [2, 21]"
     else
-      'Parse error on "!" (BANG) at [2, 21]'
+      "Expected VAR_SIGN, actual: BANG (\"!\") at [2, 21]"
     end

     assert_equal [expected_err], res["errors"].map { |e| e["message"] }
diff --git a/spec/graphql/static_validation/validator_spec.rb b/spec/graphql/static_validation/validator_spec.rb
index 7f6dd24896..f531ee7aaa 100644
--- a/spec/graphql/static_validation/validator_spec.rb
+++ b/spec/graphql/static_validation/validator_spec.rb
@@ -16,8 +16,12 @@
       validator.validate(query)
     end

-    assert_equal 3, traces.length
-    _lex_trace, _parse_trace, validate_trace = traces
+    if USING_C_PARSER
+      assert_equal 3, traces.length
+    else
+      assert_equal 2, traces.length
+    end
+    validate_trace = traces.last
     assert_equal "validate", validate_trace[:key]
     assert_equal true, validate_trace[:validate]
     assert_instance_of GraphQL::Query, validate_trace[:query]
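The expected messages in these specs show the shape of errors from the new hand-written Ruby parser: "Expected <token>, actual: <token> (<value>) at [line, col]", while `graphql/c_parser` keeps the racc-style "syntax error, unexpected ..." wording, and both now report a location. A hedged sketch of inspecting either shape (the query string is illustrative and the exact wording depends on which parser is installed):

    begin
      GraphQL.parse("{ t: __typename")
    rescue GraphQL::ParseError => err
      err.message
      # Ruby parser: something like "Expected RCURLY, actual: (none) ... at [1, 16]"
      # C parser:    something like "syntax error, unexpected end of file at [1, 16]"
    end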
diff --git a/spec/graphql/tracing/appsignal_trace_spec.rb b/spec/graphql/tracing/appsignal_trace_spec.rb
index 20278f2d37..972acbde52 100644
--- a/spec/graphql/tracing/appsignal_trace_spec.rb
+++ b/spec/graphql/tracing/appsignal_trace_spec.rb
@@ -68,7 +68,7 @@ class TestSchema < GraphQL::Schema
       expected_trace = [
         "execute.graphql",
         "analyze.graphql",
-        "lex.graphql",
+        (USING_C_PARSER ? "lex.graphql" : nil),
         "parse.graphql",
         "validate.graphql",
         "analyze.graphql",
@@ -80,7 +80,7 @@ class TestSchema < GraphQL::Schema
         "Named.resolve_type.graphql",
         "Thing.authorized.graphql",
         "execute.graphql",
-      ]
+      ].compact

       assert_equal expected_trace, Appsignal.instrumented
     end
@@ -109,7 +109,7 @@ class AppsignalAndDatadogReverseOrderTestSchema < GraphQL::Schema
       _res = AppsignalAndDatadogTestSchema.execute("{ int thing { str } named { ... on Thing { str } } }")
       expected_appsignal_trace = [
         "execute.graphql",
-        "lex.graphql",
+        (USING_C_PARSER ? "lex.graphql" : nil),
         "parse.graphql",
         "analyze.graphql",
         "validate.graphql",
@@ -122,11 +122,11 @@ class AppsignalAndDatadogReverseOrderTestSchema < GraphQL::Schema
         "Named.resolve_type.graphql",
         "Thing.authorized.graphql",
         "execute.graphql",
-      ]
+      ].compact

       expected_datadog_trace = [
         "graphql.execute_multiplex",
-        "graphql.lex",
+        (USING_C_PARSER ? "graphql.lex" : nil),
         "graphql.parse",
         "graphql.analyze_multiplex",
         "graphql.validate",
@@ -139,7 +139,7 @@ class AppsignalAndDatadogReverseOrderTestSchema < GraphQL::Schema
         "graphql.resolve_type",
         "graphql.authorized",
         "graphql.execute_query_lazy",
-      ]
+      ].compact

       assert_equal expected_appsignal_trace, Appsignal.instrumented
       assert_equal expected_datadog_trace, Datadog::SPAN_TAGS
@@ -150,7 +150,7 @@ class AppsignalAndDatadogReverseOrderTestSchema < GraphQL::Schema
     it "works when the modules are included in reverse order" do
       _res = AppsignalAndDatadogReverseOrderTestSchema.execute("{ int thing { str } named { ... on Thing { str } } }")
       expected_appsignal_trace = [
-        "lex.graphql",
+        (USING_C_PARSER ? "lex.graphql" : nil),
        "parse.graphql",
         "execute.graphql",
         "analyze.graphql",
@@ -164,11 +164,11 @@ class AppsignalAndDatadogReverseOrderTestSchema < GraphQL::Schema
         "Named.resolve_type.graphql",
         "Thing.authorized.graphql",
         "execute.graphql",
-      ]
+      ].compact

       expected_datadog_trace = [
         "graphql.execute_multiplex",
-        "graphql.lex",
+        (USING_C_PARSER ? "graphql.lex" : nil),
         "graphql.parse",
         "graphql.analyze_multiplex",
         "graphql.validate",
@@ -181,7 +181,7 @@ class AppsignalAndDatadogReverseOrderTestSchema < GraphQL::Schema
         "graphql.resolve_type",
         "graphql.authorized",
         "graphql.execute_query_lazy",
-      ]
+      ].compact

       assert_equal expected_appsignal_trace, Appsignal.instrumented
       assert_equal expected_datadog_trace, Datadog::SPAN_TAGS
diff --git a/spec/graphql/tracing/data_dog_trace_spec.rb b/spec/graphql/tracing/data_dog_trace_spec.rb
index 0161e91bee..0721e32510 100644
--- a/spec/graphql/tracing/data_dog_trace_spec.rb
+++ b/spec/graphql/tracing/data_dog_trace_spec.rb
@@ -80,7 +80,7 @@ def prepare_span(trace_key, data, span)
     it "sets custom tags tags" do
       DataDogTraceTest::CustomTracerTestSchema.execute("{ thing { str } }")
       expected_custom_tags = [
-        ["custom:lex", "query_string"],
+        (USING_C_PARSER ? ["custom:lex", "query_string"] : nil),
         ["custom:parse", "query_string"],
         ["custom:execute_multiplex", "multiplex"],
         ["custom:analyze_multiplex", "multiplex"],
@@ -91,7 +91,7 @@ def prepare_span(trace_key, data, span)
         ["custom:execute_field", "arguments,ast_node,field,object,query"],
         ["custom:authorized", "object,query,type"],
         ["custom:execute_query_lazy", "multiplex,query"],
-      ]
+      ].compact

       actual_custom_tags = Datadog::SPAN_TAGS.reject { |t| t[0] == "operation" || t[0] == "component" || t[0].is_a?(Symbol) }
       assert_equal expected_custom_tags, actual_custom_tags
diff --git a/spec/graphql/tracing/data_dog_tracing_spec.rb b/spec/graphql/tracing/data_dog_tracing_spec.rb
index 1e048c456a..46e56810d8 100644
--- a/spec/graphql/tracing/data_dog_tracing_spec.rb
+++ b/spec/graphql/tracing/data_dog_tracing_spec.rb
@@ -75,7 +75,7 @@ def prepare_span(trace_key, data, span)
     it "sets custom tags tags" do
       DataDogTest::CustomTracerTestSchema.execute("{ thing { str } }")
       expected_custom_tags = [
-        ["custom:lex", "query_string"],
+        (USING_C_PARSER ? ["custom:lex", "query_string"] : nil),
         ["custom:parse", "query_string"],
         ["custom:execute_multiplex", "multiplex"],
         ["custom:analyze_multiplex", "multiplex"],
@@ -86,7 +86,7 @@ def prepare_span(trace_key, data, span)
         ["custom:execute_field", "field,query,ast_node,arguments,object,owner,path"],
         ["custom:authorized", "context,type,object,path"],
         ["custom:execute_query_lazy", "multiplex,query"],
-      ]
+      ].compact

       actual_custom_tags = Datadog::SPAN_TAGS.reject { |t| t[0] == "operation" || t[0] == "component" || t[0].is_a?(Symbol) }
       assert_equal expected_custom_tags, actual_custom_tags
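Every tracing spec in this batch uses the same idiom: wrap the C-parser-only entry in a ternary against USING_C_PARSER and call `.compact` so the `nil` placeholder disappears under the pure-Ruby parser. A short sketch of the idiom, plus a guess at how such a flag could be set up (the flag's real definition is not part of this diff, so treat the `require`/`defined?` lines as an assumption):

    # How the expectation lists above are built:
    expected = [
      "execute.graphql",
      (USING_C_PARSER ? "lex.graphql" : nil), # only emitted with graphql/c_parser
      "parse.graphql",
    ].compact

    # One possible spec-setup definition of the flag (assumption, not from this diff):
    begin
      require "graphql/c_parser"
    rescue LoadError
    end
    USING_C_PARSER = defined?(GraphQL::CParser) ? true : false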
diff --git a/spec/graphql/tracing/notifications_trace_spec.rb b/spec/graphql/tracing/notifications_trace_spec.rb
index bde843382a..6ea26a4bd7 100644
--- a/spec/graphql/tracing/notifications_trace_spec.rb
+++ b/spec/graphql/tracing/notifications_trace_spec.rb
@@ -26,7 +26,7 @@ class Schema < GraphQL::Schema
       expected_event_keys = [
         'execute_multiplex.graphql',
         'analyze_multiplex.graphql',
-        'lex.graphql',
+        (USING_C_PARSER ? 'lex.graphql' : nil),
         'parse.graphql',
         'validate.graphql',
         'analyze_query.graphql',
@@ -34,7 +34,7 @@ class Schema < GraphQL::Schema
         'authorized.graphql',
         'execute_field.graphql',
         'execute_query_lazy.graphql'
-      ]
+      ].compact

       assert_equal expected_event_keys, dispatched_events.keys
diff --git a/spec/graphql/tracing/platform_trace_spec.rb b/spec/graphql/tracing/platform_trace_spec.rb
index b39b8c4376..48cc9a10bc 100644
--- a/spec/graphql/tracing/platform_trace_spec.rb
+++ b/spec/graphql/tracing/platform_trace_spec.rb
@@ -78,7 +78,7 @@ def platform_resolve_type_key(type)
       expected_trace = [
         "em",
         "am",
-        "l",
+        (USING_C_PARSER ? "l" : nil),
         "p",
         "v",
         "aq",
@@ -88,7 +88,7 @@ def platform_resolve_type_key(type)
         "Cheese.authorized",
         "eql",
         "Cheese.authorized", # This is the lazy part, calling the proc
-      ]
+      ].compact

       assert_equal expected_trace, CustomPlatformTrace::TRACE
     end
@@ -100,7 +100,7 @@ def platform_resolve_type_key(type)
         "v",
         "em",
         "am",
-        "l",
+        (USING_C_PARSER ? "l" : nil),
         "p",
         "v",
         "aq",
@@ -110,7 +110,7 @@ def platform_resolve_type_key(type)
         "Cheese.authorized",
         "eql",
         "Cheese.authorized", # This is the lazy part, calling the proc
-      ]
+      ].compact

       query = GraphQL::Query.new(schema, query_str)
       # First, validate
@@ -126,7 +126,7 @@ def platform_resolve_type_key(type)
       expected_trace = [
         "em",
         "am",
-        "l",
+        (USING_C_PARSER ? "l" : nil),
         "p",
         "v",
         "aq",
@@ -161,7 +161,7 @@ def platform_resolve_type_key(type)
         "DynamicFields.authorized",
         "D._",
         "E.f",
-      ]
+      ].compact

       assert_equal expected_trace, CustomPlatformTrace::TRACE
     end
@@ -183,7 +183,7 @@ def platform_resolve_type_key(type)
       expected_trace = [
         "em",
         "am",
-        "l",
+        (USING_C_PARSER ? "l" : nil),
         "p",
         "v",
         "aq",
@@ -193,7 +193,7 @@ def platform_resolve_type_key(type)
         "TracingScalar.authorized",
         "T.t",
         "eql",
-      ]
+      ].compact
       assert_equal expected_trace, CustomPlatformTrace::TRACE
     end
   end
@@ -214,7 +214,7 @@ def platform_resolve_type_key(type)
       expected_trace = [
         "em",
         "am",
-        "l",
+        (USING_C_PARSER ? "l" : nil),
         "p",
         "v",
         "aq",
@@ -225,7 +225,7 @@ def platform_resolve_type_key(type)
         "T.t",
         "T.t",
         "eql",
-      ]
+      ].compact
      assert_equal expected_trace, CustomPlatformTrace::TRACE
     end
   end
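The notification-based integrations follow the same rule as the platform tracers: 'lex.graphql' only appears among the dispatched events when the C parser is doing the lexing. A hedged sketch of observing these events with ActiveSupport::Notifications (the subscriber itself is illustrative; the event names match the lists above):

    ActiveSupport::Notifications.subscribe(/\.graphql\z/) do |name, started, finished, _id, _payload|
      # "parse.graphql" always fires; "lex.graphql" fires only with graphql/c_parser
      puts "#{name} took #{((finished - started) * 1000).round(2)}ms"
    end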
"l" : nil), "p", "v", "aq", @@ -121,7 +121,7 @@ def platform_trace(platform_key, key, data) "Edible.resolve_type", "Milk.authorized", "DynamicFields.authorized", - ] + ].compact assert_equal expected_trace, CustomPlatformTracer::TRACE end @@ -133,7 +133,7 @@ def platform_trace(platform_key, key, data) expected_trace = [ "em", "am", - "l", + (USING_C_PARSER ? "l" : nil), "p", "v", "aq", @@ -168,7 +168,7 @@ def platform_trace(platform_key, key, data) "DynamicFields.authorized", "D._", "E.f", - ] + ].compact assert_equal expected_trace, CustomPlatformTracer::TRACE end @@ -190,7 +190,7 @@ def platform_trace(platform_key, key, data) expected_trace = [ "em", "am", - "l", + (USING_C_PARSER ? "l" : nil), "p", "v", "aq", @@ -200,7 +200,7 @@ def platform_trace(platform_key, key, data) "TracingScalar.authorized", "T.t", "eql", - ] + ].compact assert_equal expected_trace, CustomPlatformTracer::TRACE end end @@ -221,7 +221,7 @@ def platform_trace(platform_key, key, data) expected_trace = [ "em", "am", - "l", + (USING_C_PARSER ? "l" : nil), "p", "v", "aq", @@ -232,7 +232,7 @@ def platform_trace(platform_key, key, data) "T.t", "T.t", "eql", - ] + ].compact assert_equal expected_trace, CustomPlatformTracer::TRACE end end diff --git a/spec/graphql/tracing/statsd_trace_spec.rb b/spec/graphql/tracing/statsd_trace_spec.rb index 1a146d77d4..21774abd6c 100644 --- a/spec/graphql/tracing/statsd_trace_spec.rb +++ b/spec/graphql/tracing/statsd_trace_spec.rb @@ -50,7 +50,7 @@ def thing; :thing; end expected_timings = [ "graphql.execute_multiplex", "graphql.analyze_multiplex", - "graphql.lex", + (USING_C_PARSER ? "graphql.lex" : nil), "graphql.parse", "graphql.validate", "graphql.analyze_query", @@ -59,7 +59,7 @@ def thing; :thing; end "graphql.Query.thing", "graphql.authorized.Thing", "graphql.execute_query_lazy" - ] + ].compact assert_equal expected_timings, TraceMockStatsd.timings end end diff --git a/spec/graphql/tracing/statsd_tracing_spec.rb b/spec/graphql/tracing/statsd_tracing_spec.rb index 899f5848a6..bd7f26136d 100644 --- a/spec/graphql/tracing/statsd_tracing_spec.rb +++ b/spec/graphql/tracing/statsd_tracing_spec.rb @@ -50,7 +50,7 @@ def thing; :thing; end expected_timings = [ "graphql.execute_multiplex", "graphql.analyze_multiplex", - "graphql.lex", + (USING_C_PARSER ? "graphql.lex" : nil), "graphql.parse", "graphql.validate", "graphql.analyze_query", @@ -59,7 +59,7 @@ def thing; :thing; end "graphql.Query.thing", "graphql.authorized.Thing", "graphql.execute_query_lazy" - ] + ].compact assert_equal expected_timings, MockStatsd.timings end end diff --git a/spec/integration/rails/graphql/tracing/active_support_notifications_tracing_spec.rb b/spec/integration/rails/graphql/tracing/active_support_notifications_tracing_spec.rb index 65e47c6e97..a6b843629d 100644 --- a/spec/integration/rails/graphql/tracing/active_support_notifications_tracing_spec.rb +++ b/spec/integration/rails/graphql/tracing/active_support_notifications_tracing_spec.rb @@ -37,7 +37,7 @@ end expected_traces = [ - "lex.graphql", + (USING_C_PARSER ? "lex.graphql" : nil), "parse.graphql", "validate.graphql", "analyze_query.graphql",