File: parser_support.rb (puppet-agent 8.10.0-6)

# frozen_string_literal: true

require_relative '../../../puppet/parser/functions'
require_relative '../../../puppet/parser/files'
require_relative '../../../puppet/resource/type_collection'
require_relative '../../../puppet/resource/type'
require 'monitor'

module Puppet::Pops
module Parser
# Supporting logic for the parser.
# This supporting logic has slightly different responsibilities compared to the original Puppet::Parser::Parser.
# It is only concerned with parsing.
#
class Parser
  # Note that the name of the contained class and the file name (currently parser_support.rb)
  # need to be different, as the class is generated by Racc and this file (parser_support.rb) is included as a mixin.
  #

  # Simplifies access to the Model factory.
  # Note that the parser / parser support has no direct knowledge about the Model;
  # all model construction and manipulation is done via the Factory.
  #
  Factory = Model::Factory

  attr_accessor :lexer
  attr_reader :definitions

  # Returns the token text of the given lexer token, or nil if the token is nil.
  def token_text(t)
    return t if t.nil?

    if t.is_a?(Factory) && t.model_class <= Model::QualifiedName
      t['value']
    elsif t.is_a?(Model::QualifiedName)
      t.value
    else
      # else it is a lexer token
      t[:value]
    end
  end
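
  # Illustrative examples for #token_text (not part of the original source); the raw lexer token is
  # shown here as a plain Hash with a :value entry, which the method reads via t[:value]:
  #
  #   token_text(nil)                      # => nil
  #   token_text({ :value => 'notice' })   # => 'notice'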

  # Produces the fully qualified name for a given name, prefixed with the full (current) namespace.
  #
  # This is needed because class bodies are lazily evaluated and an inner class's container(s) may not
  # have been evaluated before some external reference is made to the inner class; it must therefore know its complete
  # name before evaluation time.
  #
  def classname(name)
    [namespace, name].join('::').sub(/^::/, '')
  end
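
  # Illustrative example for #classname (not part of the original source), showing how the name stack
  # (see #namestack / #namepop below) qualifies the given name; 'outer' and 'inner' are hypothetical:
  #
  #   namestack('outer')
  #   classname('inner')   # => 'outer::inner'
  #   namepop
  #   classname('inner')   # => 'inner'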

  # Raises a Puppet::ParseError with location information. Information about the file is always obtained from the
  # lexer. Line and position are produced if the given semantic is a Positioned object and has been given an offset.
  #
  def error(semantic, message)
    except = Puppet::ParseError.new(message)
    if semantic.is_a?(LexerSupport::TokenValue)
      except.file = semantic[:file]
      except.line = semantic[:line]
      except.pos = semantic[:pos]
    else
      locator = @lexer.locator
      except.file = locator.file
      if semantic.is_a?(Factory)
        offset = semantic['offset']
        unless offset.nil?
          except.line = locator.line_for_offset(offset)
          except.pos = locator.pos_on_line(offset)
        end
      end
    end
    raise except
  end

  # Parses a file expected to contain pp DSL logic.
  def parse_file(file)
    unless Puppet::FileSystem.exist?(file)
      unless file =~ /\.pp$/
        file += ".pp"
      end
    end
    @lexer.file = file
    _parse
  end
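
  # Illustrative examples for #parse_file (not part of the original source); the paths are hypothetical:
  #
  #   parse_file('/tmp/site')      # parses '/tmp/site.pp' when '/tmp/site' does not exist on disk
  #   parse_file('/tmp/site.pp')   # parses '/tmp/site.pp' as given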

  def initialize
    @lexer = Lexer2.new
    @namestack = []
    @definitions = []
  end

  # This is a callback from the generated parser (when an error occurs while parsing)
  #
  def on_error(token, value, stack)
    if token == 0 # denotes end of file
      value_at = 'end of input'
    else
      value_at = "'#{value[:value]}'"
    end
    error = Issues::SYNTAX_ERROR.format(:where => value_at)
    error = "#{error}, token: #{token}" if @yydebug

    # Note: the old parser had processing of an "expected token here" hint - do not try to reinstate it:
    # The 'expected' hint is only of value at end of input; otherwise any parse error involving the
    # start of a pair will be reported as expecting the close of the pair - e.g. "$x.each |$x {|" would
    # report that, having seen the '{', a '}' is expected. That would be wrong.
    # Real "expected" tokens are very difficult to compute (it would require parsing of Racc output data). Output of the
    # stack could help, but can require extensive backtracking and produce many options.
    #
    # The lexer should handle the "expected ... instead of end of file" cases for strings and interpolation; other
    # expectancies must be handled by the grammar. The lexer may have enqueued tokens far ahead - the lexer's opinion
    # about this is not trustworthy.
    #
    #
    file = nil
    line = nil
    pos  = nil
    if token != 0
      file = value[:file]
      locator = value.locator
      if locator.is_a?(Puppet::Pops::Parser::Locator::SubLocator)
        # The error occurs when doing sub-parsing and the token must be transformed
        # Transpose the local offset, length to global "coordinates"
        global_offset, _ = locator.to_global(value.offset, value.length)
        line = locator.locator.line_for_offset(global_offset)
        pos = locator.locator.pos_on_line(global_offset)
      else
        line = value[:line]
        pos  = value[:pos]
      end
    else
      # At end of input, use what the lexer thinks is the source file
      file = lexer.file
    end
    file = nil unless file.is_a?(String) && !file.empty?
    raise Puppet::ParseErrorWithIssue.new(error, file, line, pos, nil, Issues::SYNTAX_ERROR.issue_code)
  end

  # Parses a String of pp DSL code.
  #
  def parse_string(code, path = nil)
    @lexer.lex_string(code, path)
    _parse
  end
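
  # Illustrative usage of #parse_string (not part of the original source); the source text and path are
  # hypothetical, and the result is the value produced by the grammar - in practice a Factory-wrapped
  # Program model (see #create_program):
  #
  #   parser = Puppet::Pops::Parser::Parser.new
  #   result = parser.parse_string("notice('hello')", '/tmp/example.pp')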

  # Marks the factory-wrapped model object with location information.
  # @return [Factory] the given factory
  # @api private
  #
  def loc(factory, start_locatable, end_locatable = nil)
    factory.record_position(@lexer.locator, start_locatable, end_locatable)
  end

  # Marks the factory-wrapped heredoc model object with location information.
  # @return [Factory] the given factory
  # @api private
  #
  def heredoc_loc(factory, start_locatable, end_locatable = nil)
    factory.record_heredoc_position(start_locatable, end_locatable)
  end

  # Wraps the given object in an Array unless it already is an Array.
  def aryfy(o)
    o = [o] unless o.is_a?(Array)
    o
  end

  # Returns the current fully qualified namespace (the name stack joined with '::').
  def namespace
    @namestack.join('::')
  end

  # Pushes a name onto the namespace stack.
  def namestack(name)
    @namestack << name
  end

  # Pops the innermost name from the namespace stack.
  def namepop
    @namestack.pop
  end

  # Remembers the model of a parsed definition and returns the given definition.
  def add_definition(definition)
    @definitions << definition.model
    definition
  end

  # Transforms an array of expressions so that literal name expressions followed by an expression or an
  # expression list are turned into calls.
  #
  def transform_calls(expressions)
    # The Factory transform raises an error if a non-qualified name is followed by an argument list,
    # since there is no way that can be transformed back to sanity. This occurs in situations like this:
    #
    #  $a = 10, notice hello
    #
    # where the "10, notice" forms an argument list. The parser builds an Array with the expressions and includes
    # the comma tokens to enable the error to be reported against the first comma.
    #

    Factory.transform_calls(expressions)
  rescue Factory::ArgsToNonCallError => e
    # e.args[1] is the first comma token in the list
    # e.name_expr is the function name expression
    if e.name_expr.is_a?(Factory) && e.name_expr.model_class <= Model::QualifiedName
      error(e.args[1], _("attempt to pass argument list to the function '%{name}' which cannot be called without parentheses") % { name: e.name_expr['value'] })
    else
      error(e.args[1], _("illegal comma separated argument list"))
    end
  end
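
  # Illustrative sketch for #transform_calls (not part of the original source): a statement-style call
  # such as
  #
  #   notice 'hello'
  #
  # arrives here as a literal name expression followed by an argument expression; this method turns that
  # sequence into a call, while the error case documented above ("$a = 10, notice hello") is reported
  # via #error.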

  # Transforms a LEFT expression followed by the result of attribute_operations; the result may be a call or an invalid sequence.
  def transform_resource_wo_title(left, resource, lbrace_token, rbrace_token)
    Factory.transform_resource_wo_title(left, resource, lbrace_token, rbrace_token)
  end

  # Creates a program with the given body.
  #
  def create_program(body)
    locator = @lexer.locator
    Factory.PROGRAM(body, definitions, locator)
  end

  # Creates an empty program with a single No-op at the input's EOF offset with 0 length.
  #
  def create_empty_program
    locator = @lexer.locator
    no_op = Factory.literal(nil)
    # Create a synthetic NOOP token at EOF offset with 0 size. The lexer does not produce an EOF token that is
    # visible to the grammar rules. Creating this token is mainly to reuse the positioning logic as it
    # expects a token decorated with location information.
    _, token = @lexer.emit_completed([:NOOP, '', 0], locator.string.bytesize)
    loc(no_op, token)
    # Program with a Noop
    Factory.PROGRAM(no_op, [], locator)
  end

  # Performs the parsing and returns the resulting model.
  # The lexer holds state, and this is set up with {#parse_string} or {#parse_file}.
  #
  # @api private
  #
  def _parse
    @yydebug = false
    main = yyparse(@lexer, :scan)
    main
  ensure
    @lexer.clear
    @namestack = []
    @definitions = []
  end
end
end
end