diff --git a/lib/puppet/pops/issues.rb b/lib/puppet/pops/issues.rb index 91430f762..e025410fb 100644 --- a/lib/puppet/pops/issues.rb +++ b/lib/puppet/pops/issues.rb @@ -1,548 +1,640 @@ # Defines classes to deal with issues, and message formatting and defines constants with Issues. # @api public # module Puppet::Pops::Issues # Describes an issue, and can produce a message for an occurrence of the issue. # class Issue # The issue code # @return [Symbol] attr_reader :issue_code # A block producing the message # @return [Proc] attr_reader :message_block # Names that must be bound in an occurrence of the issue to be able to produce a message. # These are the names in addition to requirements stipulated by the Issue formatter contract; i.e. :label`, # and `:semantic`. # attr_reader :arg_names # If this issue can have its severity lowered to :warning, :deprecation, or :ignored attr_writer :demotable # Configures the Issue with required arguments (bound by occurrence), and a block producing a message. def initialize issue_code, *args, &block @issue_code = issue_code @message_block = block @arg_names = args @demotable = true end # Returns true if it is allowed to demote this issue def demotable? @demotable end # Formats a message for an occurrence of the issue with argument bindings passed in a hash. # The hash must contain a LabelProvider bound to the key `label` and the semantic model element # bound to the key `semantic`. All required arguments as specified by `arg_names` must be bound # in the given `hash`. # @api public # def format(hash ={}) # Create a Message Data where all hash keys become methods for convenient interpolation # in issue text. msgdata = MessageData.new(*arg_names) begin # Evaluate the message block in the msg data's binding msgdata.format(hash, &message_block) rescue StandardError => e Puppet::Pops::Issues::MessageData raise RuntimeError, "Error while reporting issue: #{issue_code}. #{e.message}", caller end end end # Provides a binding of arguments passed to Issue.format to method names available # in the issue's message producing block. # @api private # class MessageData def initialize *argnames singleton = class << self; self end argnames.each do |name| singleton.send(:define_method, name) do @data[name] end end end def format(hash, &block) @data = hash instance_eval &block end # Returns the label provider given as a key in the hash passed to #format. # If given an argument, calls #label on the label provider (caller would otherwise have to # call label.label(it) # def label(it = nil) raise "Label provider key :label must be set to produce the text of the message!" unless @data[:label] it.nil? ? @data[:label] : @data[:label].label(it) end # Returns the label provider given as a key in the hash passed to #format. # def semantic raise "Label provider key :semantic must be set to produce the text of the message!" unless @data[:semantic] @data[:semantic] end end # Defines an issue with the given `issue_code`, additional required parameters, and a block producing a message. # The block is evaluated in the context of a MessageData which provides convenient access to all required arguments # via accessor methods. In addition to accessors for specified arguments, these are also available: # * `label` - a `LabelProvider` that provides human understandable names for model elements and production of article (a/an/the). 
# * `semantic` - the model element for which the issue is reported # # @param issue_code [Symbol] the issue code for the issue used as an identifier, should be the same as the constant # the issue is bound to. # @param args [Symbol] required arguments that must be passed when formatting the message, may be empty # @param block [Proc] a block producing the message string, evaluated in a MessageData scope. The produced string # should not end with a period as additional information may be appended. # # @see MessageData # @api public # def self.issue (issue_code, *args, &block) Issue.new(issue_code, *args, &block) end # Creates a non demotable issue. # @see Issue.issue # def self.hard_issue(issue_code, *args, &block) result = Issue.new(issue_code, *args, &block) result.demotable = false result end # @comment Here follows definitions of issues. The intent is to provide a list from which yardoc can be generated # containing more detailed information / explanation of the issue. # These issues are set as constants, but it is unfortunately not possible for the created object to easily know which # name it is bound to. Instead the constant has to be repeated. (Alternatively, it could be done by instead calling # #const_set on the module, but the extra work required to get yardoc output vs. the extra effort to repeat the name # twice makes it not worth it (if doable at all, since there is no tag to artificially construct a constant, and # the parse tag does not produce any result for a constant assignment). # This is allowed (3.1) and has not yet been deprecated. # @todo configuration # NAME_WITH_HYPHEN = issue :NAME_WITH_HYPHEN, :name do "#{label.a_an_uc(semantic)} may not have a name containing a hyphen. The name '#{name}' is not legal" end # When a variable name contains a hyphen and these are illegal. # It is possible to control if a hyphen is legal in a name or not using the setting TODO # @todo describe the setting # @api public # @todo configuration if this is error or warning # VAR_WITH_HYPHEN = issue :VAR_WITH_HYPHEN, :name do "A variable name may not contain a hyphen. The name '#{name}' is not legal" end # A class, definition, or node may only appear at top level or inside other classes # @todo Is this really true for nodes? Can they be inside classes? Isn't that too late? # @api public # NOT_TOP_LEVEL = hard_issue :NOT_TOP_LEVEL do "Classes, definitions, and nodes may only appear at toplevel or inside other classes" end CROSS_SCOPE_ASSIGNMENT = hard_issue :CROSS_SCOPE_ASSIGNMENT, :name do "Illegal attempt to assign to '#{name}'. Cannot assign to variables in other namespaces" end # Assignment can only be made to certain types of left hand expressions such as variables. ILLEGAL_ASSIGNMENT = hard_issue :ILLEGAL_ASSIGNMENT do "Illegal attempt to assign to '#{label.a_an(semantic)}'. Not an assignable reference" end # Variables are immutable, cannot reassign in the same assignment scope ILLEGAL_REASSIGNMENT = hard_issue :ILLEGAL_REASSIGNMENT, :name do "Cannot reassign variable #{name}" end ILLEGAL_RESERVED_ASSIGNMENT = hard_issue :ILLEGAL_RESERVED_ASSIGNMENT, :name do "Attempt to assign to a reserved variable name: '#{name}'" end # Assignment cannot be made to numeric match result variables ILLEGAL_NUMERIC_ASSIGNMENT = issue :ILLEGAL_NUMERIC_ASSIGNMENT, :varname do "Illegal attempt to assign to the numeric match result variable '$#{varname}'. 
Numeric variables are not assignable" end # parameters cannot have numeric names, clashes with match result variables ILLEGAL_NUMERIC_PARAMETER = issue :ILLEGAL_NUMERIC_PARAMETER, :name do "The numeric parameter name '$#{name}' cannot be used (clashes with numeric match result variables)" end # In certain versions of Puppet it may be allowed to assign to a not already assigned key # in an array or a hash. This is an optional validation that may be turned on to prevent accidental # mutation. # ILLEGAL_INDEXED_ASSIGNMENT = issue :ILLEGAL_INDEXED_ASSIGNMENT do "Illegal attempt to assign via [index/key]. Not an assignable reference" end # When indexed assignment ($x[]=) is allowed, the leftmost expression must be # a variable expression. # ILLEGAL_ASSIGNMENT_VIA_INDEX = hard_issue :ILLEGAL_ASSIGNMENT_VIA_INDEX do "Illegal attempt to assign to #{label.a_an(semantic)} via [index/key]. Not an assignable reference" end APPENDS_DELETES_NO_LONGER_SUPPORTED = hard_issue :APPENDS_DELETES_NO_LONGER_SUPPORTED, :operator do "The operator '#{operator}' is no longer supported. See http://links.puppetlabs.com/remove-plus-equals" end # For unsupported operators (e.g. += and -= in puppet 4). # UNSUPPORTED_OPERATOR = hard_issue :UNSUPPORTED_OPERATOR, :operator do "The operator '#{operator}' is not supported." end # For operators that are not supported in specific contexts (e.g. '* =>' in # resource defaults) # UNSUPPORTED_OPERATOR_IN_CONTEXT = hard_issue :UNSUPPORTED_OPERATOR_IN_CONTEXT, :operator do "The operator '#{operator}' in #{label.a_an(semantic)} is not supported." end # For non applicable operators (e.g. << on Hash). # OPERATOR_NOT_APPLICABLE = hard_issue :OPERATOR_NOT_APPLICABLE, :operator, :left_value do "Operator '#{operator}' is not applicable to #{label.a_an(left_value)}." end COMPARISON_NOT_POSSIBLE = hard_issue :COMPARISON_NOT_POSSIBLE, :operator, :left_value, :right_value, :detail do "Comparison of: #{label(left_value)} #{operator} #{label(right_value)}, is not possible. Caused by '#{detail}'." end MATCH_NOT_REGEXP = hard_issue :MATCH_NOT_REGEXP, :detail do "Can not convert right match operand to a regular expression. Caused by '#{detail}'." end MATCH_NOT_STRING = hard_issue :MATCH_NOT_STRING, :left_value do "Left match operand must result in a String value. Got #{label.a_an(left_value)}." end # Some expressions/statements may not produce a value (known as right-value, or rvalue). # This may vary between puppet versions. # NOT_RVALUE = issue :NOT_RVALUE do "Invalid use of expression. #{label.a_an_uc(semantic)} does not produce a value" end # Appending to attributes is only allowed in certain types of resource expressions. # ILLEGAL_ATTRIBUTE_APPEND = hard_issue :ILLEGAL_ATTRIBUTE_APPEND, :name, :parent do "Illegal +> operation on attribute #{name}. This operator can not be used in #{label.a_an(parent)}" end ILLEGAL_NAME = hard_issue :ILLEGAL_NAME, :name do "Illegal name. The given name #{name} does not conform to the naming rule /^((::)?[a-z_]\w*)(::[a-z]\w*)*$/" end ILLEGAL_VAR_NAME = hard_issue :ILLEGAL_VAR_NAME, :name do "Illegal variable name, The given name '#{name}' does not conform to the naming rule /^((::)?[a-z]\w*)*((::)?[a-z_]\w*)$/" end ILLEGAL_NUMERIC_VAR_NAME = hard_issue :ILLEGAL_NUMERIC_VAR_NAME, :name do "Illegal numeric variable name, The given name '#{name}' must be a decimal value if it starts with a digit 0-9" end # In case a model is constructed programmatically, it must create valid type references. 
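#
# For orientation, an issue declared with the issue/hard_issue helpers above is formatted by
# passing its required arguments plus the :label and :semantic bindings; MY_ISSUE, a_label_provider
# and a_model_element below are illustrative placeholders, not definitions from this file.
#
#   MY_ISSUE = Puppet::Pops::Issues.issue :MY_ISSUE, :name do
#     "The name '#{name}' is not acceptable as #{label.a_an(semantic)}"
#   end
#
#   MY_ISSUE.format(:name => 'foo-bar', :label => a_label_provider, :semantic => a_model_element)
#   # => roughly "The name 'foo-bar' is not acceptable as a Resource Expression"
#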
# ILLEGAL_CLASSREF = hard_issue :ILLEGAL_CLASSREF, :name do "Illegal type reference. The given name '#{name}' does not conform to the naming rule" end # This is a runtime issue - storeconfigs must be on in order to collect exported. This issue should be # set to :ignore when just checking syntax. # @todo should be a :warning by default # RT_NO_STORECONFIGS = issue :RT_NO_STORECONFIGS do "You cannot collect exported resources without storeconfigs being set; the collection will be ignored" end # This is a runtime issue - storeconfigs must be on in order to export a resource. This issue should be # set to :ignore when just checking syntax. # @todo should be a :warning by default # RT_NO_STORECONFIGS_EXPORT = issue :RT_NO_STORECONFIGS_EXPORT do "You cannot collect exported resources without storeconfigs being set; the export is ignored" end # A hostname may only contain letters, digits, '_', '-', and '.'. # ILLEGAL_HOSTNAME_CHARS = hard_issue :ILLEGAL_HOSTNAME_CHARS, :hostname do "The hostname '#{hostname}' contains illegal characters (only letters, digits, '_', '-', and '.' are allowed)" end # A hostname may only contain letters, digits, '_', '-', and '.'. # ILLEGAL_HOSTNAME_INTERPOLATION = hard_issue :ILLEGAL_HOSTNAME_INTERPOLATION do "An interpolated expression is not allowed in a hostname of a node" end # Issues when an expression is used where it is not legal. # E.g. an arithmetic expression where a hostname is expected. # ILLEGAL_EXPRESSION = hard_issue :ILLEGAL_EXPRESSION, :feature, :container do "Illegal expression. #{label.a_an_uc(semantic)} is unacceptable as #{feature} in #{label.a_an(container)}" end # Issues when a variable is not a NAME # ILLEGAL_VARIABLE_EXPRESSION = hard_issue :ILLEGAL_VARIABLE_EXPRESSION do "Illegal variable expression. #{label.a_an_uc(semantic)} did not produce a variable name (String or Numeric)." end # Issues when an expression is used illegaly in a query. # query only supports == and !=, and not <, > etc. # ILLEGAL_QUERY_EXPRESSION = hard_issue :ILLEGAL_QUERY_EXPRESSION do "Illegal query expression. #{label.a_an_uc(semantic)} cannot be used in a query" end # If an attempt is made to make a resource default virtual or exported. # NOT_VIRTUALIZEABLE = hard_issue :NOT_VIRTUALIZEABLE do "Resource Defaults are not virtualizable" end # When an attempt is made to use multiple keys (to produce a range in Ruby - e.g. $arr[2,-1]). # This is not supported in 3x, but it allowed in 4x. # UNSUPPORTED_RANGE = issue :UNSUPPORTED_RANGE, :count do "Attempt to use unsupported range in #{label.a_an(semantic)}, #{count} values given for max 1" end ILLEGAL_RELATIONSHIP_OPERAND_TYPE = issue :ILLEGAL_RELATIONSHIP_OPERAND_TYPE, :operand do "Illegal relationship operand, can not form a relationship with #{label.a_an(operand)}. A Catalog type is required." end NOT_CATALOG_TYPE = issue :NOT_CATALOG_TYPE, :type do "Illegal relationship operand, can not form a relationship with something of type #{type}. A Catalog type is required." end BAD_STRING_SLICE_ARITY = issue :BAD_STRING_SLICE_ARITY, :actual do "String supports [] with one or two arguments. Got #{actual}" end BAD_STRING_SLICE_TYPE = issue :BAD_STRING_SLICE_TYPE, :actual do "String-Type [] requires all arguments to be integers (or default). Got #{actual}" end BAD_ARRAY_SLICE_ARITY = issue :BAD_ARRAY_SLICE_ARITY, :actual do "Array supports [] with one or two arguments. Got #{actual}" end BAD_HASH_SLICE_ARITY = issue :BAD_HASH_SLICE_ARITY, :actual do "Hash supports [] with one or more arguments. 
Got #{actual}" end BAD_INTEGER_SLICE_ARITY = issue :BAD_INTEGER_SLICE_ARITY, :actual do "Integer-Type supports [] with one or two arguments (from, to). Got #{actual}" end BAD_INTEGER_SLICE_TYPE = issue :BAD_INTEGER_SLICE_TYPE, :actual do "Integer-Type [] requires all arguments to be integers (or default). Got #{actual}" end BAD_COLLECTION_SLICE_TYPE = issue :BAD_COLLECTION_SLICE_TYPE, :actual do "A Type's size constraint arguments must be a single Integer type, or 1-2 integers (or default). Got #{label.a_an(actual)}" end BAD_FLOAT_SLICE_ARITY = issue :BAD_INTEGER_SLICE_ARITY, :actual do "Float-Type supports [] with one or two arguments (from, to). Got #{actual}" end BAD_FLOAT_SLICE_TYPE = issue :BAD_INTEGER_SLICE_TYPE, :actual do "Float-Type [] requires all arguments to be floats, or integers (or default). Got #{actual}" end BAD_SLICE_KEY_TYPE = issue :BAD_SLICE_KEY_TYPE, :left_value, :expected_classes, :actual do expected_text = if expected_classes.size > 1 "one of #{expected_classes.join(', ')} are" else "#{expected_classes[0]} is" end "#{label.a_an_uc(left_value)}[] cannot use #{actual} where #{expected_text} expected" end BAD_TYPE_SLICE_TYPE = issue :BAD_TYPE_SLICE_TYPE, :base_type, :actual do "#{base_type}[] arguments must be types. Got #{actual}" end BAD_TYPE_SLICE_ARITY = issue :BAD_TYPE_SLICE_ARITY, :base_type, :min, :max, :actual do base_type_label = base_type.is_a?(String) ? base_type : label.a_an_uc(base_type) if max == -1 || max == 1.0 / 0.0 # Infinity "#{base_type_label}[] accepts #{min} or more arguments. Got #{actual}" elsif max && max != min "#{base_type_label}[] accepts #{min} to #{max} arguments. Got #{actual}" else "#{base_type_label}[] accepts #{min} #{label.plural_s(min, 'argument')}. Got #{actual}" end end BAD_TYPE_SPECIALIZATION = hard_issue :BAD_TYPE_SPECIALIZATION, :type, :message do "Error creating type specialization of #{label.a_an(type)}, #{message}" end ILLEGAL_TYPE_SPECIALIZATION = issue :ILLEGAL_TYPE_SPECIALIZATION, :kind do "Cannot specialize an already specialized #{kind} type" end ILLEGAL_RESOURCE_SPECIALIZATION = issue :ILLEGAL_RESOURCE_SPECIALIZATION, :actual do "First argument to Resource[] must be a resource type or a String. Got #{actual}." end EMPTY_RESOURCE_SPECIALIZATION = issue :EMPTY_RESOURCE_SPECIALIZATION do "Arguments to Resource[] are all empty/undefined" end ILLEGAL_HOSTCLASS_NAME = hard_issue :ILLEGAL_HOSTCLASS_NAME, :name do "Illegal Class name in class reference. #{label.a_an_uc(name)} cannot be used where a String is expected" end ILLEGAL_DEFINITION_NAME = hard_issue :ILLEGAL_DEFINTION_NAME, :name do "Unacceptable name. The name '#{name}' is unacceptable as the name of #{label.a_an(semantic)}" end CAPTURES_REST_NOT_LAST = hard_issue :CAPTURES_REST_NOT_LAST, :param_name do "Parameter $#{param_name} is not last, and has 'captures rest'" end CAPTURES_REST_NOT_SUPPORTED = hard_issue :CAPTURES_REST_NOT_SUPPORTED, :container, :param_name do "Parameter $#{param_name} has 'captures rest' - not supported in #{label.a_an(container)}" end REQUIRED_PARAMETER_AFTER_OPTIONAL = hard_issue :REQUIRED_PARAMETER_AFTER_OPTIONAL, :param_name do "Parameter $#{param_name} is required but appears after optional parameters" end MISSING_REQUIRED_PARAMETER = hard_issue :MISSING_REQUIRED_PARAMETER, :param_name do "Parameter $#{param_name} is required but no value was given" end NOT_NUMERIC = issue :NOT_NUMERIC, :value do "The value '#{value}' cannot be converted to Numeric." end UNKNOWN_FUNCTION = issue :UNKNOWN_FUNCTION, :name do "Unknown function: '#{name}'." 
end UNKNOWN_VARIABLE = issue :UNKNOWN_VARIABLE, :name do "Unknown variable: '#{name}'." end RUNTIME_ERROR = issue :RUNTIME_ERROR, :detail do "Error while evaluating #{label.a_an(semantic)}, #{detail}" end UNKNOWN_RESOURCE_TYPE = issue :UNKNOWN_RESOURCE_TYPE, :type_name do "Resource type not found: #{type_name.capitalize}" end ILLEGAL_RESOURCE_TYPE = hard_issue :ILLEGAL_RESOURCE_TYPE, :actual do "Illegal Resource Type expression, expected result to be a type name, or untitled Resource, got #{actual}" end DUPLICATE_TITLE = issue :DUPLICATE_TITLE, :title do "The title '#{title}' has already been used in this resource expression" end DUPLICATE_ATTRIBUTE = issue :DUPLICATE_ATTRIBUE, :attribute do "The attribute '#{attribute}' has already been set in this resource body" end MISSING_TITLE = hard_issue :MISSING_TITLE do "Missing title. The title expression resulted in undef" end MISSING_TITLE_AT = hard_issue :MISSING_TITLE_AT, :index do "Missing title at index #{index}. The title expression resulted in an undef title" end ILLEGAL_TITLE_TYPE_AT = hard_issue :ILLEGAL_TITLE_TYPE_AT, :index, :actual do "Illegal title type at index #{index}. Expected String, got #{actual}" end EMPTY_STRING_TITLE_AT = hard_issue :EMPTY_STRING_TITLE_AT, :index do "Empty string title at #{index}. Title strings must have a length greater than zero." end UNKNOWN_RESOURCE = issue :UNKNOWN_RESOURCE, :type_name, :title do "Resource not found: #{type_name.capitalize}['#{title}']" end UNKNOWN_RESOURCE_PARAMETER = issue :UNKNOWN_RESOURCE_PARAMETER, :type_name, :title, :param_name do "The resource #{type_name.capitalize}['#{title}'] does not have a parameter called '#{param_name}'" end DIV_BY_ZERO = hard_issue :DIV_BY_ZERO do "Division by 0" end RESULT_IS_INFINITY = hard_issue :RESULT_IS_INFINITY, :operator do "The result of the #{operator} expression is Infinity" end # TODO_HEREDOC EMPTY_HEREDOC_SYNTAX_SEGMENT = issue :EMPTY_HEREDOC_SYNTAX_SEGMENT, :syntax do "Heredoc syntax specification has empty segment between '+' : '#{syntax}'" end ILLEGAL_EPP_PARAMETERS = issue :ILLEGAL_EPP_PARAMETERS do "Ambiguous EPP parameter expression. Probably missing '<%-' before parameters to remove leading whitespace" end DISCONTINUED_IMPORT = hard_issue :DISCONTINUED_IMPORT do "Use of 'import' has been discontinued in favor of a manifest directory. See http://links.puppetlabs.com/puppet-import-deprecation" end IDEM_EXPRESSION_NOT_LAST = issue :IDEM_EXPRESSION_NOT_LAST do "This #{label.label(semantic)} has no effect. A value-producing expression without other effect may only be placed last in a block/sequence" end IDEM_NOT_ALLOWED_LAST = hard_issue :IDEM_NOT_ALLOWED_LAST, :container do "This #{label.label(semantic)} has no effect. #{label.a_an_uc(container)} can not end with a value-producing expression without other effect" end RESERVED_WORD = hard_issue :RESERVED_WORD, :word do "Use of reserved word: #{word}, must be quoted if intended to be a String value" end RESERVED_TYPE_NAME = hard_issue :RESERVED_TYPE_NAME, :name do "The name: '#{name}' is already defined by Puppet and can not be used as the name of #{label.a_an(semantic)}." end UNMATCHED_SELECTOR = hard_issue :UNMATCHED_SELECTOR, :param_value do "No matching entry for selector parameter with value '#{param_value}'" end ILLEGAL_NODE_INHERITANCE = issue :ILLEGAL_NODE_INHERITANCE do "Node inheritance is not supported in Puppet >= 4.0.0. 
See http://links.puppetlabs.com/puppet-node-inheritance-deprecation" end ILLEGAL_OVERRIDEN_TYPE = issue :ILLEGAL_OVERRIDEN_TYPE, :actual do "Resource Override can only operate on resources, got: #{label.label(actual)}" end RESERVED_PARAMETER = hard_issue :RESERVED_PARAMETER, :container, :param_name do "The parameter $#{param_name} redefines a built in parameter in #{label.the(container)}" end TYPE_MISMATCH = hard_issue :TYPE_MISMATCH, :expected, :actual do "Expected value of type #{expected}, got #{actual}" end MULTIPLE_ATTRIBUTES_UNFOLD = hard_issue :MULTIPLE_ATTRIBUTES_UNFOLD do "Unfolding of attributes from Hash can only be used once per resource body" end + + SYNTAX_ERROR = hard_issue :SYNTAX_ERROR, :where do + "Syntax error at #{where}" + end + + ILLEGAL_CLASS_REFERENCE = hard_issue :ILLEGAL_CLASS_REFERENCE do + 'Illegal class reference' + end + + ILLEGAL_FULLY_QUALIFIED_CLASS_REFERENCE = hard_issue :ILLEGAL_FULLY_QUALIFIED_CLASS_REFERENCE do + 'Illegal fully qualified class reference' + end + + ILLEGAL_FULLY_QUALIFIED_NAME = hard_issue :ILLEGAL_FULLY_QUALIFIED_NAME do + 'Illegal fully qualified name' + end + + ILLEGAL_NAME_OR_BARE_WORD = hard_issue :ILLEGAL_NAME_OR_BARE_WORD do + 'Illegal name or bare word' + end + + ILLEGAL_NUMBER = hard_issue :ILLEGAL_NUMBER do + 'Illegal number' + end + + ILLEGAL_UNICODE_ESCAPE = issue :ILLEGAL_UNICODE_ESCAPE do + "Unicode escape '\\u' was not followed by 4 hex digits" + end + + INVALID_HEX_NUMBER = hard_issue :INVALID_HEX_NUMBER, :value do + "Not a valid hex number #{value}" + end + + INVALID_OCTAL_NUMBER = hard_issue :INVALID_OCTAL_NUMBER, :value do + "Not a valid octal number #{value}" + end + + INVALID_DECIMAL_NUMBER = hard_issue :INVALID_DECIMAL_NUMBER, :value do + "Not a valid decimal number #{value}" + end + + NO_INPUT_TO_LEXER = hard_issue :NO_INPUT_TO_LEXER do + "Internal Error: No string or file given to lexer to process." + end + + UNRECOGNIZED_ESCAPE = issue :UNRECOGNIZED_ESCAPE, :ch do + "Unrecognized escape sequence '\\#{ch}'" + end + + UNCLOSED_QUOTE = hard_issue :UNCLOSED_QUOTE, :after, :followed_by do + "Unclosed quote after #{after} followed by '#{followed_by}'" + end + + EPP_INTERNAL_ERROR = hard_issue :EPP_INTERNAL_ERROR, :error do + "Internal error: #{error}" + end + + EPP_UNBALANCED_TAG = hard_issue :EPP_UNBALANCED_TAG do + 'Unbalanced epp tag, reached without closing tag.' + end + + EPP_UNBALANCED_COMMENT = hard_issue :EPP_UNBALANCED_COMMENT do + 'Reaching end after opening <%# without seeing %>' + end + + EPP_UNBALANCED_EXPRESSION = hard_issue :EPP_UNBALANCED_EXPRESSION do + 'Unbalanced embedded expression - opening <% and reaching end of input' + end + + HEREDOC_UNCLOSED_PARENTHESIS = hard_issue :HEREDOC_UNCLOSED_PARENTHESIS, :followed_by do + "Unclosed parenthesis after '@(' followed by '#{followed_by}'" + end + + HEREDOC_WITHOUT_END_TAGGED_LINE = hard_issue :HEREDOC_WITHOUT_END_TAGGED_LINE do + 'Heredoc without end-tagged line' + end + + HEREDOC_INVALID_ESCAPE = hard_issue :HEREDOC_INVALID_ESCAPE, :actual do + "Invalid heredoc escape char. Only t, r, n, s, u, L, $ allowed. Got '#{actual}'" + end + + HEREDOC_INVALID_SYNTAX = hard_issue :HEREDOC_INVALID_SYNTAX do + 'Invalid syntax in heredoc expected @(endtag[:syntax][/escapes])' + end + + HEREDOC_WITHOUT_TEXT = hard_issue :HEREDOC_WITHOUT_TEXT do + 'Heredoc without any following lines of text' + end + + HEREDOC_MULTIPLE_AT_ESCAPES = hard_issue :HEREDOC_MULTIPLE_AT_ESCAPES, :escapes do + "An escape char for @() may only appear once. 
Got '#{escapes.join(', ')}'" + end end diff --git a/lib/puppet/pops/parser/epp_support.rb b/lib/puppet/pops/parser/epp_support.rb index 9329ec0b8..1fa533cf9 100644 --- a/lib/puppet/pops/parser/epp_support.rb +++ b/lib/puppet/pops/parser/epp_support.rb @@ -1,247 +1,256 @@ # This module is an integral part of the Lexer. # It handles scanning of EPP (Embedded Puppet), a form of string/expression interpolation similar to ERB. # require 'strscan' module Puppet::Pops::Parser::EppSupport TOKEN_RENDER_STRING = [:RENDER_STRING, nil, 0] TOKEN_RENDER_EXPR = [:RENDER_EXPR, nil, 0] # Scans all of the content and returns it in an array # Note that the terminating [false, false] token is included in the result. # def fullscan_epp result = [] scan_epp {|token, value| result.push([token, value]) } result end # A block must be passed to scan. It will be called with two arguments, a symbol for the token, # and an instance of LexerSupport::TokenValue # PERFORMANCE NOTE: The TokenValue is designed to reduce the amount of garbage / temporary data # and to only convert the lexer's internal tokens on demand. It is slightly more costly to create an # instance of a class defined in Ruby than an Array or Hash, but the gain is much bigger since transformation # logic is avoided for many of its members (most are never used (e.g. line/pos information which is only of # value in general for error messages, and for some expressions (which the lexer does not know about). # def scan_epp # PERFORMANCE note: it is faster to access local variables than instance variables. # This makes a small but notable difference since instance member access is avoided for # every token in the lexed content. # scn = @scanner ctx = @lexing_context queue = @token_queue - lex_error "Internal Error: No string or file given to lexer to process." unless scn + lex_error(Puppet::Pops::Issues::EPP_INTERNAL_ERROR, :error => 'No string or file given to lexer to process.') unless scn ctx[:epp_mode] = :text enqueue_completed([:EPP_START, nil, 0], 0) interpolate_epp # This is the lexer's main loop until queue.empty? && scn.eos? do if token = queue.shift || lex_token yield [ ctx[:after] = token[0], token[1] ] end end if ctx[:epp_open_position] - lex_error("Unbalanced epp tag, reached without closing tag.", ctx[:epp_position]) + lex_error(Puppet::Pops::Issues::EPP_UNBALANCED_TAG, {}, ctx[:epp_position]) end # Signals end of input yield [false, false] end def interpolate_epp(skip_leading=false) scn = @scanner ctx = @lexing_context eppscanner = EppScanner.new(scn) before = scn.pos s = eppscanner.scan(skip_leading) case eppscanner.mode when :text # Should be at end of scan, or something is terribly wrong - lex_error("Internal error: template scanner returns text mode and is not and end of input") unless @scanner.eos? + unless @scanner.eos? + lex_error(Puppet::Pops::Issues::EPP_INTERNAL_ERROR, :error => 'template scanner returns text mode and is not and end of input') + end if s # s may be nil if scanned text ends with an epp tag (i.e. no trailing text). 
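#
# The issue constants introduced by this patch format like any other Issue; the lex_error calls
# above only need to supply the named arguments their message blocks interpolate. For example:
#
#   Puppet::Pops::Issues::EPP_INTERNAL_ERROR.format(:error => 'boom')
#   # => "Internal error: boom"
#   Puppet::Pops::Issues::HEREDOC_MULTIPLE_AT_ESCAPES.format(:escapes => ['t', 't'])
#   # => "An escape char for @() may only appear once. Got 't, t'"
#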
enqueue_completed([:RENDER_STRING, s, scn.pos - before], before) end ctx[:epp_open_position] = nil # do nothing else, scanner is at the end when :error - lex_error(eppscanner.message()) + lex_error(eppscanner.issue) when :epp # It is meaningless to render empty string segments, and it is harmful to do this at # the start of the scan as it prevents specification of parameters with <%- ($x, $y) -%> # if s && s.length > 0 enqueue_completed([:RENDER_STRING, s, scn.pos - before], before) end # switch epp_mode to general (embedded) pp logic (non rendered result) ctx[:epp_mode] = :epp ctx[:epp_open_position] = scn.pos when :expr # It is meaningless to render an empty string segment if s && s.length > 0 enqueue_completed([:RENDER_STRING, s, scn.pos - before], before) end enqueue_completed(TOKEN_RENDER_EXPR, before) # switch mode to "epp expr interpolation" ctx[:epp_mode] = :expr ctx[:epp_open_position] = scn.pos else - lex_error("Internal Error, Unknown mode #{eppscanner.mode} returned by template scanner") + lex_error(Puppet::Pops::Issues::EPP_INTERNAL_ERROR, :error => "Unknown mode #{eppscanner.mode} returned by template scanner") end nil end # A scanner specialized in processing text with embedded EPP (Embedded Puppet) tags. # The scanner is initialized with a StringScanner which it mutates as scanning takes place. # The intent is to use one instance of EppScanner per wanted scan, and this instance represents # the state after the scan. # # @example Sample usage # a = "some text <% pp code %> some more text" # scan = StringScanner.new(a) # eppscan = EppScanner.new(scan) # str = eppscan.scan # eppscan.mode # => :epp # eppscan.lines # => 0 # eppscan # # The scanner supports # * scanning text until <%, <%-, <%= # * while scanning text: # * tokens <%% and %%> are translated to <% and %> respectively and are returned as text. # * tokens <%# and %> (or ending with -%>) and the enclosed text is a comment and is not included in the returned text # * text following a comment that ends with -%> gets trailing whitespace (up to and including a line break) trimmed # and this whitespace is not included in the returned text. # * The continuation {#mode} is set to one of: # * `:epp` - for a <% token # * `:expr` - for a <%= token # * `:text` - when there was no continuation mode (e.g. when input ends with text) # * `:error` - if the tokens are unbalanced (reaching the end without a closing matching token). An error message # is then also available via the method {#message}. # # Note that the intent is to use this specialized scanner to scan the text parts; when continuation mode is `:epp` or `:expr` # the pp lexer should advance scanning (using the string scanner) until it reaches and consumes a `-%>` or `%>` token. If it # finds a `-%>` token it should pass this on as a `skip_leading` parameter when it performs the next {#scan}. # class EppScanner # The original scanner used by the lexer/container using EppScanner attr_reader :scanner # The resulting mode after the scan. # The mode is one of `:text` (the initial mode), `:epp` embedded code (no output), `:expr` (embedded # expression), or `:error` # attr_reader :mode - # An error message if `mode == :error`, `nil` otherwise. - attr_reader :message + # An error issue if `mode == :error`, `nil` otherwise. + attr_reader :issue # If the first scan should skip leading whitespace (typically detected by the pp lexer when the # pp mode end-token is found (i.e. `-%>`) and then passed on to the scanner.
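#
# Alongside the @example above, the error path after this change surfaces a structured issue
# while the #message accessor kept below still renders the same text; roughly:
#
#   scan = StringScanner.new("some text <%")
#   eppscan = Puppet::Pops::Parser::EppSupport::EppScanner.new(scan)
#   eppscan.scan     # => "some text <%"
#   eppscan.mode     # => :error
#   eppscan.issue    # => Puppet::Pops::Issues::EPP_UNBALANCED_EXPRESSION
#   eppscan.message  # => "Unbalanced embedded expression - opening <% and reaching end of input"
#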
# attr_reader :skip_leading # Creates an EppScanner based on a StringScanner that represents the state where EppScanner should start scanning. # The given scanner will be mutated (i.e. position moved) to reflect the EppScanner's end state after a scan. # def initialize(scanner) @scanner = scanner end + # Here for backwards compatibility. + # @deprecated Use issue instead + # @return [String] the issue message + def message + @issue.nil? ? nil : @issue.format + end + # Scans from the current position in the configured scanner, advances this scanner's position until the end # of the input, or to the first position after a mode switching token (`<%`, `<%-` or `<%=`). Number of processed # lines and continuation mode can be obtained via {#lines}, and {#mode}. # # @return [String, nil] the scanned and processed text, or nil if at the end of the input. # def scan(skip_leading=false) @mode = :text @skip_leading = skip_leading return nil if scanner.eos? s = "" until scanner.eos? part = @scanner.scan_until(/(<%)|\z/) if @skip_leading part.gsub!(/^[ \t]*\r?\n?/,'') @skip_leading = false end # The spec for %%> is to transform it into a literal %>. This is done here, as %%> otherwise would go # undetected in text mode. (i.e. it is not really necessary to escape %> with %%> in text mode unless # adding checks stating that a literal %> is illegal in text (unbalanced). # part.gsub!(/%%>/, '%>') s += part case @scanner.peek(1) when "" # at the end # if s ends with <% then this is an error (unbalanced <% %>) if s.end_with? "<%" @mode = :error - @message = "Unbalanced embedded expression - opening <% and reaching end of input" + @issue = Puppet::Pops::Issues::EPP_UNBALANCED_EXPRESSION else mode = :epp end return s when "-" # trim trailing whitespace on same line from accumulated s # return text and signal switch to pp mode @scanner.getch # drop the - s.gsub!(/\r?\n?[ \t]*<%\z/, '') @mode = :epp return s when "%" # verbatim text # keep the scanned <%, and continue scanning after skipping one % # (i.e. do nothing here) @scanner.getch # drop the % to get a literal <% in the output when "=" # expression # return text and signal switch to expression mode # drop the scanned <%, and skip past -%>, or %>, but also skip %%> @scanner.getch # drop the = s.slice!(-2..-1) @mode = :expr return s when "#" # template comment # drop the scanned <%, and skip past -%>, or %>, but also skip %%> s.slice!(-2..-1) # unless there is an immediate termination i.e. <%#%> scan for the next %> that is not # preceded by a % (i.e. 
skip %%>) part = scanner.scan_until(/[^%]%>/) unless part - @message = "Reaching end after opening <%# without seeing %>" + @issue = Puppet::Pops::Issues::EPP_UNBALANCED_COMMENT @mode = :error return s end @skip_leading = true if part.end_with?("-%>") # Continue scanning for more text else # Switch to pp after having removed the <% s.slice!(-2..-1) @mode = :epp return s end end end end end diff --git a/lib/puppet/pops/parser/evaluating_parser.rb b/lib/puppet/pops/parser/evaluating_parser.rb index 4e84b153e..3cb1b6b6c 100644 --- a/lib/puppet/pops/parser/evaluating_parser.rb +++ b/lib/puppet/pops/parser/evaluating_parser.rb @@ -1,148 +1,150 @@ # Does not support "import" and parsing ruby files # class Puppet::Pops::Parser::EvaluatingParser attr_reader :parser def initialize() @parser = Puppet::Pops::Parser::Parser.new() end def parse_string(s, file_source = 'unknown') @file_source = file_source clear() # Handling of syntax error can be much improved (in general), now it bails out of the parser # and does not have as rich information (when parsing a string), need to update it with the file source # (ideally, a syntax error should be entered as an issue, and not just thrown - but that is a general problem # and an improvement that can be made in the eparser (rather than here). # Also a possible improvement (if the YAML parser returns positions) is to provide correct output of position. # begin assert_and_report(parser.parse_string(s)) + rescue Puppet::ParseErrorWithIssue => e + raise e rescue Puppet::ParseError => e # TODO: This is not quite right, why does not the exception have the correct file? e.file = @file_source unless e.file.is_a?(String) && !e.file.empty? raise e end end def parse_file(file) @file_source = file clear() assert_and_report(parser.parse_file(file)) end def evaluate_string(scope, s, file_source='unknown') evaluate(scope, parse_string(s, file_source)) end def evaluate_file(file) evaluate(parse_file(file)) end def clear() @acceptor = nil end # Create a closure that can be called in the given scope def closure(model, scope) Puppet::Pops::Evaluator::Closure.new(evaluator, model, scope) end def evaluate(scope, model) return nil unless model evaluator.evaluate(model, scope) end def evaluator # Do not use the cached evaluator if this is a migration run if (Puppet.lookup(:migration_checker) { nil }) return Puppet::Pops::Evaluator::EvaluatorImpl.new() end @@evaluator ||= Puppet::Pops::Evaluator::EvaluatorImpl.new() @@evaluator end def convert_to_3x(object, scope) val = evaluator.convert(object, scope, nil) end def validate(parse_result) resulting_acceptor = acceptor() validator(resulting_acceptor).validate(parse_result) resulting_acceptor end def acceptor() Puppet::Pops::Validation::Acceptor.new end def validator(acceptor) Puppet::Pops::Validation::ValidatorFactory_4_0.new().validator(acceptor) end def assert_and_report(parse_result) return nil unless parse_result if parse_result.source_ref.nil? or parse_result.source_ref == '' parse_result.source_ref = @file_source end validation_result = validate(parse_result) Puppet::Pops::IssueReporter.assert_and_report(validation_result, :emit_warnings => true) parse_result end def quote(x) self.class.quote(x) end # Translates an already parsed string that contains control characters, quotes # and backslashes into a quoted string where all such constructs have been escaped. 
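#
# A round-trip example of the quoting described here (parsing the returned literal should give
# back the original string):
#
#   Puppet::Pops::Parser::EvaluatingParser.quote('say "hi" twice')
#   # => '"say \"hi\" twice"'
#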
# Parsing the return value of this method using the puppet parser should yield # exactly the same string as the argument passed to this method # # The method makes an exception for the two character sequences \$ and \s. They # will not be escaped since they have a special meaning in puppet syntax. # # TODO: Handle \uXXXX characters ?? # # @param x [String] The string to quote and "unparse" # @return [String] The quoted string # def self.quote(x) escaped = '"' p = nil x.each_char do |c| case p when nil # do nothing when "\t" escaped << '\\t' when "\n" escaped << '\\n' when "\f" escaped << '\\f' # TODO: \cx is a range of characters - skip for now # when "\c" # escaped << '\\c' when '"' escaped << '\\"' when '\\' escaped << if c == '$' || c == 's'; p; else '\\\\'; end # don't escape \ when followed by s or $ else escaped << p end p = c end escaped << p unless p.nil? escaped << '"' end class EvaluatingEppParser < Puppet::Pops::Parser::EvaluatingParser def initialize() @parser = Puppet::Pops::Parser::EppParser.new() end end end diff --git a/lib/puppet/pops/parser/heredoc_support.rb b/lib/puppet/pops/parser/heredoc_support.rb index 79d269aa7..74a307d36 100644 --- a/lib/puppet/pops/parser/heredoc_support.rb +++ b/lib/puppet/pops/parser/heredoc_support.rb @@ -1,139 +1,140 @@ module Puppet::Pops::Parser::HeredocSupport + include Puppet::Pops::Parser::LexerSupport # Pattern for heredoc `@(endtag[:syntax][/escapes]) # Produces groups for endtag (group 1), syntax (group 2), and escapes (group 3) # PATTERN_HEREDOC = %r{@\(([^:/\r\n\)]+)(?::[:blank:]*([a-z][a-zA-Z0-9_+]+)[:blank:]*)?(?:/((?:\w|[$])*)[:blank:]*)?\)} def heredoc scn = @scanner ctx = @lexing_context locator = @locator before = scn.pos # scanner is at position before @( # find end of the heredoc spec - str = scn.scan_until(/\)/) || lexer.lex_error("Unclosed parenthesis after '@(' followed by '#{followed_by}'") + str = scn.scan_until(/\)/) || lex_error(Puppet::Pops::Issues::HEREDOC_UNCLOSED_PARENTHESIS, :followed_by => followed_by) pos_after_heredoc = scn.pos # Note: allows '+' as separator in syntax, but this needs validation as empty segments are not allowed - unless md = str.match(PATTERN_HEREDOC) - lex_error("Invalid syntax in heredoc expected @(endtag[:syntax][/escapes])") - end + md = str.match(PATTERN_HEREDOC) + lex_error(Puppet::Pops::Issues::HEREDOC_INVALID_SYNTAX) unless md endtag = md[1] syntax = md[2] || '' escapes = md[3] endtag.strip! # Is this a dq string style heredoc? (endtag enclosed in "") if endtag =~ /^"(.*)"$/ dqstring_style = true endtag = $1.strip end - lexer.lex_error("Missing endtag in heredoc") unless endtag.length >= 1 + lex_error(Puppet::Pops::Issues::HEREDOC_MISSING_ENDTAG) unless endtag.length >= 1 resulting_escapes = [] if escapes escapes = "trnsuL$" if escapes.length < 1 escapes = escapes.split('') unless escapes.length == escapes.uniq.length - lex_error("An escape char for @() may only appear once. Got '#{escapes.join(', ')}") + lex_error(Puppet::Pops::Issues::HEREDOC_MULTIPLE_AT_ESCAPES, :escapes => escapes) end resulting_escapes = ["\\"] escapes.each do |e| case e when "t", "r", "n", "s", "u", "$" resulting_escapes << e when "L" resulting_escapes += ["\n", "\r\n"] else - lex_error("Invalid heredoc escape char. Only t, r, n, s, u, L, $ allowed. 
Got '#{e}'") + lex_error(Puppet::Pops::Issues::HEREDOC_INVALID_ESCAPE, :actual => e) end end end # Produce a heredoc token to make the syntax available to the grammar enqueue_completed([:HEREDOC, syntax, pos_after_heredoc - before], before) # If this is the second or subsequent heredoc on the line, the lexing context's :newline_jump contains # the position after the \n where the next heredoc text should scan. If not set, this is the first # and it should start scanning after the first found \n (or if not found == error). if ctx[:newline_jump] scn.pos = ctx[:newline_jump] else - scn.scan_until(/\n/) || lex_error("Heredoc without any following lines of text") + scn.scan_until(/\n/) || lex_error(Puppet::Pops::Issues::HEREDOC_WITHOUT_TEXT) end # offset 0 for the heredoc, and its line number heredoc_offset = scn.pos heredoc_line = locator.line_for_offset(heredoc_offset)-1 # Compute message to emit if there is no end (to make it refer to the opening heredoc position). - eof_message = positioned_message("Heredoc without end-tagged line") + eof_error = create_lex_error(Puppet::Pops::Issues::HEREDOC_WITHOUT_END_TAGGED_LINE) # Text from this position (+ lexing contexts offset for any preceding heredoc) is heredoc until a line # that terminates the heredoc is found. # (Endline in EBNF form): WS* ('|' WS*)? ('-' WS*)? endtag WS* \r? (\n|$) endline_pattern = /([[:blank:]]*)(?:([|])[[:blank:]]*)?(?:(\-)[[:blank:]]*)?#{Regexp.escape(endtag)}[[:blank:]]*\r?(?:\n|\z)/ lines = [] while !scn.eos? do - one_line = scn.scan_until(/(?:\n|\z)/) || lexer.lex_error_without_pos(eof_message) + one_line = scn.scan_until(/(?:\n|\z)/) + raise eof_error unless one_line if md = one_line.match(endline_pattern) leading = md[1] has_margin = md[2] == '|' remove_break = md[3] == '-' # Record position where next heredoc (from same line as current @()) should start scanning for content ctx[:newline_jump] = scn.pos # Process captured lines - remove leading, and trailing newline str = heredoc_text(lines, leading, has_margin, remove_break) # Use a new lexer instance configured with a sub-locator to enable correct positioning sublexer = self.class.new() locator = Puppet::Pops::Parser::Locator::SubLocator.sub_locator(str, locator.file, heredoc_line, heredoc_offset, leading.length()) # Emit a token that provides the grammar with location information about the lines on which the heredoc # content is based. enqueue([:SUBLOCATE, Puppet::Pops::Parser::LexerSupport::TokenValue.new([:SUBLOCATE, lines, lines.reduce(0) {|size, s| size + s.length} ], heredoc_offset, locator)]) sublexer.lex_unquoted_string(str, locator, resulting_escapes, dqstring_style) sublexer.interpolate_uq_to(self) # Continue scan after @(...) scn.pos = pos_after_heredoc return else lines << one_line end end - lex_error_without_pos(eof_message) + raise eof_error end # Produces the heredoc text string given the individual (unprocessed) lines as an array. 
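#
# A small worked example of the margin/trim handling (values chosen for illustration):
#
#   heredoc_text(["  a\n", "  b\n"], "  ", true, true)    # => "a\nb"
#   heredoc_text(["  a\n", "  b\n"], "  ", false, false)  # => "  a\n  b\n"
#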
# @param lines [Array] unprocessed lines of text in the heredoc w/o terminating line # @param leading [String] the leading text (up to pipe or other terminating char) # @param has_margin [Boolean] if the left margin should be adjusted as indicated by `leading` # @param remove_break [Boolean] if the line break (\r?\n) at the end of the last line should be removed or not # def heredoc_text(lines, leading, has_margin, remove_break) if has_margin leading_pattern = /^#{Regexp.escape(leading)}/ lines = lines.collect {|s| s.gsub(leading_pattern, '') } end result = lines.join('') result.gsub!(/\r?\n$/, '') if remove_break result end end diff --git a/lib/puppet/pops/parser/lexer2.rb b/lib/puppet/pops/parser/lexer2.rb index 7ac476d36..2193ce8fb 100644 --- a/lib/puppet/pops/parser/lexer2.rb +++ b/lib/puppet/pops/parser/lexer2.rb @@ -1,694 +1,694 @@ # The Lexer is responsible for turning source text into tokens. # This version is a performance enhanced lexer (in comparison to the 3.x and earlier "future parser" lexer). # # Old returns tokens [:KEY, value, { locator = } # Could return [[token], locator] # or Token.new([token], locator) with the same API x[0] = token_symbol, x[1] = self, x[:key] = (:value, :file, :line, :pos) etc require 'strscan' require 'puppet/pops/parser/lexer_support' require 'puppet/pops/parser/heredoc_support' require 'puppet/pops/parser/interpolation_support' require 'puppet/pops/parser/epp_support' require 'puppet/pops/parser/slurp_support' class Puppet::Pops::Parser::Lexer2 include Puppet::Pops::Parser::LexerSupport include Puppet::Pops::Parser::HeredocSupport include Puppet::Pops::Parser::InterpolationSupport include Puppet::Pops::Parser::SlurpSupport include Puppet::Pops::Parser::EppSupport # All tokens have three slots, the token name (a Symbol), the token text (String), and a token text length. # All operator and punctuation tokens reuse singleton arrays. Tokens that require unique values create # a unique array per token. # # PERFORMANCE NOTES: # This construct reduces the number of objects that need to be created for operators and punctuation. # The length is pre-calculated for all singleton tokens. The length is used both to signal the length of # the token, and to advance the scanner position (without having to advance it with a scan(regexp)).
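#
# For example, the pre-computed length in slot three is what lets emit (defined further down in
# this file) advance the scanner without re-scanning the token text:
#
#   TOKEN_FARROW = [:FARROW, '=>'.freeze, 2].freeze   # [symbol, text, byte length]
#
#   def emit(token, byte_offset)
#     @scanner.pos = byte_offset + token[2]           # jump by the pre-calculated length
#     [token[0], TokenValue.new(token, byte_offset, @locator)]
#   end
#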
# TOKEN_LBRACK = [:LBRACK, '['.freeze, 1].freeze TOKEN_LISTSTART = [:LISTSTART, '['.freeze, 1].freeze TOKEN_RBRACK = [:RBRACK, ']'.freeze, 1].freeze TOKEN_LBRACE = [:LBRACE, '{'.freeze, 1].freeze TOKEN_RBRACE = [:RBRACE, '}'.freeze, 1].freeze TOKEN_SELBRACE = [:SELBRACE, '{'.freeze, 1].freeze TOKEN_LPAREN = [:LPAREN, '('.freeze, 1].freeze TOKEN_RPAREN = [:RPAREN, ')'.freeze, 1].freeze TOKEN_EQUALS = [:EQUALS, '='.freeze, 1].freeze TOKEN_APPENDS = [:APPENDS, '+='.freeze, 2].freeze TOKEN_DELETES = [:DELETES, '-='.freeze, 2].freeze TOKEN_ISEQUAL = [:ISEQUAL, '=='.freeze, 2].freeze TOKEN_NOTEQUAL = [:NOTEQUAL, '!='.freeze, 2].freeze TOKEN_MATCH = [:MATCH, '=~'.freeze, 2].freeze TOKEN_NOMATCH = [:NOMATCH, '!~'.freeze, 2].freeze TOKEN_GREATEREQUAL = [:GREATEREQUAL, '>='.freeze, 2].freeze TOKEN_GREATERTHAN = [:GREATERTHAN, '>'.freeze, 1].freeze TOKEN_LESSEQUAL = [:LESSEQUAL, '<='.freeze, 2].freeze TOKEN_LESSTHAN = [:LESSTHAN, '<'.freeze, 1].freeze TOKEN_FARROW = [:FARROW, '=>'.freeze, 2].freeze TOKEN_PARROW = [:PARROW, '+>'.freeze, 2].freeze TOKEN_LSHIFT = [:LSHIFT, '<<'.freeze, 2].freeze TOKEN_LLCOLLECT = [:LLCOLLECT, '<<|'.freeze, 3].freeze TOKEN_LCOLLECT = [:LCOLLECT, '<|'.freeze, 2].freeze TOKEN_RSHIFT = [:RSHIFT, '>>'.freeze, 2].freeze TOKEN_RRCOLLECT = [:RRCOLLECT, '|>>'.freeze, 3].freeze TOKEN_RCOLLECT = [:RCOLLECT, '|>'.freeze, 2].freeze TOKEN_PLUS = [:PLUS, '+'.freeze, 1].freeze TOKEN_MINUS = [:MINUS, '-'.freeze, 1].freeze TOKEN_DIV = [:DIV, '/'.freeze, 1].freeze TOKEN_TIMES = [:TIMES, '*'.freeze, 1].freeze TOKEN_MODULO = [:MODULO, '%'.freeze, 1].freeze TOKEN_NOT = [:NOT, '!'.freeze, 1].freeze TOKEN_DOT = [:DOT, '.'.freeze, 1].freeze TOKEN_PIPE = [:PIPE, '|'.freeze, 1].freeze TOKEN_AT = [:AT , '@'.freeze, 1].freeze TOKEN_ATAT = [:ATAT , '@@'.freeze, 2].freeze TOKEN_COLON = [:COLON, ':'.freeze, 1].freeze TOKEN_COMMA = [:COMMA, ','.freeze, 1].freeze TOKEN_SEMIC = [:SEMIC, ';'.freeze, 1].freeze TOKEN_QMARK = [:QMARK, '?'.freeze, 1].freeze TOKEN_TILDE = [:TILDE, '~'.freeze, 1].freeze # lexed but not an operator in Puppet TOKEN_REGEXP = [:REGEXP, nil, 0].freeze TOKEN_IN_EDGE = [:IN_EDGE, '->'.freeze, 2].freeze TOKEN_IN_EDGE_SUB = [:IN_EDGE_SUB, '~>'.freeze, 2].freeze TOKEN_OUT_EDGE = [:OUT_EDGE, '<-'.freeze, 2].freeze TOKEN_OUT_EDGE_SUB = [:OUT_EDGE_SUB, '<~'.freeze, 2].freeze # Tokens that are always unique to what has been lexed TOKEN_STRING = [:STRING, nil, 0].freeze TOKEN_WORD = [:WORD, nil, 0].freeze TOKEN_DQPRE = [:DQPRE, nil, 0].freeze TOKEN_DQMID = [:DQPRE, nil, 0].freeze TOKEN_DQPOS = [:DQPRE, nil, 0].freeze TOKEN_NUMBER = [:NUMBER, nil, 0].freeze TOKEN_VARIABLE = [:VARIABLE, nil, 1].freeze TOKEN_VARIABLE_EMPTY = [:VARIABLE, ''.freeze, 1].freeze # HEREDOC has syntax as an argument. TOKEN_HEREDOC = [:HEREDOC, nil, 0].freeze # EPP_START is currently a marker token, may later get syntax TOKEN_EPPSTART = [:EPP_START, nil, 0].freeze TOKEN_EPPEND = [:EPP_END, '%>', 2].freeze TOKEN_EPPEND_TRIM = [:EPP_END_TRIM, '-%>', 3].freeze # This is used for unrecognized tokens, will always be a single character. This particular instance # is not used, but is kept here for documentation purposes. TOKEN_OTHER = [:OTHER, nil, 0] # Keywords are all singleton tokens with pre calculated lengths. # Booleans are pre-calculated (rather than evaluating the strings "false" "true" repeatedly. 
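#
# Two concrete lookups against the tables defined below, for orientation:
#
#   KEYWORDS['true']       # => [:BOOLEAN, true, 4]   singleton token with pre-calculated length
#   KEYWORD_NAMES[:CLASS]  # => "class"               reverse lookup of keyword name
#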
# KEYWORDS = { "case" => [:CASE, 'case', 4], "class" => [:CLASS, 'class', 5], "default" => [:DEFAULT, 'default', 7], "define" => [:DEFINE, 'define', 6], "if" => [:IF, 'if', 2], "elsif" => [:ELSIF, 'elsif', 5], "else" => [:ELSE, 'else', 4], "inherits" => [:INHERITS, 'inherits', 8], "node" => [:NODE, 'node', 4], "and" => [:AND, 'and', 3], "or" => [:OR, 'or', 2], "undef" => [:UNDEF, 'undef', 5], "false" => [:BOOLEAN, false, 5], "true" => [:BOOLEAN, true, 4], "in" => [:IN, 'in', 2], "unless" => [:UNLESS, 'unless', 6], "function" => [:FUNCTION, 'function', 8], "type" => [:TYPE, 'type', 4], "attr" => [:ATTR, 'attr', 4], "private" => [:PRIVATE, 'private', 7], } KEYWORDS.each {|k,v| v[1].freeze; v.freeze } KEYWORDS.freeze # Reverse lookup of keyword name to string KEYWORD_NAMES = {} KEYWORDS.each {|k, v| KEYWORD_NAMES[v[0]] = k } KEYWORD_NAMES.freeze PATTERN_WS = %r{[[:blank:]\r]+} # The single line comment includes the line ending. PATTERN_COMMENT = %r{#.*\r?} PATTERN_MLCOMMENT = %r{/\*(.*?)\*/}m PATTERN_REGEX = %r{/[^/\n]*/} PATTERN_REGEX_END = %r{/} PATTERN_REGEX_A = %r{\A/} # for replacement to "" PATTERN_REGEX_Z = %r{/\Z} # for replacement to "" PATTERN_REGEX_ESC = %r{\\/} # for replacement to "/" # The 3x patterns: # PATTERN_CLASSREF = %r{((::){0,1}[A-Z][-\w]*)+} # PATTERN_NAME = %r{((::)?[a-z0-9][-\w]*)(::[a-z0-9][-\w]*)*} # The NAME and CLASSREF in 4x are strict. Each segment must start with # a letter a-z and may not contain dashes (\w includes letters, digits and _). # PATTERN_CLASSREF = %r{((::){0,1}[A-Z][\w]*)+} PATTERN_NAME = %r{^((::)?[a-z][\w]*)(::[a-z][\w]*)*$} PATTERN_BARE_WORD = %r{((?:::){0,1}(?:[a-z_](?:[\w-]*[\w])?))+} PATTERN_DOLLAR_VAR = %r{\$(::)?(\w+::)*\w+} PATTERN_NUMBER = %r{\b(?:0[xX][0-9A-Fa-f]+|0?\d+(?:\.\d+)?(?:[eE]-?\d+)?)\b} # PERFORMANCE NOTE: # Comparison against a frozen string is faster (than unfrozen). # STRING_BSLASH_BSLASH = '\\'.freeze attr_reader :locator def initialize() end # Clears the lexer state (it is not required to call this as it will be garbage collected # and the next lex call (lex_string, lex_file) will reset the internal state. # def clear() # not really needed, but if someone wants to ensure garbage is collected as early as possible @scanner = nil @locator = nil @lexing_context = nil end # Convenience method, and for compatibility with older lexer. Use the lex_string instead which allows # passing the path to use without first having to call file= (which reads the file if it exists). # (Bad form to use overloading of assignment operator for something that is not really an assignment. Also, # overloading of = does not allow passing more than one argument). # def string=(string) lex_string(string, '') end def lex_string(string, path='') initvars @scanner = StringScanner.new(string) @locator = Puppet::Pops::Parser::Locator.locator(string, path) end # Lexes an unquoted string. # @param string [String] the string to lex # @param locator [Puppet::Pops::Parser::Locator] the locator to use (a default is used if nil is given) # @param escapes [Array] array of character strings representing the escape sequences to transform # @param interpolate [Boolean] whether interpolation of expressions should be made or not. # def lex_unquoted_string(string, locator, escapes, interpolate) initvars @scanner = StringScanner.new(string) @locator = locator || Puppet::Pops::Parser::Locator.locator(string, '') @lexing_context[:escapes] = escapes || UQ_ESCAPES @lexing_context[:uq_slurp_pattern] = (interpolate || !escapes.empty?) ? 
SLURP_UQ_PATTERN : SLURP_ALL_PATTERN end # Convenience method, and for compatibility with older lexer. Use the lex_file instead. # (Bad form to use overloading of assignment operator for something that is not really an assignment). # def file=(file) lex_file(file) end # TODO: This method should not be used, callers should get the locator since it is most likely required to # compute line, position etc given offsets. # def file @locator ? @locator.file : nil end # Initializes lexing of the content of the given file. An empty string is used if the file does not exist. # def lex_file(file) initvars contents = Puppet::FileSystem.exist?(file) ? Puppet::FileSystem.read(file) : "" @scanner = StringScanner.new(contents.freeze) @locator = Puppet::Pops::Parser::Locator.locator(contents, file) end def initvars @token_queue = [] # NOTE: additional keys are used; :escapes, :uq_slurp_pattern, :newline_jump, :epp_* @lexing_context = { :brace_count => 0, :after => nil, } end # Scans all of the content and returns it in an array # Note that the terminating [false, false] token is included in the result. # def fullscan result = [] scan {|token, value| result.push([token, value]) } result end # A block must be passed to scan. It will be called with two arguments, a symbol for the token, # and an instance of LexerSupport::TokenValue # PERFORMANCE NOTE: The TokenValue is designed to reduce the amount of garbage / temporary data # and to only convert the lexer's internal tokens on demand. It is slightly more costly to create an # instance of a class defined in Ruby than an Array or Hash, but the gain is much bigger since transformation # logic is avoided for many of its members (most are never used (e.g. line/pos information which is only of # value in general for error messages, and for some expressions (which the lexer does not know about). # def scan # PERFORMANCE note: it is faster to access local variables than instance variables. # This makes a small but notable difference since instance member access is avoided for # every token in the lexed content. # scn = @scanner ctx = @lexing_context queue = @token_queue - lex_error_without_pos("Internal Error: No string or file given to lexer to process.") unless scn + lex_error_without_pos(Puppet::Pops::Issues::NO_INPUT_TO_LEXER) unless scn scn.skip(PATTERN_WS) # This is the lexer's main loop until queue.empty? && scn.eos? do if token = queue.shift || lex_token ctx[:after] = token[0] yield token end end # Signals end of input yield [false, false] end # This lexes one token at the current position of the scanner. # PERFORMANCE NOTE: Any change to this logic should be performance measured. # def lex_token # Using three char look ahead (may be faster to do 2 char look ahead since only 2 tokens require a third scn = @scanner ctx = @lexing_context before = @scanner.pos # A look ahead of 3 characters is used since the longest operator ambiguity is resolved at that point. # PERFORMANCE NOTE: It is faster to peek once and use three separate variables for lookahead 0, 1 and 2. # la = scn.peek(3) return nil if la.empty? # Ruby 1.8.7 requires using offset and length (or integers are returned. # PERFORMANCE NOTE. # It is slightly faster to use these local variables than accessing la[0], la[1] etc. in ruby 1.9.3 # But not big enough to warrant two completely different implementations. # la0 = la[0,1] la1 = la[1,1] la2 = la[2,1] # PERFORMANCE NOTE: # A case when, where all the cases are literal values is the fastest way to map from data to code. 
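#
# Stepping back from the per-character dispatch for a moment, driving the lexer end to end
# looks roughly like this (token symbols shown, values elided; 'demo.pp' is just a label):
#
#   lexer = Puppet::Pops::Parser::Lexer2.new
#   lexer.lex_string('$x = 1', 'demo.pp')
#   lexer.fullscan.map { |sym, _| sym }   # => roughly [:VARIABLE, :EQUALS, :NUMBER, false]
#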
# It is much faster than using a hash with lambdas, hash with symbol used to then invoke send etc. # This case statement is evaluated for most character positions in puppet source, and great care must # be taken to not introduce performance regressions. # case la0 when '.' emit(TOKEN_DOT, before) when ',' emit(TOKEN_COMMA, before) when '[' if (before == 0 || scn.string[locator.char_offset(before)-1,1] =~ /[[:blank:]\r\n]+/) emit(TOKEN_LISTSTART, before) else emit(TOKEN_LBRACK, before) end when ']' emit(TOKEN_RBRACK, before) when '(' emit(TOKEN_LPAREN, before) when ')' emit(TOKEN_RPAREN, before) when ';' emit(TOKEN_SEMIC, before) when '?' emit(TOKEN_QMARK, before) when '*' emit(TOKEN_TIMES, before) when '%' if la1 == '>' && ctx[:epp_mode] scn.pos += 2 if ctx[:epp_mode] == :expr enqueue_completed(TOKEN_EPPEND, before) end ctx[:epp_mode] = :text interpolate_epp else emit(TOKEN_MODULO, before) end when '{' # The lexer needs to help the parser since the technology used cannot deal with # lookahead of same token with different precedence. This is solved by making left brace # after ? into a separate token. # ctx[:brace_count] += 1 emit(if ctx[:after] == :QMARK TOKEN_SELBRACE else TOKEN_LBRACE end, before) when '}' ctx[:brace_count] -= 1 emit(TOKEN_RBRACE, before) # TOKENS @, @@, @( when '@' case la1 when '@' emit(TOKEN_ATAT, before) # TODO; Check if this is good for the grammar when '(' heredoc else emit(TOKEN_AT, before) end # TOKENS |, |>, |>> when '|' emit(case la1 when '>' la2 == '>' ? TOKEN_RRCOLLECT : TOKEN_RCOLLECT else TOKEN_PIPE end, before) # TOKENS =, =>, ==, =~ when '=' emit(case la1 when '=' TOKEN_ISEQUAL when '>' TOKEN_FARROW when '~' TOKEN_MATCH else TOKEN_EQUALS end, before) # TOKENS '+', '+=', and '+>' when '+' emit(case la1 when '=' TOKEN_APPENDS when '>' TOKEN_PARROW else TOKEN_PLUS end, before) # TOKENS '-', '->', and epp '-%>' (end of interpolation with trim) when '-' if ctx[:epp_mode] && la1 == '%' && la2 == '>' scn.pos += 3 if ctx[:epp_mode] == :expr enqueue_completed(TOKEN_EPPEND_TRIM, before) end interpolate_epp(:with_trim) else emit(case la1 when '>' TOKEN_IN_EDGE when '=' TOKEN_DELETES else TOKEN_MINUS end, before) end # TOKENS !, !=, !~ when '!' emit(case la1 when '=' TOKEN_NOTEQUAL when '~' TOKEN_NOMATCH else TOKEN_NOT end, before) # TOKENS ~>, ~ when '~' emit(la1 == '>' ? TOKEN_IN_EDGE_SUB : TOKEN_TILDE, before) when '#' scn.skip(PATTERN_COMMENT) nil # TOKENS '/', '/*' and '/ regexp /' when '/' case la1 when '*' scn.skip(PATTERN_MLCOMMENT) nil else # regexp position is a regexp, else a div if regexp_acceptable? && value = scn.scan(PATTERN_REGEX) # Ensure an escaped / was not matched while value[-2..-2] == STRING_BSLASH_BSLASH # i.e. \\ value += scn.scan_until(PATTERN_REGEX_END) end regex = value.sub(PATTERN_REGEX_A, '').sub(PATTERN_REGEX_Z, '').gsub(PATTERN_REGEX_ESC, '/') emit_completed([:REGEX, Regexp.new(regex), scn.pos-before], before) else emit(TOKEN_DIV, before) end end # TOKENS <, <=, <|, <<|, <<, <-, <~ when '<' emit(case la1 when '<' if la2 == '|' TOKEN_LLCOLLECT else TOKEN_LSHIFT end when '=' TOKEN_LESSEQUAL when '|' TOKEN_LCOLLECT when '-' TOKEN_OUT_EDGE when '~' TOKEN_OUT_EDGE_SUB else TOKEN_LESSTHAN end, before) # TOKENS >, >=, >> when '>' emit(case la1 when '>' TOKEN_RSHIFT when '=' TOKEN_GREATEREQUAL else TOKEN_GREATERTHAN end, before) # TOKENS :, ::CLASSREF, ::NAME when ':' if la1 == ':' before = scn.pos # PERFORMANCE NOTE: This could potentially be speeded up by using a case/when listing all # upper case letters. 
Alternatively, the 'A', and 'Z' comparisons may be faster if they are # frozen. # if la2 >= 'A' && la2 <= 'Z' # CLASSREF or error value = scn.scan(PATTERN_CLASSREF) if value after = scn.pos emit_completed([:CLASSREF, value.freeze, after-before], before) else # move to faulty position ('::' was ok) scn.pos = scn.pos + 3 - lex_error("Illegal fully qualified class reference") + lex_error(Puppet::Pops::Issues::ILLEGAL_FULLY_QUALIFIED_CLASS_REFERENCE) end else value = scn.scan(PATTERN_BARE_WORD) if value if value =~ PATTERN_NAME emit_completed([:NAME, value.freeze, scn.pos-before], before) else emit_completed([:WORD, value.freeze, scn.pos - before], before) end else # move to faulty position ('::' was ok) scn.pos = scn.pos + 2 - lex_error("Illegal fully qualified name") + lex_error(Puppet::Pops::Issues::ILLEGAL_FULLY_QUALIFIED_NAME) end end else emit(TOKEN_COLON, before) end when '$' if value = scn.scan(PATTERN_DOLLAR_VAR) emit_completed([:VARIABLE, value[1..-1].freeze, scn.pos - before], before) else # consume the $ and let higher layer complain about the error instead of getting a syntax error emit(TOKEN_VARIABLE_EMPTY, before) end when '"' # Recursive string interpolation, 'interpolate' either returns a STRING token, or # a DQPRE with the rest of the string's tokens placed in the @token_queue interpolate_dq when "'" emit_completed([:STRING, slurp_sqstring.freeze, scn.pos - before], before) when '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' value = scn.scan(PATTERN_NUMBER) if value length = scn.pos - before assert_numeric(value, length) emit_completed([:NUMBER, value.freeze, length], before) else # move to faulty position ([0-9] was ok) scn.pos = scn.pos + 1 - lex_error("Illegal number") + lex_error(Puppet::Pops::Issues::ILLEGAL_NUMBER) end when 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '_' value = scn.scan(PATTERN_BARE_WORD) if value && value =~ PATTERN_NAME emit_completed(KEYWORDS[value] || [:NAME, value.freeze, scn.pos - before], before) elsif value emit_completed([:WORD, value.freeze, scn.pos - before], before) else # move to faulty position ([a-z_] was ok) scn.pos = scn.pos + 1 fully_qualified = scn.match?(/::/) if fully_qualified - lex_error("Illegal fully qualified name") + lex_error(Puppet::Pops::Issues::ILLEGAL_FULLY_QUALIFIED_NAME) else - lex_error("Illegal name or bare word") + lex_error(Puppet::Pops::Issues::ILLEGAL_NAME_OR_BARE_WORD) end end when 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z' value = scn.scan(PATTERN_CLASSREF) if value emit_completed([:CLASSREF, value.freeze, scn.pos - before], before) else # move to faulty position ([A-Z] was ok) scn.pos = scn.pos + 1 - lex_error("Illegal class reference") + lex_error(Puppet::Pops::Issues::ILLEGAL_CLASS_REFERENCE) end when "\n" # If heredoc_cont is in effect there are heredoc text lines to skip over # otherwise just skip the newline. # if ctx[:newline_jump] scn.pos = ctx[:newline_jump] ctx[:newline_jump] = nil else scn.pos += 1 end return nil when ' ', "\t", "\r" scn.skip(PATTERN_WS) return nil else # In case of unicode spaces of various kinds that are captured by a regexp, but not by the # simpler case expression above (not worth handling those special cases with better performance). 
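# --- Illustrative sketch (editorial aside, not part of this change): what the issue-based
# lex_error calls above look like from a caller's perspective. The manifest (with its
# unclosed quote) and the path are made up; the issue raised for this particular input
# would presumably be the UNCLOSED_QUOTE issue used by slurp_sqstring later in this diff.
#
#   lexer = Puppet::Pops::Parser::Lexer2.new
#   lexer.lex_string("$greeting = 'hello", 'broken.pp')
#   begin
#     lexer.fullscan
#   rescue Puppet::ParseErrorWithIssue => e
#     # the formatted issue text is the message; the structured fields travel separately
#     warn "#{e.issue_code}: #{e.message} (#{e.file}:#{e.line}:#{e.pos})"
#   end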
if scn.skip(PATTERN_WS) nil else # "unrecognized char" emit([:OTHER, la0, 1], before) end end end # Emits (produces) a token [:tokensymbol, TokenValue] and moves the scanner's position past the token # def emit(token, byte_offset) @scanner.pos = byte_offset + token[2] [token[0], TokenValue.new(token, byte_offset, @locator)] end # Emits the completed token on the form [:tokensymbol, TokenValue. This method does not alter # the scanner's position. # def emit_completed(token, byte_offset) [token[0], TokenValue.new(token, byte_offset, @locator)] end # Enqueues a completed token at the given offset def enqueue_completed(token, byte_offset) @token_queue << emit_completed(token, byte_offset) end # Allows subprocessors for heredoc etc to enqueue tokens that are tokenized by a different lexer instance # def enqueue(emitted_token) @token_queue << emitted_token end # Answers after which tokens it is acceptable to lex a regular expression. # PERFORMANCE NOTE: # It may be beneficial to turn this into a hash with default value of true for missing entries. # A case expression with literal values will however create a hash internally. Since a reference is # always needed to the hash, this access is almost as costly as a method call. # def regexp_acceptable? case @lexing_context[:after] # Ends of (potential) R-value generating expressions when :RPAREN, :RBRACK, :RRCOLLECT, :RCOLLECT false # End of (potential) R-value - but must be allowed because of case expressions # Called out here to not be mistaken for a bug. when :RBRACE true # Operands (that can be followed by DIV (even if illegal in grammar) when :NAME, :CLASSREF, :NUMBER, :STRING, :BOOLEAN, :DQPRE, :DQMID, :DQPOST, :HEREDOC, :REGEX, :VARIABLE, :WORD false else true end end end diff --git a/lib/puppet/pops/parser/lexer_support.rb b/lib/puppet/pops/parser/lexer_support.rb index 5b296e49c..af6079a4e 100644 --- a/lib/puppet/pops/parser/lexer_support.rb +++ b/lib/puppet/pops/parser/lexer_support.rb @@ -1,113 +1,139 @@ # This is an integral part of the Lexer. It is broken out into a separate module # for maintainability of the code, and making the various parts of the lexer focused. # module Puppet::Pops::Parser::LexerSupport - # Formats given message by appending file, line and position if available. - def positioned_message(msg, pos = nil) - result = [msg] - file = @locator.file - line = @locator.line_for_offset(pos || @scanner.pos) - pos = @locator.pos_on_line(pos || @scanner.pos) - - result << "in file #{file}" if file && file.is_a?(String) && !file.empty? - result << "at line #{line}:#{pos}" - result.join(" ") - end - # Returns "" if at end of input, else the following 5 characters with \n \r \t escaped def followed_by return "" if @scanner.eos? result = @scanner.rest[0,5] + "..." 
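# --- Illustrative note (editorial aside, not part of this change): the practical effect
# of regexp_acceptable? above, shown with made-up Puppet source. A '/' right after an
# operand token (:NUMBER, :VARIABLE, ...) is lexed as TOKEN_DIV, while after an operator
# such as '=~' a regular expression is acceptable and PATTERN_REGEX is tried instead.
#
#   $half  = $total / 2           # '/' follows a :VARIABLE token, so it is division
#   $match = $name =~ /^web\d+$/  # '/' follows '=~', so a regexp literal is lexed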
result.gsub!("\t", '\t') result.gsub!("\n", '\n') result.gsub!("\r", '\r') result end # Returns a quoted string using " or ' depending on the given a strings's content def format_quote(q) if q == "'" '"\'"' else "'#{q}'" end end # Raises a Puppet::LexError with the given message - def lex_error_without_pos msg - raise Puppet::LexError.new(msg) + def lex_error_without_pos(issue, args = {}) + raise Puppet::ParseErrorWithIssue.new(issue.format(args), nil, nil, nil, nil, issue.issue_code) end - # Raises a Puppet::LexError with the given message - def lex_error(msg, pos=nil) - raise Puppet::LexError.new(positioned_message(msg, pos)) + # Raises a Puppet::ParserErrorWithIssue with the given issue and arguments + def lex_error(issue, args = {}, pos=nil) + raise create_lex_error(issue, args, pos) + end + + def filename + file = @locator.file + file.is_a?(String) && !file.empty? ? file : nil + end + + def line(pos) + @locator.line_for_offset(pos || @scanner.pos) + end + + def position(pos) + @locator.pos_on_line(pos || @scanner.pos) + end + + def lex_warning(issue, args = {}, pos=nil) + Puppet::Util::Log.create({ + :level => :warning, + :message => issue.format(args), + :issue_code => issue.issue_code, + :file => filename, + :line => line(pos), + :pos => position(pos), + }) + end + + # @param issue [Puppet::Pops::Issues::Issue] the issue + # @param args [Hash] Issue arguments + # @param pos [Integer] + # @return [Puppet::ParseErrorWithIssue] the created error + def create_lex_error(issue, args = {}, pos = nil) + Puppet::ParseErrorWithIssue.new( + issue.format(args), + filename, + line(pos), + position(pos), + nil, + issue.issue_code) end # Asserts that the given string value is a float, or an integer in decimal, octal or hex form. # An error is raised if the given value does not comply. # def assert_numeric(value, length) if value =~ /^0[xX].*$/ - lex_error("Not a valid hex number #{value}", length) unless value =~ /^0[xX][0-9A-Fa-f]+$/ + lex_error(Puppet::Pops::Issues::INVALID_HEX_NUMBER, {:value => value}, length) unless value =~ /^0[xX][0-9A-Fa-f]+$/ elsif value =~ /^0[^.].*$/ - lex_error("Not a valid octal number #{value}", length) unless value =~ /^0[0-7]+$/ + lex_error(Puppet::Pops::Issues::INVALID_OCTAL_NUMBER, {:value => value}, length) unless value =~ /^0[0-7]+$/ else - lex_error("Not a valid decimal number #{value}", length) unless value =~ /0?\d+(?:\.\d+)?(?:[eE]-?\d+)?/ + lex_error(Puppet::Pops::Issues::INVALID_DECIMAL_NUMBER, {:value => value}, length) unless value =~ /0?\d+(?:\.\d+)?(?:[eE]-?\d+)?/ end end # A TokenValue keeps track of the token symbol, the lexed text for the token, its length # and its position in its source container. There is a cost associated with computing the # line and position on line information. # class TokenValue < Puppet::Pops::Parser::Locatable attr_reader :token_array attr_reader :offset attr_reader :locator def initialize(token_array, offset, locator) @token_array = token_array @offset = offset @locator = locator end def length @token_array[2] end def [](key) case key when :value @token_array[1] when :file @locator.file when :line @locator.line_for_offset(@offset) when :pos @locator.pos_on_line(@offset) when :length @token_array[2] when :locator @locator when :offset @offset else nil end end def to_s # This format is very compact and is intended for debugging output from racc parsser in # debug mode. If this is made more elaborate the output from a debug run becomes very hard to read. 
# "'#{self[:value]} #{@token_array[0]}'" end # TODO: Make this comparable for testing # vs symbolic, vs array with symbol and non hash, array with symbol and hash) # end end diff --git a/lib/puppet/pops/parser/parser_support.rb b/lib/puppet/pops/parser/parser_support.rb index d81dba392..cda914731 100644 --- a/lib/puppet/pops/parser/parser_support.rb +++ b/lib/puppet/pops/parser/parser_support.rb @@ -1,242 +1,239 @@ require 'puppet/parser/functions' require 'puppet/parser/files' require 'puppet/resource/type_collection' require 'puppet/resource/type_collection_helper' require 'puppet/resource/type' require 'monitor' # Supporting logic for the parser. # This supporting logic has slightly different responsibilities compared to the original Puppet::Parser::Parser. # It is only concerned with parsing. # class Puppet::Pops::Parser::Parser # Note that the name of the contained class and the file name (currently parser_support.rb) # needs to be different as the class is generated by Racc, and this file (parser_support.rb) is included as a mix in # # Simplify access to the Model factory # Note that the parser/parser support does not have direct knowledge about the Model. # All model construction/manipulation is made by the Factory. # Factory = Puppet::Pops::Model::Factory Model = Puppet::Pops::Model include Puppet::Resource::TypeCollectionHelper attr_accessor :lexer attr_reader :definitions # Returns the token text of the given lexer token, or nil, if token is nil def token_text t return t if t.nil? t = t.current if t.respond_to?(:current) return t.value if t.is_a? Model::QualifiedName # else it is a lexer token t[:value] end # Produces the fully qualified name, with the full (current) namespace for a given name. # # This is needed because class bodies are lazily evaluated and an inner class' container(s) may not # have been evaluated before some external reference is made to the inner class; its must therefore know its complete name # before evaluation-time. # def classname(name) [namespace, name].join('::').sub(/^::/, '') end # Raises a Parse error with location information. Information about file is always obtained from the # lexer. Line and position is produced if the given semantic is a Positioned object and have been given an offset. # def error(semantic, message) semantic = semantic.current() if semantic.is_a?(Puppet::Pops::Model::Factory) # Adapt the model so it is possible to get location information. # The model may not have been added to the source tree, so give it the lexer's locator # directly instead of searching for the root Program where the locator is normally stored. # if semantic.is_a?(Puppet::Pops::Model::Positioned) adapter = Puppet::Pops::Adapters::SourcePosAdapter.adapt(semantic) adapter.locator = @lexer.locator else adapter = nil end except = Puppet::ParseError.new(message) except.file = @lexer.locator.file except.line = adapter.line if adapter except.pos = adapter.pos if adapter raise except end # Parses a file expected to contain pp DSL logic. 
def parse_file(file) unless Puppet::FileSystem.exist?(file) unless file =~ /\.pp$/ file = file + ".pp" end end @lexer.file = file _parse() end def initialize() @lexer = Puppet::Pops::Parser::Lexer2.new @namestack = [] @definitions = [] end # This is a callback from the generated parser (when an error occurs while parsing) # def on_error(token,value,stack) if token == 0 # denotes end of file value_at = 'end of file' else value_at = "'#{value[:value]}'" end - if @yydebug - error = "Syntax error at #{value_at}, token: #{token}" - else - error = "Syntax error at #{value_at}" - end + error = Puppet::Pops::Issues::SYNTAX_ERROR.format(:where => value_at) + error = "#{error}, token: #{token}" if @yydebug # Note, old parser had processing of "expected token here" - do not try to reinstate: # The 'expected' is only of value at end of input, otherwise any parse error involving a # start of a pair will be reported as expecting the close of the pair - e.g. "$x.each |$x {|", would # report that "seeing the '{', the '}' is expected. That would be wrong. # Real "expected" tokens are very difficult to compute (would require parsing of racc output data). Output of the stack # could help, but can require extensive backtracking and produce many options. # # The lexer should handle the "expected instead of end of file for strings, and interpolation", other expectancies # must be handled by the grammar. The lexer may have enqueued tokens far ahead - the lexer's opinion about this # is not trustworthy. # - - except = Puppet::ParseError.new(error) + file = nil + line = nil + pos = nil if token != 0 - path = value[:file] - except.line = value[:line] - except.pos = value[:pos] + file = value[:file] + line = value[:line] + pos = value[:pos] else # At end of input, use what the lexer thinks is the source file - path = lexer.file + file = lexer.file end - except.file = path if path.is_a?(String) && !path.empty? - - raise except + file = nil unless file.is_a?(String) && !file.empty? + raise Puppet::ParseErrorWithIssue.new(error, file, line, pos, nil, issue_code = Puppet::Pops::Issues::SYNTAX_ERROR.issue_code) end # Parses a String of pp DSL code. # def parse_string(code, path = '') @lexer.lex_string(code, path) _parse() end # Mark the factory wrapped model object with location information # @return [Puppet::Pops::Model::Factory] the given factory # @api private # def loc(factory, start_locateable, end_locateable = nil) factory.record_position(start_locateable, end_locateable) end # Mark the factory wrapped heredoc model object with location information # @return [Puppet::Pops::Model::Factory] the given factory # @api private # def heredoc_loc(factory, start_locateabke, end_locateable = nil) factory.record_heredoc_position(start_locatable, end_locatable) end def aryfy(o) o = [o] unless o.is_a?(Array) o end def namespace @namestack.join('::') end def namestack(name) @namestack << name end def namepop() @namestack.pop end def add_definition(definition) @definitions << definition.current definition end # Transforms an array of expressions containing literal name expressions to calls if followed by an # expression, or expression list # def transform_calls(expressions) # Factory transform raises an error if a non qualified name is followed by an argument list # since there is no way that that can be transformed back to sanity. This occurs in situations like this: # # $a = 10, notice hello # # where the "10, notice" forms an argument list. 
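# --- Illustrative sketch (editorial aside, not part of this change): with the rewritten
# on_error above, a grammar-level failure now carries an issue code as well. The manifest
# below (missing its closing brace) and the path are made up; such input is presumably
# reported through on_error as a syntax error rather than by the lexer.
#
#   parser = Puppet::Pops::Parser::Parser.new
#   begin
#     parser.parse_string("notify { 'x': ", 'dangling.pp')
#   rescue Puppet::ParseErrorWithIssue => e
#     e.issue_code == Puppet::Pops::Issues::SYNTAX_ERROR.issue_code   # => true
#   end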
The parser builds an Array with the expressions and includes # the comma tokens to enable the error to be reported against the first comma. # begin Factory.transform_calls(expressions) rescue Puppet::Pops::Model::Factory::ArgsToNonCallError => e # e.args[1] is the first comma token in the list # e.name_expr is the function name expression if e.name_expr.is_a?(Puppet::Pops::Model::QualifiedName) error(e.args[1], "attempt to pass argument list to the function '#{e.name_expr.value}' which cannot be called without parentheses") else error(e.args[1], "illegal comma separated argument list") end end end # Transforms a LEFT followed by the result of attribute_operations, this may be a call or an invalid sequence def transform_resource_wo_title(left, resource) Factory.transform_resource_wo_title(left, resource) end # Creates a program with the given body. # def create_program(body) locator = @lexer.locator Factory.PROGRAM(body, definitions, locator) end # Creates an empty program with a single No-op at the input's EOF offset with 0 length. # def create_empty_program() locator = @lexer.locator no_op = Factory.literal(nil) # Create a synthetic NOOP token at EOF offset with 0 size. The lexer does not produce an EOF token that is # visible to the grammar rules. Creating this token is mainly to reuse the positioning logic as it # expects a token decorated with location information. token_sym, token = @lexer.emit_completed([:NOOP,'',0], locator.string.bytesize) loc(no_op, token) # Program with a Noop program = Factory.PROGRAM(no_op, [], locator) program end # Performs the parsing and returns the resulting model. # The lexer holds state, and this is setup with {#parse_string}, or {#parse_file}. # # @api private # def _parse() begin @yydebug = false main = yyparse(@lexer,:scan) end return main ensure @lexer.clear @namestack = [] @definitions = [] end end diff --git a/lib/puppet/pops/parser/slurp_support.rb b/lib/puppet/pops/parser/slurp_support.rb index a93bbb7b9..175ac80e2 100644 --- a/lib/puppet/pops/parser/slurp_support.rb +++ b/lib/puppet/pops/parser/slurp_support.rb @@ -1,95 +1,97 @@ # This module is an integral part of the Lexer. # It defines the string slurping behavior - finding the string and non string parts in interpolated # strings, translating escape sequences in strings to their single character equivalence. # # PERFORMANCE NOTE: The various kinds of slurping could be made even more generic, but requires # additional parameter passing and evaluation of conditional logic. # TODO: More detailed performance analysis of excessive character escaping and interpolation. 
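# --- Illustrative note (editorial aside, not part of this change): what the escape
# handling in slurp below does for double-quoted Puppet strings. The inputs are made up;
# 't' and 'u' are members of DQ_ESCAPES, while an unknown escape is kept verbatim and now
# reported through the UNRECOGNIZED_ESCAPE lex_warning instead of a plain Puppet.warning.
#
#   "a\tb"      # becomes "a<tab>b"
#   "a\u0041b"  # becomes "aAb" ('\u' expects four hex digits)
#   "a\qb"      # stays "a\qb" and emits an UNRECOGNIZED_ESCAPE warning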
# module Puppet::Pops::Parser::SlurpSupport + include Puppet::Pops::Parser::LexerSupport SLURP_SQ_PATTERN = /(?:[^\\]|^|[^\\])(?:[\\]{2})*[']/ SLURP_DQ_PATTERN = /(?:[^\\]|^|[^\\])(?:[\\]{2})*(["]|[$]\{?)/ SLURP_UQ_PATTERN = /(?:[^\\]|^|[^\\])(?:[\\]{2})*([$]\{?|\z)/ SLURP_ALL_PATTERN = /.*(\z)/m SQ_ESCAPES = %w{ \\ ' } DQ_ESCAPES = %w{ \\ $ ' " r n t s u}+["\r\n", "\n"] UQ_ESCAPES = %w{ \\ $ r n t s u}+["\r\n", "\n"] def slurp_sqstring # skip the leading ' @scanner.pos += 1 - str = slurp(@scanner, SLURP_SQ_PATTERN, SQ_ESCAPES, :ignore_invalid_escapes) || lex_error("Unclosed quote after \"'\" followed by '#{followed_by}'") + str = slurp(@scanner, SLURP_SQ_PATTERN, SQ_ESCAPES, :ignore_invalid_escapes) + lex_error(Puppet::Pops::Issues::UNCLOSED_QUOTE, :after => "\"'\"", :followed_by => followed_by) unless str str[0..-2] # strip closing "'" from result end def slurp_dqstring scn = @scanner last = scn.matched str = slurp(scn, SLURP_DQ_PATTERN, DQ_ESCAPES, false) unless str - lex_error("Unclosed quote after #{format_quote(last)} followed by '#{followed_by}'") + lex_error(Puppet::Pops::Issues::UNCLOSED_QUOTE, :after => format_quote(last), :followed_by => followed_by) end # Terminator may be a single char '"', '$', or two characters '${' group match 1 (scn[1]) from the last slurp holds this terminator = scn[1] [str[0..(-1 - terminator.length)], terminator] end # Copy from old lexer - can do much better def slurp_uqstring scn = @scanner last = scn.matched ignore = true str = slurp(scn, @lexing_context[:uq_slurp_pattern], @lexing_context[:escapes], :ignore_invalid_escapes) # Terminator may be a single char '$', two characters '${', or empty string '' at the end of intput. # Group match 1 holds this. # The exceptional case is found by looking at the subgroup 1 of the most recent match made by the scanner (i.e. @scanner[1]). # This is the last match made by the slurp method (having called scan_until on the scanner). # If there is a terminating character is must be stripped and returned separately. # terminator = scn[1] [str[0..(-1 - terminator.length)], terminator] end # Slurps a string from the given scanner until the given pattern and then replaces any escaped # characters given by escapes into their control-character equivalent or in case of line breaks, replaces the # pattern \r?\n with an empty string. # The returned string contains the terminating character. Returns nil if the scanner can not scan until the given # pattern. # def slurp(scanner, pattern, escapes, ignore_invalid_escapes) str = scanner.scan_until(pattern) || return # Process unicode escapes first as they require getting 4 hex digits # If later a \u is found it is warned not to be a unicode escape if escapes.include?('u') str.gsub!(/\\u([\da-fA-F]{4})/m) { [$1.hex].pack("U") } end str.gsub!(/\\([^\r\n]|(?:\r?\n))/m) { ch = $1 if escapes.include? 
ch case ch when 'r' ; "\r" when 'n' ; "\n" when 't' ; "\t" when 's' ; " " when 'u' - Puppet.warning(positioned_message("Unicode escape '\\u' was not followed by 4 hex digits")) + lex_warning(Puppet::Pops::Issues::ILLEGAL_UNICODE_ESCAPE) "\\u" when "\n" ; '' when "\r\n"; '' else ch end else - Puppet.warning(positioned_message("Unrecognized escape sequence '\\#{ch}'")) unless ignore_invalid_escapes + lex_warning(Puppet::Pops::Issues::UNRECOGNIZED_ESCAPE, :ch => ch) unless ignore_invalid_escapes "\\#{ch}" end } str end end diff --git a/lib/puppet/util/log.rb b/lib/puppet/util/log.rb index ea2ccee18..08a90b1a9 100644 --- a/lib/puppet/util/log.rb +++ b/lib/puppet/util/log.rb @@ -1,354 +1,385 @@ require 'puppet/util/tagging' require 'puppet/util/classgen' require 'puppet/network/format_support' # Pass feedback to the user. Log levels are modeled after syslog's, and it is # expected that that will be the most common log destination. Supports # multiple destinations, one of which is a remote server. class Puppet::Util::Log include Puppet::Util extend Puppet::Util::ClassGen include Puppet::Util::Tagging include Puppet::Network::FormatSupport @levels = [:debug,:info,:notice,:warning,:err,:alert,:emerg,:crit] @loglevel = 2 @desttypes = {} # Create a new destination type. def self.newdesttype(name, options = {}, &block) dest = genclass( name, :parent => Puppet::Util::Log::Destination, :prefix => "Dest", :block => block, :hash => @desttypes, :attributes => options ) dest.match(dest.name) dest end require 'puppet/util/log/destination' require 'puppet/util/log/destinations' @destinations = {} @queued = [] class << self include Puppet::Util include Puppet::Util::ClassGen attr_reader :desttypes end # Reset log to basics. Basically just flushes and closes files and # undefs other objects. def Log.close(destination) if @destinations.include?(destination) @destinations[destination].flush if @destinations[destination].respond_to?(:flush) @destinations[destination].close if @destinations[destination].respond_to?(:close) @destinations.delete(destination) end end def self.close_all destinations.keys.each { |dest| close(dest) } raise Puppet::DevError.new("Log.close_all failed to close #{@destinations.keys.inspect}") if !@destinations.empty? end # Flush any log destinations that support such operations. def Log.flush @destinations.each { |type, dest| dest.flush if dest.respond_to?(:flush) } end def Log.autoflush=(v) @destinations.each do |type, dest| dest.autoflush = v if dest.respond_to?(:autoflush=) end end # Create a new log message. The primary role of this method is to # avoid creating log messages below the loglevel. def Log.create(hash) raise Puppet::DevError, "Logs require a level" unless hash.include?(:level) raise Puppet::DevError, "Invalid log level #{hash[:level]}" unless @levels.index(hash[:level]) @levels.index(hash[:level]) >= @loglevel ? Puppet::Util::Log.new(hash) : nil end def Log.destinations @destinations end # Yield each valid level in turn def Log.eachlevel @levels.each { |level| yield level } end # Return the current log level. def Log.level @levels[@loglevel] end # Set the current log level. def Log.level=(level) level = level.intern unless level.is_a?(Symbol) raise Puppet::DevError, "Invalid loglevel #{level}" unless @levels.include?(level) @loglevel = @levels.index(level) end def Log.levels @levels.dup end # Create a new log destination. def Log.newdestination(dest) # Each destination can only occur once. 
if @destinations.find { |name, obj| obj.name == dest } return end name, type = @desttypes.find do |name, klass| klass.match?(dest) end if type.respond_to?(:suitable?) and not type.suitable?(dest) return end raise Puppet::DevError, "Unknown destination type #{dest}" unless type begin if type.instance_method(:initialize).arity == 1 @destinations[dest] = type.new(dest) else @destinations[dest] = type.new end flushqueue @destinations[dest] rescue => detail Puppet.log_exception(detail) # If this was our only destination, then add the console back in. newdestination(:console) if @destinations.empty? and (dest != :console and dest != "console") end end def Log.with_destination(destination, &block) if @destinations.include?(destination) yield else newdestination(destination) begin yield ensure close(destination) end end end # Route the actual message. FIXME There are lots of things this method # should do, like caching and a bit more. It's worth noting that there's # a potential for a loop here, if the machine somehow gets the destination set as # itself. def Log.newmessage(msg) return if @levels.index(msg.level) < @loglevel queuemessage(msg) if @destinations.length == 0 @destinations.each do |name, dest| dest.handle(msg) end end def Log.queuemessage(msg) @queued.push(msg) end def Log.flushqueue return unless @destinations.size >= 1 @queued.each do |msg| Log.newmessage(msg) end @queued.clear end # Flush the logging queue. If there are no destinations available, # adds in a console logger before flushing the queue. # This is mainly intended to be used as a last-resort attempt # to ensure that logging messages are not thrown away before # the program is about to exit--most likely in a horrific # error scenario. # @return nil def Log.force_flushqueue() if (@destinations.empty? and !(@queued.empty?)) newdestination(:console) end flushqueue end def Log.sendlevel?(level) @levels.index(level) >= @loglevel end # Reopen all of our logs. def Log.reopen Puppet.notice "Reopening log files" types = @destinations.keys @destinations.each { |type, dest| dest.close if dest.respond_to?(:close) } @destinations.clear # We need to make sure we always end up with some kind of destination begin types.each { |type| Log.newdestination(type) } rescue => detail if @destinations.empty? Log.setup_default Puppet.err detail.to_s end end end def self.setup_default Log.newdestination( (Puppet.features.syslog? ? :syslog : (Puppet.features.eventlog? ? :eventlog : Puppet[:puppetdlog]))) end # Is the passed level a valid log level? 
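# --- Illustrative sketch (editorial aside, not part of this change): how the lexer's new
# lex_warning (earlier in this diff) feeds issue and location data into a log entry. All
# values below are made up; Log.create only returns an entry when the level clears the
# current loglevel.
#
#   entry = Puppet::Util::Log.create(
#     :level      => :warning,
#     :message    => "Unicode escape '\\u' was not followed by 4 hex digits",
#     :issue_code => :ILLEGAL_UNICODE_ESCAPE,
#     :file       => 'site.pp',
#     :line       => 7,
#     :pos        => 21)
#   entry.to_s   # => message with " at site.pp:7:21" appended, since an issue_code is set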
def self.validlevel?(level) @levels.include?(level) end def self.from_data_hash(data) obj = allocate obj.initialize_from_hash(data) obj end def self.from_pson(data) Puppet.deprecation_warning("from_pson is being removed in favour of from_data_hash.") self.from_data_hash(data) end - attr_accessor :time, :remote, :file, :line, :pos, :source, :issue_code, :environment, :node + attr_accessor :time, :remote, :file, :line, :pos, :source, :issue_code, :environment, :node, :backtrace attr_reader :level, :message def initialize(args) self.level = args[:level] self.message = args[:message] self.source = args[:source] || "Puppet" @time = Time.now if tags = args[:tags] tags.each { |t| self.tag(t) } end # Don't add these unless defined (preserve 3.x API as much as possible) - [:file, :line, :pos, :issue_code, :environment, :node].each do |attr| + [:file, :line, :pos, :issue_code, :environment, :node, :backtrace].each do |attr| next unless value = args[attr] send(attr.to_s + '=', value) end Log.newmessage(self) end def initialize_from_hash(data) @level = data['level'].intern @message = data['message'] @source = data['source'] @tags = Puppet::Util::TagSet.new(data['tags']) @time = data['time'] if @time.is_a? String @time = Time.parse(@time) end # Don't add these unless defined (preserve 3.x API as much as possible) - %w(file line pos issue_code environment node).each do |name| + %w(file line pos issue_code environment node backtrace).each do |name| next unless value = data[name] send(name + '=', value) end end def to_hash self.to_data_hash end def to_data_hash - hash = { + { 'level' => @level, - 'message' => @message, + 'message' => to_s, 'source' => @source, 'tags' => @tags.to_a, 'time' => @time.iso8601(9), 'file' => @file, 'line' => @line, } - # Don't add these unless defined (preserve 3.x API as much as possible) - # TODO: Should apply to file and line also but making them optional in the hash would change 3x API - %w(pos issue_code environment node).each do |name| + end + + def to_structured_hash + hash = { + 'level' => @level, + 'message' => @message, + 'source' => @source, + 'tags' => @tags.to_a, + 'time' => @time.iso8601(9), + } + %w(file line pos issue_code environment node backtrace).each do |name| attr_name = "@#{name}" hash[name] = instance_variable_get(attr_name) if instance_variable_defined?(attr_name) end hash end def to_pson(*args) to_data_hash.to_pson(*args) end def message=(msg) raise ArgumentError, "Puppet::Util::Log requires a message" unless msg @message = msg.to_s end def level=(level) raise ArgumentError, "Puppet::Util::Log requires a log level" unless level raise ArgumentError, "Puppet::Util::Log requires a symbol or string" unless level.respond_to? "to_sym" @level = level.to_sym raise ArgumentError, "Invalid log level #{@level}" unless self.class.validlevel?(@level) # Tag myself with my log level tag(level) end # If they pass a source in to us, we make sure it is a string, and # we retrieve any tags we can. def source=(source) if source.respond_to?(:path) @source = source.path source.tags.each { |t| tag(t) } self.file = source.file self.line = source.line else @source = source.to_s end end def to_report "#{time} #{source} (#{level}): #{to_s}" end def to_s - message + msg = message + + # Issue based messages do not have details in the message. It + # must be appended here + unless issue_code.nil? + msg = "Could not parse for environment #{environment}: #{msg}" unless environment.nil? 
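# (Editorial note, not part of this change) The cascade below appends the most specific
# location available (file:line:pos, then file:line, then line:pos, then line, then just
# the file), followed by the node name and any backtrace lines, and it only runs for
# issue-based messages.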
+ if file && line && pos + msg = "#{msg} at #{file}:#{line}:#{pos}" + elsif file and line + msg = "#{msg} at #{file}:#{line}" + elsif line && pos + msg = "#{msg} at line #{line}:#{pos}" + elsif line + msg = "#{msg} at line #{line}" + elsif file + msg = "#{msg} in #{file}" + end + msg = "#{msg} on node #{node}" unless node.nil? + if @backtrace.is_a?(Array) + msg += "\n" + msg += @backtrace.join("\n") + end + end + msg end end # This is for backward compatibility from when we changed the constant to Puppet::Util::Log # because the reports include the constant name. Apparently the alias was created in # March 2007, should could probably be removed soon. Puppet::Log = Puppet::Util::Log diff --git a/lib/puppet/util/log/destinations.rb b/lib/puppet/util/log/destinations.rb index ef69df1d2..fb8b6aad8 100644 --- a/lib/puppet/util/log/destinations.rb +++ b/lib/puppet/util/log/destinations.rb @@ -1,253 +1,253 @@ Puppet::Util::Log.newdesttype :syslog do def self.suitable?(obj) Puppet.features.syslog? end def close Syslog.close end def initialize Syslog.close if Syslog.opened? name = "puppet-#{Puppet.run_mode.name}" options = Syslog::LOG_PID | Syslog::LOG_NDELAY # XXX This should really be configurable. str = Puppet[:syslogfacility] begin facility = Syslog.const_get("LOG_#{str.upcase}") rescue NameError raise Puppet::Error, "Invalid syslog facility #{str}", $!.backtrace end @syslog = Syslog.open(name, options, facility) end def handle(msg) # XXX Syslog currently has a bug that makes it so you # cannot log a message with a '%' in it. So, we get rid # of them. if msg.source == "Puppet" msg.to_s.split("\n").each do |line| @syslog.send(msg.level, line.gsub("%", '%%')) end else msg.to_s.split("\n").each do |line| @syslog.send(msg.level, "(%s) %s" % [msg.source.to_s.gsub("%", ""), line.gsub("%", '%%') ] ) end end end end Puppet::Util::Log.newdesttype :file do require 'fileutils' def self.match?(obj) Puppet::Util.absolute_path?(obj) end def close if defined?(@file) @file.close @file = nil end end def flush @file.flush if defined?(@file) end attr_accessor :autoflush def initialize(path) @name = path @json = path.end_with?('.json') ? 1 : 0 # first make sure the directory exists # We can't just use 'Config.use' here, because they've # specified a "special" destination. unless Puppet::FileSystem.exist?(Puppet::FileSystem.dir(path)) FileUtils.mkdir_p(File.dirname(path), :mode => 0755) Puppet.info "Creating log directory #{File.dirname(path)}" end # create the log file, if it doesn't already exist need_array_start = false if @json == 1 need_array_start = true if File.exists?(path) sz = File.size(path) need_array_start = sz == 0 # Assume that entries have been written and that a comma # is needed before next entry @json = 2 if sz > 2 end end file = File.open(path, File::WRONLY|File::CREAT|File::APPEND) file.puts('[') if need_array_start # Give ownership to the user and group puppet will run as begin FileUtils.chown(Puppet[:user], Puppet[:group], path) unless Puppet::Util::Platform.windows? rescue ArgumentError, Errno::EPERM Puppet.err "Unable to set ownership of log file" end @file = file @autoflush = Puppet[:autoflush] end def handle(msg) if @json > 0 @json > 1 ? 
@file.puts(',') : @json = 2 - JSON.dump(msg.to_hash, @file) + JSON.dump(msg.to_structured_hash, @file) else @file.puts("#{msg.time} #{msg.source} (#{msg.level}): #{msg}") end @file.flush if @autoflush end end Puppet::Util::Log.newdesttype :logstash_event do require 'time' def format(msg) # logstash_event format is documented at # https://logstash.jira.com/browse/LOGSTASH-675 data = {} data = msg.to_hash data['version'] = 1 data['@timestamp'] = data['time'] data.delete('time') data end def handle(msg) message = format(msg) $stdout.puts message.to_pson end end Puppet::Util::Log.newdesttype :console do require 'puppet/util/colors' include Puppet::Util::Colors def initialize # Flush output immediately. $stderr.sync = true $stdout.sync = true end def handle(msg) levels = { :emerg => { :name => 'Emergency', :color => :hred, :stream => $stderr }, :alert => { :name => 'Alert', :color => :hred, :stream => $stderr }, :crit => { :name => 'Critical', :color => :hred, :stream => $stderr }, :err => { :name => 'Error', :color => :hred, :stream => $stderr }, :warning => { :name => 'Warning', :color => :hred, :stream => $stderr }, :notice => { :name => 'Notice', :color => :reset, :stream => $stdout }, :info => { :name => 'Info', :color => :green, :stream => $stdout }, :debug => { :name => 'Debug', :color => :cyan, :stream => $stdout }, } str = msg.respond_to?(:multiline) ? msg.multiline : msg.to_s str = msg.source == "Puppet" ? str : "#{msg.source}: #{str}" level = levels[msg.level] level[:stream].puts colorize(level[:color], "#{level[:name]}: #{str}") end end # Log to a transaction report. Puppet::Util::Log.newdesttype :report do attr_reader :report match "Puppet::Transaction::Report" def initialize(report) @report = report end def handle(msg) @report << msg end end # Log to an array, just for testing. module Puppet::Test class LogCollector def initialize(logs) @logs = logs end def <<(value) @logs << value end end end Puppet::Util::Log.newdesttype :array do match "Puppet::Test::LogCollector" def initialize(messages) @messages = messages end def handle(msg) @messages << msg end end Puppet::Util::Log.newdesttype :eventlog do Puppet::Util::Log::DestEventlog::EVENTLOG_ERROR_TYPE = 0x0001 Puppet::Util::Log::DestEventlog::EVENTLOG_WARNING_TYPE = 0x0002 Puppet::Util::Log::DestEventlog::EVENTLOG_INFORMATION_TYPE = 0x0004 def self.suitable?(obj) Puppet.features.eventlog? end def initialize @eventlog = Win32::EventLog.open("Application") end def to_native(level) case level when :debug,:info,:notice [self.class::EVENTLOG_INFORMATION_TYPE, 0x01] when :warning [self.class::EVENTLOG_WARNING_TYPE, 0x02] when :err,:alert,:emerg,:crit [self.class::EVENTLOG_ERROR_TYPE, 0x03] end end def handle(msg) native_type, native_id = to_native(msg.level) @eventlog.report_event( :source => "Puppet", :event_type => native_type, :event_id => native_id, :data => (msg.source and msg.source != 'Puppet' ? 
"#{msg.source}: " : '') + msg.to_s ) end def close if @eventlog @eventlog.close @eventlog = nil end end end diff --git a/spec/unit/pops/evaluator/evaluating_parser_spec.rb b/spec/unit/pops/evaluator/evaluating_parser_spec.rb index e7c6f499d..c7e795f13 100644 --- a/spec/unit/pops/evaluator/evaluating_parser_spec.rb +++ b/spec/unit/pops/evaluator/evaluating_parser_spec.rb @@ -1,1391 +1,1391 @@ require 'spec_helper' require 'puppet/pops' require 'puppet/pops/evaluator/evaluator_impl' require 'puppet/loaders' require 'puppet_spec/pops' require 'puppet_spec/scope' require 'puppet/parser/e4_parser_adapter' # relative to this spec file (./) does not work as this file is loaded by rspec #require File.join(File.dirname(__FILE__), '/evaluator_rspec_helper') describe 'Puppet::Pops::Evaluator::EvaluatorImpl' do include PuppetSpec::Pops include PuppetSpec::Scope before(:each) do Puppet[:strict_variables] = true # These must be set since the 3x logic switches some behaviors on these even if the tests explicitly # use the 4x parser and evaluator. # Puppet[:parser] = 'future' # Puppetx cannot be loaded until the correct parser has been set (injector is turned off otherwise) require 'puppetx' # Tests needs a known configuration of node/scope/compiler since it parses and evaluates # snippets as the compiler will evaluate them, butwithout the overhead of compiling a complete # catalog for each tested expression. # @parser = Puppet::Pops::Parser::EvaluatingParser.new @node = Puppet::Node.new('node.example.com') @node.environment = Puppet::Node::Environment.create(:testing, []) @compiler = Puppet::Parser::Compiler.new(@node) @scope = Puppet::Parser::Scope.new(@compiler) @scope.source = Puppet::Resource::Type.new(:node, 'node.example.com') @scope.parent = @compiler.topscope end let(:parser) { @parser } let(:scope) { @scope } types = Puppet::Pops::Types::TypeFactory context "When evaluator evaluates literals" do { "1" => 1, "010" => 8, "0x10" => 16, "3.14" => 3.14, "0.314e1" => 3.14, "31.4e-1" => 3.14, "'1'" => '1', "'banana'" => 'banana', '"banana"' => 'banana', "banana" => 'banana', "banana::split" => 'banana::split', "false" => false, "true" => true, "Array" => types.array_of_data(), "/.*/" => /.*/ }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end end context "When the evaluator evaluates Lists and Hashes" do { "[]" => [], "[1,2,3]" => [1,2,3], "[1,[2.0, 2.1, [2.2]],[3.0, 3.1]]" => [1,[2.0, 2.1, [2.2]],[3.0, 3.1]], "[2 + 2]" => [4], "[1,2,3] == [1,2,3]" => true, "[1,2,3] != [2,3,4]" => true, "[1,2,3] == [2,2,3]" => false, "[1,2,3] != [1,2,3]" => false, "[1,2,3][2]" => 3, "[1,2,3] + [4,5]" => [1,2,3,4,5], "[1,2,3] + [[4,5]]" => [1,2,3,[4,5]], "[1,2,3] + 4" => [1,2,3,4], "[1,2,3] << [4,5]" => [1,2,3,[4,5]], "[1,2,3] << {'a' => 1, 'b'=>2}" => [1,2,3,{'a' => 1, 'b'=>2}], "[1,2,3] << 4" => [1,2,3,4], "[1,2,3,4] - [2,3]" => [1,4], "[1,2,3,4] - [2,5]" => [1,3,4], "[1,2,3,4] - 2" => [1,3,4], "[1,2,3,[2],4] - 2" => [1,3,[2],4], "[1,2,3,[2,3],4] - [[2,3]]" => [1,2,3,4], "[1,2,3,3,2,4,2,3] - [2,3]" => [1,4], "[1,2,3,['a',1],['b',2]] - {'a' => 1, 'b'=>2}" => [1,2,3], "[1,2,3,{'a'=>1,'b'=>2}] - [{'a' => 1, 'b'=>2}]" => [1,2,3], }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end { "[1,2,3] + {'a' => 1, 'b'=>2}" => [1,2,3,['a',1],['b',2]], }.each do |source, result| it "should 
parse and evaluate the expression '#{source}' to #{result}" do # This test must be done with match_array since the order of the hash # is undefined and Ruby 1.8.7 and 1.9.3 produce different results. expect(parser.evaluate_string(scope, source, __FILE__)).to match_array(result) end end { "[1,2,3][a]" => :error, }.each do |source, result| it "should parse and raise error for '#{source}'" do expect { parser.evaluate_string(scope, source, __FILE__) }.to raise_error(Puppet::ParseError) end end { "{}" => {}, "{'a'=>1,'b'=>2}" => {'a'=>1,'b'=>2}, "{'a'=>1,'b'=>{'x'=>2.1,'y'=>2.2}}" => {'a'=>1,'b'=>{'x'=>2.1,'y'=>2.2}}, "{'a'=> 2 + 2}" => {'a'=> 4}, "{'a'=> 1, 'b'=>2} == {'a'=> 1, 'b'=>2}" => true, "{'a'=> 1, 'b'=>2} != {'x'=> 1, 'b'=>2}" => true, "{'a'=> 1, 'b'=>2} == {'a'=> 2, 'b'=>3}" => false, "{'a'=> 1, 'b'=>2} != {'a'=> 1, 'b'=>2}" => false, "{a => 1, b => 2}[b]" => 2, "{2+2 => sum, b => 2}[4]" => 'sum', "{'a'=>1, 'b'=>2} + {'c'=>3}" => {'a'=>1,'b'=>2,'c'=>3}, "{'a'=>1, 'b'=>2} + {'b'=>3}" => {'a'=>1,'b'=>3}, "{'a'=>1, 'b'=>2} + ['c', 3, 'b', 3]" => {'a'=>1,'b'=>3, 'c'=>3}, "{'a'=>1, 'b'=>2} + [['c', 3], ['b', 3]]" => {'a'=>1,'b'=>3, 'c'=>3}, "{'a'=>1, 'b'=>2} - {'b' => 3}" => {'a'=>1}, "{'a'=>1, 'b'=>2, 'c'=>3} - ['b', 'c']" => {'a'=>1}, "{'a'=>1, 'b'=>2, 'c'=>3} - 'c'" => {'a'=>1, 'b'=>2}, }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end { "{'a' => 1, 'b'=>2} << 1" => :error, }.each do |source, result| it "should parse and raise error for '#{source}'" do expect { parser.evaluate_string(scope, source, __FILE__) }.to raise_error(Puppet::ParseError) end end end context "When the evaluator perform comparisons" do { "'a' == 'a'" => true, "'a' == 'b'" => false, "'a' != 'a'" => false, "'a' != 'b'" => true, "'a' < 'b' " => true, "'a' < 'a' " => false, "'b' < 'a' " => false, "'a' <= 'b'" => true, "'a' <= 'a'" => true, "'b' <= 'a'" => false, "'a' > 'b' " => false, "'a' > 'a' " => false, "'b' > 'a' " => true, "'a' >= 'b'" => false, "'a' >= 'a'" => true, "'b' >= 'a'" => true, "'a' == 'A'" => true, "'a' != 'A'" => false, "'a' > 'A'" => false, "'a' >= 'A'" => true, "'A' < 'a'" => false, "'A' <= 'a'" => true, "1 == 1" => true, "1 == 2" => false, "1 != 1" => false, "1 != 2" => true, "1 < 2 " => true, "1 < 1 " => false, "2 < 1 " => false, "1 <= 2" => true, "1 <= 1" => true, "2 <= 1" => false, "1 > 2 " => false, "1 > 1 " => false, "2 > 1 " => true, "1 >= 2" => false, "1 >= 1" => true, "2 >= 1" => true, "1 == 1.0 " => true, "1 < 1.1 " => true, "1.0 == 1 " => true, "1.0 < 2 " => true, "'1.0' < 'a'" => true, "'1.0' < '' " => false, "'1.0' < ' '" => false, "'a' > '1.0'" => true, "/.*/ == /.*/ " => true, "/.*/ != /a.*/" => true, "true == true " => true, "false == false" => true, "true == false" => false, }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end { "a > 1" => /String > Integer/, "a >= 1" => /String >= Integer/, "a < 1" => /String < Integer/, "a <= 1" => /String <= Integer/, "1 > a" => /Integer > String/, "1 >= a" => /Integer >= String/, "1 < a" => /Integer < String/, "1 <= a" => /Integer <= String/, }.each do | source, error| it "should not allow comparison of String and Number '#{source}'" do expect { parser.evaluate_string(scope, source, __FILE__)}.to raise_error(error) end end { "'a' =~ /.*/" => true, "'a' =~ '.*'" => true, "/.*/ != /a.*/" => true, "'a' !~ 
/b.*/" => true, "'a' !~ 'b.*'" => true, '$x = a; a =~ "$x.*"' => true, "a =~ Pattern['a.*']" => true, "a =~ Regexp['a.*']" => false, # String is not subtype of Regexp. PUP-957 "$x = /a.*/ a =~ $x" => true, "$x = Pattern['a.*'] a =~ $x" => true, "1 =~ Integer" => true, "1 !~ Integer" => false, "[1,2,3] =~ Array[Integer[1,10]]" => true, }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end { "666 =~ /6/" => :error, "[a] =~ /a/" => :error, "{a=>1} =~ /a/" => :error, "/a/ =~ /a/" => :error, "Array =~ /A/" => :error, }.each do |source, result| it "should parse and raise error for '#{source}'" do expect { parser.evaluate_string(scope, source, __FILE__) }.to raise_error(Puppet::ParseError) end end { "1 in [1,2,3]" => true, "4 in [1,2,3]" => false, "a in {x=>1, a=>2}" => true, "z in {x=>1, a=>2}" => false, "ana in bananas" => true, "xxx in bananas" => false, "/ana/ in bananas" => true, "/xxx/ in bananas" => false, "ANA in bananas" => false, # ANA is a type, not a String "String[1] in bananas" => false, # Philosophically true though :-) "'ANA' in bananas" => true, "ana in 'BANANAS'" => true, "/ana/ in 'BANANAS'" => false, "/ANA/ in 'BANANAS'" => true, "xxx in 'BANANAS'" => false, "[2,3] in [1,[2,3],4]" => true, "[2,4] in [1,[2,3],4]" => false, "[a,b] in ['A',['A','B'],'C']" => true, "[x,y] in ['A',['A','B'],'C']" => false, "a in {a=>1}" => true, "x in {a=>1}" => false, "'A' in {a=>1}" => true, "'X' in {a=>1}" => false, "a in {'A'=>1}" => true, "x in {'A'=>1}" => false, "/xxx/ in {'aaaxxxbbb'=>1}" => true, "/yyy/ in {'aaaxxxbbb'=>1}" => false, "15 in [1, 0xf]" => true, "15 in [1, '0xf']" => false, "'15' in [1, 0xf]" => false, "15 in [1, 115]" => false, "1 in [11, '111']" => false, "'1' in [11, '111']" => false, "Array[Integer] in [2, 3]" => false, "Array[Integer] in [2, [3, 4]]" => true, "Array[Integer] in [2, [a, 4]]" => false, "Integer in { 2 =>'a'}" => true, "Integer[5,10] in [1,5,3]" => true, "Integer[5,10] in [1,2,3]" => false, "Integer in {'a'=>'a'}" => false, "Integer in {'a'=>1}" => false, }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end { "if /(ana)/ in bananas {$1}" => 'ana', "if /(xyz)/ in bananas {$1} else {$1}" => nil, "$a = bananas =~ /(ana)/; $b = /(xyz)/ in bananas; $1" => 'ana', "$a = xyz =~ /(xyz)/; $b = /(ana)/ in bananas; $1" => 'ana', "if /p/ in [pineapple, bananas] {$0}" => 'p', "if /b/ in [pineapple, bananas] {$0}" => 'b', }.each do |source, result| it "sets match variables for a regexp search using in such that '#{source}' produces '#{result}'" do parser.evaluate_string(scope, source, __FILE__).should == result end end { 'Any' => ['Data', 'Scalar', 'Numeric', 'Integer', 'Float', 'Boolean', 'String', 'Pattern', 'Collection', 'Array', 'Hash', 'CatalogEntry', 'Resource', 'Class', 'Undef', 'File', 'NotYetKnownResourceType'], # Note, Data > Collection is false (so not included) 'Data' => ['Scalar', 'Numeric', 'Integer', 'Float', 'Boolean', 'String', 'Pattern', 'Array', 'Hash',], 'Scalar' => ['Numeric', 'Integer', 'Float', 'Boolean', 'String', 'Pattern'], 'Numeric' => ['Integer', 'Float'], 'CatalogEntry' => ['Class', 'Resource', 'File', 'NotYetKnownResourceType'], 'Integer[1,10]' => ['Integer[2,3]'], }.each do |general, specials| specials.each do |special | it "should compute that #{general} > #{special}" do parser.evaluate_string(scope, 
"#{general} > #{special}", __FILE__).should == true end it "should compute that #{special} < #{general}" do parser.evaluate_string(scope, "#{special} < #{general}", __FILE__).should == true end it "should compute that #{general} != #{special}" do parser.evaluate_string(scope, "#{special} != #{general}", __FILE__).should == true end end end { 'Integer[1,10] > Integer[2,3]' => true, 'Integer[1,10] == Integer[2,3]' => false, 'Integer[1,10] > Integer[0,5]' => false, 'Integer[1,10] > Integer[1,10]' => false, 'Integer[1,10] >= Integer[1,10]' => true, 'Integer[1,10] == Integer[1,10]' => true, }.each do |source, result| it "should parse and evaluate the integer range comparison expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end end context "When the evaluator performs arithmetic" do context "on Integers" do { "2+2" => 4, "2 + 2" => 4, "7 - 3" => 4, "6 * 3" => 18, "6 / 3" => 2, "6 % 3" => 0, "10 % 3" => 1, "-(6/3)" => -2, "-6/3 " => -2, "8 >> 1" => 4, "8 << 1" => 16, }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end context "on Floats" do { "2.2 + 2.2" => 4.4, "7.7 - 3.3" => 4.4, "6.1 * 3.1" => 18.91, "6.6 / 3.3" => 2.0, "-(6.0/3.0)" => -2.0, "-6.0/3.0 " => -2.0, }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end { "3.14 << 2" => :error, "3.14 >> 2" => :error, "6.6 % 3.3" => 0.0, "10.0 % 3.0" => 1.0, }.each do |source, result| it "should parse and raise error for '#{source}'" do expect { parser.evaluate_string(scope, source, __FILE__) }.to raise_error(Puppet::ParseError) end end end context "on strings requiring boxing to Numeric" do { "'2' + '2'" => 4, "'-2' + '2'" => 0, "'- 2' + '2'" => 0, '"-\t 2" + "2"' => 0, "'+2' + '2'" => 4, "'+ 2' + '2'" => 4, "'2.2' + '2.2'" => 4.4, "'-2.2' + '2.2'" => 0.0, "'0xF7' + '010'" => 0xFF, "'0xF7' + '0x8'" => 0xFF, "'0367' + '010'" => 0xFF, "'012.3' + '010'" => 20.3, "'-0x2' + '0x4'" => 2, "'+0x2' + '0x4'" => 6, "'-02' + '04'" => 2, "'+02' + '04'" => 6, }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end { "'0888' + '010'" => :error, "'0xWTF' + '010'" => :error, "'0x12.3' + '010'" => :error, "'0x12.3' + '010'" => :error, '"-\n 2" + "2"' => :error, '"-\v 2" + "2"' => :error, '"-2\n" + "2"' => :error, '"-2\n " + "2"' => :error, }.each do |source, result| it "should parse and raise error for '#{source}'" do expect { parser.evaluate_string(scope, source, __FILE__) }.to raise_error(Puppet::ParseError) end end end end end # arithmetic context "When the evaluator evaluates assignment" do { "$a = 5" => 5, "$a = 5; $a" => 5, "$a = 5; $b = 6; $a" => 5, "$a = $b = 5; $a == $b" => true, }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end { "[a,b,c] = [1,2,3]" => /attempt to assign to 'an Array Expression'/, "[a,b,c] = {b=>2,c=>3,a=>1}" => /attempt to assign to 'an Array Expression'/, }.each do |source, result| it "should parse and evaluate the expression '#{source}' to error with #{result}" do expect { parser.evaluate_string(scope, source, __FILE__)}.to raise_error(Puppet::ParseError, result) end end end context "When the evaluator 
evaluates conditionals" do { "if true {5}" => 5, "if false {5}" => nil, "if false {2} else {5}" => 5, "if false {2} elsif true {5}" => 5, "if false {2} elsif false {5}" => nil, "unless false {5}" => 5, "unless true {5}" => nil, "unless true {2} else {5}" => 5, "unless true {} else {5}" => 5, "$a = if true {5} $a" => 5, "$a = if false {5} $a" => nil, "$a = if false {2} else {5} $a" => 5, "$a = if false {2} elsif true {5} $a" => 5, "$a = if false {2} elsif false {5} $a" => nil, "$a = unless false {5} $a" => 5, "$a = unless true {5} $a" => nil, "$a = unless true {2} else {5} $a" => 5, }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end { "case 1 { 1 : { yes } }" => 'yes', "case 2 { 1,2,3 : { yes} }" => 'yes', "case 2 { 1,3 : { no } 2: { yes} }" => 'yes', "case 2 { 1,3 : { no } 5: { no } default: { yes }}" => 'yes', "case 2 { 1,3 : { no } 5: { no } }" => nil, "case 'banana' { 1,3 : { no } /.*ana.*/: { yes } }" => 'yes', "case 'banana' { /.*(ana).*/: { $1 } }" => 'ana', "case [1] { Array : { yes } }" => 'yes', "case [1] { Array[String] : { no } Array[Integer]: { yes } }" => 'yes', "case 1 { Integer : { yes } Type[Integer] : { no } }" => 'yes', "case Integer { Integer : { no } Type[Integer] : { yes } }" => 'yes', # supports unfold "case ringo { *[paul, john, ringo, george] : { 'beatle' } }" => 'beatle', "case undef { undef : { 'yes' } }" => 'yes', "case undef { *undef : { 'no' } default :{ 'yes' }}" => 'yes', "case [green, 2, whatever] { [/ee/, Integer[0,10], default] : { 'yes' } default :{ 'no' }}" => 'yes', "case {a=>1, b=>2, whatever=>3, extra => ignored} { { a => Integer[0,5], b => Integer[0,5], whatever => default } : { 'yes' } default : { 'no' }}" => 'yes', }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end { "2 ? { 1 => no, 2 => yes}" => 'yes', "3 ? { 1 => no, 2 => no, default => yes }" => 'yes', "3 ? { 1 => no, default => yes, 3 => no }" => 'no', "3 ? { 1 => no, 3 => no, default => yes }" => 'no', "4 ? { 1 => no, default => yes, 3 => no }" => 'yes', "4 ? { 1 => no, 3 => no, default => yes }" => 'yes', "'banana' ? { /.*(ana).*/ => $1 }" => 'ana', "[2] ? { Array[String] => yes, Array => yes}" => 'yes', "ringo ? *[paul, john, ringo, george] => 'beatle'" => 'beatle', "undef ? undef => 'yes'" => 'yes', "undef ? {*undef => 'no', default => 'yes'}" => 'yes', "[green, 2, whatever] ? { [/ee/, Integer[0,10], default ] => 'yes', default => 'no'}" => 'yes', "{a=>1, b=>2, whatever=>3, extra => ignored} ? {{ a => Integer[0,5], b => Integer[0,5], whatever => default } => 'yes', default => 'no' }" => 'yes', }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end it 'fails if a selector does not match' do expect{parser.evaluate_string(scope, "2 ? 
3 => 4")}.to raise_error(/No matching entry for selector parameter with value '2'/) end end context "When evaluator evaluated unfold" do { "*[1,2,3]" => [1,2,3], "*1" => [1], "*'a'" => ['a'] }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end it "should parse and evaluate the expression '*{a=>10, b=>20} to [['a',10],['b',20]]" do result = parser.evaluate_string(scope, '*{a=>10, b=>20}', __FILE__) expect(result).to include(['a', 10]) expect(result).to include(['b', 20]) end end context "When evaluator performs [] operations" do { "[1,2,3][0]" => 1, "[1,2,3][2]" => 3, "[1,2,3][3]" => nil, "[1,2,3][-1]" => 3, "[1,2,3][-2]" => 2, "[1,2,3][-4]" => nil, "[1,2,3,4][0,2]" => [1,2], "[1,2,3,4][1,3]" => [2,3,4], "[1,2,3,4][-2,2]" => [3,4], "[1,2,3,4][-3,2]" => [2,3], "[1,2,3,4][3,5]" => [4], "[1,2,3,4][5,2]" => [], "[1,2,3,4][0,-1]" => [1,2,3,4], "[1,2,3,4][0,-2]" => [1,2,3], "[1,2,3,4][0,-4]" => [1], "[1,2,3,4][0,-5]" => [], "[1,2,3,4][-5,2]" => [1], "[1,2,3,4][-5,-3]" => [1,2], "[1,2,3,4][-6,-3]" => [1,2], "[1,2,3,4][2,-3]" => [], "[1,*[2,3],4]" => [1,2,3,4], "[1,*[2,3],4][1]" => 2, }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end { "{a=>1, b=>2, c=>3}[a]" => 1, "{a=>1, b=>2, c=>3}[c]" => 3, "{a=>1, b=>2, c=>3}[x]" => nil, "{a=>1, b=>2, c=>3}[c,b]" => [3,2], "{a=>1, b=>2, c=>3}[a,b,c]" => [1,2,3], "{a=>{b=>{c=>'it works'}}}[a][b][c]" => 'it works', "$a = {undef => 10} $a[free_lunch]" => nil, "$a = {undef => 10} $a[undef]" => 10, "$a = {undef => 10} $a[$a[free_lunch]]" => 10, "$a = {} $a[free_lunch] == undef" => true, }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end { "'abc'[0]" => 'a', "'abc'[2]" => 'c', "'abc'[-1]" => 'c', "'abc'[-2]" => 'b', "'abc'[-3]" => 'a', "'abc'[-4]" => '', "'abc'[3]" => '', "abc[0]" => 'a', "abc[2]" => 'c', "abc[-1]" => 'c', "abc[-2]" => 'b', "abc[-3]" => 'a', "abc[-4]" => '', "abc[3]" => '', "'abcd'[0,2]" => 'ab', "'abcd'[1,3]" => 'bcd', "'abcd'[-2,2]" => 'cd', "'abcd'[-3,2]" => 'bc', "'abcd'[3,5]" => 'd', "'abcd'[5,2]" => '', "'abcd'[0,-1]" => 'abcd', "'abcd'[0,-2]" => 'abc', "'abcd'[0,-4]" => 'a', "'abcd'[0,-5]" => '', "'abcd'[-5,2]" => 'a', "'abcd'[-5,-3]" => 'ab', "'abcd'[-6,-3]" => 'ab', "'abcd'[2,-3]" => '', }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end # Type operations (full set tested by tests covering type calculator) { "Array[Integer]" => types.array_of(types.integer), "Array[Integer,1]" => types.constrain_size(types.array_of(types.integer),1, :default), "Array[Integer,1,2]" => types.constrain_size(types.array_of(types.integer),1, 2), "Array[Integer,Integer[1,2]]" => types.constrain_size(types.array_of(types.integer),1, 2), "Array[Integer,Integer[1]]" => types.constrain_size(types.array_of(types.integer),1, :default), "Hash[Integer,Integer]" => types.hash_of(types.integer, types.integer), "Hash[Integer,Integer,1]" => types.constrain_size(types.hash_of(types.integer, types.integer),1, :default), "Hash[Integer,Integer,1,2]" => types.constrain_size(types.hash_of(types.integer, types.integer),1, 2), "Hash[Integer,Integer,Integer[1,2]]" => 
types.constrain_size(types.hash_of(types.integer, types.integer),1, 2), "Hash[Integer,Integer,Integer[1]]" => types.constrain_size(types.hash_of(types.integer, types.integer),1, :default), "Resource[File]" => types.resource('File'), "Resource['File']" => types.resource(types.resource('File')), "File[foo]" => types.resource('file', 'foo'), "File[foo, bar]" => [types.resource('file', 'foo'), types.resource('file', 'bar')], "Pattern[a, /b/, Pattern[c], Regexp[d]]" => types.pattern('a', 'b', 'c', 'd'), "String[1,2]" => types.constrain_size(types.string,1, 2), "String[Integer[1,2]]" => types.constrain_size(types.string,1, 2), "String[Integer[1]]" => types.constrain_size(types.string,1, :default), }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end # LHS where [] not supported, and missing key(s) { "Array[]" => :error, "'abc'[]" => :error, "Resource[]" => :error, "File[]" => :error, "String[]" => :error, "1[]" => :error, "3.14[]" => :error, "/.*/[]" => :error, "$a=[1] $a[]" => :error, }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do expect { parser.evaluate_string(scope, source, __FILE__)}.to raise_error(/Syntax error/) end end # Errors when wrong number/type of keys are used { "Array[0]" => 'Array-Type[] arguments must be types. Got Fixnum', "Hash[0]" => 'Hash-Type[] arguments must be types. Got Fixnum', "Hash[Integer, 0]" => 'Hash-Type[] arguments must be types. Got Fixnum', "Array[Integer,1,2,3]" => 'Array-Type[] accepts 1 to 3 arguments. Got 4', "Array[Integer,String]" => "A Type's size constraint arguments must be a single Integer type, or 1-2 integers (or default). Got a String-Type", "Hash[Integer,String, 1,2,3]" => 'Hash-Type[] accepts 2 to 4 arguments. Got 5', "'abc'[x]" => "The value 'x' cannot be converted to Numeric", "'abc'[1.0]" => "A String[] cannot use Float where Integer is expected", "'abc'[1,2,3]" => "String supports [] with one or two arguments. Got 3", "Resource[0]" => 'First argument to Resource[] must be a resource type or a String. Got Integer', "Resource[a, 0]" => 'Error creating type specialization of a Resource-Type, Cannot use Integer where a resource title String is expected', "File[0]" => 'Error creating type specialization of a File-Type, Cannot use Integer where a resource title String is expected', "String[a]" => "A Type's size constraint arguments must be a single Integer type, or 1-2 integers (or default). Got a String", "Pattern[0]" => 'Error creating type specialization of a Pattern-Type, Cannot use Integer where String or Regexp or Pattern-Type or Regexp-Type is expected', "Regexp[0]" => 'Error creating type specialization of a Regexp-Type, Cannot use Integer where String or Regexp is expected', "Regexp[a,b]" => 'A Regexp-Type[] accepts 1 argument. 
Got 2', "true[0]" => "Operator '[]' is not applicable to a Boolean", "1[0]" => "Operator '[]' is not applicable to an Integer", "3.14[0]" => "Operator '[]' is not applicable to a Float", "/.*/[0]" => "Operator '[]' is not applicable to a Regexp", "[1][a]" => "The value 'a' cannot be converted to Numeric", "[1][0.0]" => "An Array[] cannot use Float where Integer is expected", "[1]['0.0']" => "An Array[] cannot use Float where Integer is expected", "[1,2][1, 0.0]" => "An Array[] cannot use Float where Integer is expected", "[1,2][1.0, -1]" => "An Array[] cannot use Float where Integer is expected", "[1,2][1, -1.0]" => "An Array[] cannot use Float where Integer is expected", }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do expect { parser.evaluate_string(scope, source, __FILE__)}.to raise_error(Regexp.new(Regexp.quote(result))) end end context "on catalog types" do it "[n] gets resource parameter [n]" do source = "notify { 'hello': message=>'yo'} Notify[hello][message]" parser.evaluate_string(scope, source, __FILE__).should == 'yo' end it "[n] gets class parameter [n]" do source = "class wonka($produces='chocolate'){ } include wonka Class[wonka][produces]" # This is more complicated since it needs to run like 3.x and do an import_ast adapted_parser = Puppet::Parser::E4ParserAdapter.new adapted_parser.file = __FILE__ ast = adapted_parser.parse(source) Puppet.override({:global_scope => scope}, "test") do scope.known_resource_types.import_ast(ast, '') ast.code.safeevaluate(scope).should == 'chocolate' end end # Resource default and override expressions and resource parameter access with [] { # Properties "notify { id: message=>explicit} Notify[id][message]" => "explicit", "Notify { message=>by_default} notify {foo:} Notify[foo][message]" => "by_default", "notify {foo:} Notify[foo]{message =>by_override} Notify[foo][message]" => "by_override", # Parameters "notify { id: withpath=>explicit} Notify[id][withpath]" => "explicit", "Notify { withpath=>by_default } notify { foo: } Notify[foo][withpath]" => "by_default", "notify {foo:} Notify[foo]{withpath=>by_override} Notify[foo][withpath]" => "by_override", # Metaparameters "notify { foo: tag => evoe} Notify[foo][tag]" => "evoe", # Does not produce the defaults for tag parameter (title, type or names of scopes) "notify { foo: } Notify[foo][tag]" => nil, # But a default may be specified on the type "Notify { tag=>by_default } notify { foo: } Notify[foo][tag]" => "by_default", "Notify { tag=>by_default } notify { foo: } Notify[foo]{ tag=>by_override } Notify[foo][tag]" => "by_override", }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end # Virtual and realized resource default and overridden resource parameter access with [] { # Properties "@notify { id: message=>explicit } Notify[id][message]" => "explicit", "@notify { id: message=>explicit } realize Notify[id] Notify[id][message]" => "explicit", "Notify { message=>by_default } @notify { id: } Notify[id][message]" => "by_default", "Notify { message=>by_default } @notify { id: tag=>thisone } Notify <| tag == thisone |>; Notify[id][message]" => "by_default", "@notify { id: } Notify[id]{message=>by_override} Notify[id][message]" => "by_override", # Parameters "@notify { id: withpath=>explicit } Notify[id][withpath]" => "explicit", "Notify { withpath=>by_default } @notify { id: } Notify[id][withpath]" => "by_default", "@notify { id: } 
realize Notify[id] Notify[id]{withpath=>by_override} Notify[id][withpath]" => "by_override", # Metaparameters "@notify { id: tag=>explicit } Notify[id][tag]" => "explicit", }.each do |source, result| it "parses and evaluates virtual and realized resources in the expression '#{source}' to #{result}" do expect(parser.evaluate_string(scope, source, __FILE__)).to eq(result) end end # Exported resource attributes { "@@notify { id: message=>explicit } Notify[id][message]" => "explicit", "@@notify { id: message=>explicit, tag=>thisone } Notify <<| tag == thisone |>> Notify[id][message]" => "explicit", }.each do |source, result| it "parses and evaluates exported resources in the expression '#{source}' to #{result}" do expect(parser.evaluate_string(scope, source, __FILE__)).to eq(result) end end # Resource default and override expressions and resource parameter access error conditions { "notify { xid: message=>explicit} Notify[id][message]" => /Resource not found/, "notify { id: message=>explicit} Notify[id][mustard]" => /does not have a parameter called 'mustard'/, # NOTE: these meta-esque parameters are not recognized as such "notify { id: message=>explicit} Notify[id][title]" => /does not have a parameter called 'title'/, "notify { id: message=>explicit} Notify[id]['type']" => /does not have a parameter called 'type'/, "notify { id: message=>explicit } Notify[id]{message=>override}" => /'message' is already set on Notify\[id\]/ }.each do |source, result| it "should parse '#{source}' and raise error matching #{result}" do expect { parser.evaluate_string(scope, source, __FILE__)}.to raise_error(result) end end context 'with errors' do { "Class['fail-whale']" => /Illegal name/, "Class[0]" => /An Integer cannot be used where a String is expected/, "Class[/.*/]" => /A Regexp cannot be used where a String is expected/, "Class[4.1415]" => /A Float cannot be used where a String is expected/, "Class[Integer]" => /An Integer-Type cannot be used where a String is expected/, "Class[File['tmp']]" => /A File\['tmp'\] Resource-Reference cannot be used where a String is expected/, }.each do | source, error_pattern| it "an error is flagged for '#{source}'" do expect { parser.evaluate_string(scope, source, __FILE__)}.to raise_error(error_pattern) end end end end # end [] operations end context "When the evaluator performs boolean operations" do { "true and true" => true, "false and true" => false, "true and false" => false, "false and false" => false, "true or true" => true, "false or true" => true, "true or false" => true, "false or false" => false, "! true" => false, "!! true" => true, "!! false" => false, "! 'x'" => false, "! ''" => false, "! undef" => true, "! [a]" => false, "! []" => false, "! {a=>1}" => false, "! 
{}" => false, "true and false and '0xwtf' + 1" => false, "false or true or '0xwtf' + 1" => true, }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do parser.evaluate_string(scope, source, __FILE__).should == result end end { "false || false || '0xwtf' + 1" => :error, }.each do |source, result| it "should parse and raise error for '#{source}'" do expect { parser.evaluate_string(scope, source, __FILE__) }.to raise_error(Puppet::ParseError) end end end context "When evaluator performs operations on literal undef" do it "computes non existing hash lookup as undef" do parser.evaluate_string(scope, "{a => 1}[b] == undef", __FILE__).should == true parser.evaluate_string(scope, "undef == {a => 1}[b]", __FILE__).should == true end end context "When evaluator performs calls" do let(:populate) do parser.evaluate_string(scope, "$a = 10 $b = [1,2,3]") end { 'sprintf( "x%iy", $a )' => "x10y", # unfolds 'sprintf( *["x%iy", $a] )' => "x10y", '"x%iy".sprintf( $a )' => "x10y", '$b.reduce |$memo,$x| { $memo + $x }' => 6, 'reduce($b) |$memo,$x| { $memo + $x }' => 6, }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do populate parser.evaluate_string(scope, source, __FILE__).should == result end end { '"value is ${a*2} yo"' => :error, }.each do |source, result| it "should parse and raise error for '#{source}'" do expect { parser.evaluate_string(scope, source, __FILE__) }.to raise_error(Puppet::ParseError) end end it "provides location information on error in unparenthesized call logic" do expect{parser.evaluate_string(scope, "include non_existing_class", __FILE__)}.to raise_error(Puppet::ParseError, /line 1\:1/) end it 'defaults can be given in a lambda and used only when arg is missing' do env_loader = @compiler.loaders.public_environment_loader fc = Puppet::Functions.create_function(:test) do dispatch :test do param 'Integer', 'count' required_block_param end def test(count) yield(*[].fill(10, 0, count)) end end the_func = fc.new({}, env_loader) env_loader.add_entry(:function, 'test', the_func, __FILE__) expect(parser.evaluate_string(scope, "test(1) |$x, $y=20| { $x + $y}")).to eql(30) expect(parser.evaluate_string(scope, "test(2) |$x, $y=20| { $x + $y}")).to eql(20) end it 'a given undef does not select the default value' do env_loader = @compiler.loaders.public_environment_loader fc = Puppet::Functions.create_function(:test) do dispatch :test do param 'Any', 'lambda_arg' required_block_param end def test(lambda_arg) yield(lambda_arg) end end the_func = fc.new({}, env_loader) env_loader.add_entry(:function, 'test', the_func, __FILE__) expect(parser.evaluate_string(scope, "test(undef) |$x=20| { $x == undef}")).to eql(true) end it 'a given undef is given as nil' do env_loader = @compiler.loaders.public_environment_loader fc = Puppet::Functions.create_function(:assert_no_undef) do dispatch :assert_no_undef do param 'Any', 'x' end def assert_no_undef(x) case x when Array return unless x.include?(:undef) when Hash return unless x.keys.include?(:undef) || x.values.include?(:undef) else return unless x == :undef end raise "contains :undef" end end the_func = fc.new({}, env_loader) env_loader.add_entry(:function, 'assert_no_undef', the_func, __FILE__) expect{parser.evaluate_string(scope, "assert_no_undef(undef)")}.to_not raise_error() expect{parser.evaluate_string(scope, "assert_no_undef([undef])")}.to_not raise_error() expect{parser.evaluate_string(scope, "assert_no_undef({undef => 1})")}.to_not raise_error() 
expect{parser.evaluate_string(scope, "assert_no_undef({1 => undef})")}.to_not raise_error() end context 'using the 3x function api' do it 'can call a 3x function' do Puppet::Parser::Functions.newfunction("bazinga", :type => :rvalue) { |args| args[0] } parser.evaluate_string(scope, "bazinga(42)", __FILE__).should == 42 end it 'maps :undef to empty string' do Puppet::Parser::Functions.newfunction("bazinga", :type => :rvalue) { |args| args[0] } parser.evaluate_string(scope, "$a = {} bazinga($a[nope])", __FILE__).should == '' parser.evaluate_string(scope, "bazinga(undef)", __FILE__).should == '' end it 'does not map :undef to empty string in arrays' do Puppet::Parser::Functions.newfunction("bazinga", :type => :rvalue) { |args| args[0][0] } parser.evaluate_string(scope, "$a = {} $b = [$a[nope]] bazinga($b)", __FILE__).should == :undef parser.evaluate_string(scope, "bazinga([undef])", __FILE__).should == :undef end it 'does not map :undef to empty string in hashes' do Puppet::Parser::Functions.newfunction("bazinga", :type => :rvalue) { |args| args[0]['a'] } parser.evaluate_string(scope, "$a = {} $b = {a => $a[nope]} bazinga($b)", __FILE__).should == :undef parser.evaluate_string(scope, "bazinga({a => undef})", __FILE__).should == :undef end end end context "When evaluator performs string interpolation" do let(:populate) do parser.evaluate_string(scope, "$a = 10 $b = [1,2,3]") end { '"value is $a yo"' => "value is 10 yo", '"value is \$a yo"' => "value is $a yo", '"value is ${a} yo"' => "value is 10 yo", '"value is \${a} yo"' => "value is ${a} yo", '"value is ${$a} yo"' => "value is 10 yo", '"value is ${$a*2} yo"' => "value is 20 yo", '"value is ${sprintf("x%iy",$a)} yo"' => "value is x10y yo", '"value is ${"x%iy".sprintf($a)} yo"' => "value is x10y yo", '"value is ${[1,2,3]} yo"' => "value is [1, 2, 3] yo", '"value is ${/.*/} yo"' => "value is /.*/ yo", '$x = undef "value is $x yo"' => "value is yo", '$x = default "value is $x yo"' => "value is default yo", '$x = Array[Integer] "value is $x yo"' => "value is Array[Integer] yo", '"value is ${Array[Integer]} yo"' => "value is Array[Integer] yo", }.each do |source, result| it "should parse and evaluate the expression '#{source}' to #{result}" do populate parser.evaluate_string(scope, source, __FILE__).should == result end end it "should parse and evaluate an interpolation of a hash" do source = '"value is ${{a=>1,b=>2}} yo"' # This test requires testing against two options because a hash to string # produces a result that is unordered hashstr = {'a' => 1, 'b' => 2}.to_s alt_results = ["value is {a => 1, b => 2} yo", "value is {b => 2, a => 1} yo" ] populate parse_result = parser.evaluate_string(scope, source, __FILE__) alt_results.include?(parse_result).should == true end it 'should accept a variable with leading underscore when used directly' do source = '$_x = 10; "$_x"' expect(parser.evaluate_string(scope, source, __FILE__)).to eql('10') end it 'should accept a variable with leading underscore when used as an expression' do source = '$_x = 10; "${_x}"' expect(parser.evaluate_string(scope, source, __FILE__)).to eql('10') end it 'should accept a numeric variable expressed as $n' do source = '$x = "abc123def" =~ /(abc)(123)(def)/; "${$2}"' expect(parser.evaluate_string(scope, source, __FILE__)).to eql('123') end it 'should accept a numeric variable expressed as just an integer' do source = '$x = "abc123def" =~ /(abc)(123)(def)/; "${2}"' expect(parser.evaluate_string(scope, source, __FILE__)).to eql('123') end it 'should accept a numeric variable 
expressed as $n in an access operation' do source = '$x = "abc123def" =~ /(abc)(123)(def)/; "${$0[4,3]}"' expect(parser.evaluate_string(scope, source, __FILE__)).to eql('23d') end it 'should accept a numeric variable expressed as just an integer in an access operation' do source = '$x = "abc123def" =~ /(abc)(123)(def)/; "${0[4,3]}"' expect(parser.evaluate_string(scope, source, __FILE__)).to eql('23d') end { '"value is ${a*2} yo"' => :error, }.each do |source, result| it "should parse and raise error for '#{source}'" do expect { parser.evaluate_string(scope, source, __FILE__) }.to raise_error(Puppet::ParseError) end end end context "When evaluating variables" do context "that are non existing an error is raised for" do it "unqualified variable" do expect { parser.evaluate_string(scope, "$quantum_gravity", __FILE__) }.to raise_error(/Unknown variable/) end it "qualified variable" do expect { parser.evaluate_string(scope, "$quantum_gravity::graviton", __FILE__) }.to raise_error(/Unknown variable/) end end it "a lex error should be raised for '$foo::::bar'" do - expect { parser.evaluate_string(scope, "$foo::::bar") }.to raise_error(Puppet::LexError, /Illegal fully qualified name at line 1:7/) + expect { parser.evaluate_string(scope, "$foo::::bar") }.to raise_error(Puppet::ParseErrorWithIssue, /Illegal fully qualified name at line 1:7/) end { '$a = $0' => nil, '$a = $1' => nil, }.each do |source, value| it "it is ok to reference numeric unassigned variables '#{source}'" do parser.evaluate_string(scope, source, __FILE__).should == value end end { '$00 = 0' => /must be a decimal value/, '$0xf = 0' => /must be a decimal value/, '$0777 = 0' => /must be a decimal value/, '$123a = 0' => /must be a decimal value/, }.each do |source, error_pattern| it "should raise an error for '#{source}'" do expect { parser.evaluate_string(scope, source, __FILE__) }.to raise_error(error_pattern) end end context "an initial underscore in the last segment of a var name is allowed" do { '$_a = 1' => 1, '$__a = 1' => 1, }.each do |source, value| it "as in this example '#{source}'" do parser.evaluate_string(scope, source, __FILE__).should == value end end end end context "When evaluating relationships" do it 'should form a relation with File[a] -> File[b]' do source = "File[a] -> File[b]" parser.evaluate_string(scope, source, __FILE__) scope.compiler.should have_relationship(['File', 'a', '->', 'File', 'b']) end it 'should form a relation with resource -> resource' do source = "notify{a:} -> notify{b:}" parser.evaluate_string(scope, source, __FILE__) scope.compiler.should have_relationship(['Notify', 'a', '->', 'Notify', 'b']) end it 'should form a relation with [File[a], File[b]] -> [File[x], File[y]]' do source = "[File[a], File[b]] -> [File[x], File[y]]" parser.evaluate_string(scope, source, __FILE__) scope.compiler.should have_relationship(['File', 'a', '->', 'File', 'x']) scope.compiler.should have_relationship(['File', 'b', '->', 'File', 'x']) scope.compiler.should have_relationship(['File', 'a', '->', 'File', 'y']) scope.compiler.should have_relationship(['File', 'b', '->', 'File', 'y']) end it 'should tolerate (eliminate) duplicates in operands' do source = "[File[a], File[a]] -> File[x]" parser.evaluate_string(scope, source, __FILE__) scope.compiler.should have_relationship(['File', 'a', '->', 'File', 'x']) scope.compiler.relationships.size.should == 1 end it 'should form a relation with <-' do source = "File[a] <- File[b]" parser.evaluate_string(scope, source, __FILE__) scope.compiler.should 
have_relationship(['File', 'b', '->', 'File', 'a']) end it 'should form a relation with <~' do source = "File[a] <~ File[b]" parser.evaluate_string(scope, source, __FILE__) scope.compiler.should have_relationship(['File', 'b', '~>', 'File', 'a']) end end context "When evaluating heredoc" do it "evaluates plain heredoc" do src = "@(END)\nThis is\nheredoc text\nEND\n" parser.evaluate_string(scope, src).should == "This is\nheredoc text\n" end it "parses heredoc with margin" do src = [ "@(END)", " This is", " heredoc text", " | END", "" ].join("\n") parser.evaluate_string(scope, src).should == "This is\nheredoc text\n" end it "parses heredoc with margin and right newline trim" do src = [ "@(END)", " This is", " heredoc text", " |- END", "" ].join("\n") parser.evaluate_string(scope, src).should == "This is\nheredoc text" end it "parses escape specification" do src = <<-CODE @(END/t) Tex\\tt\\n |- END CODE parser.evaluate_string(scope, src).should == "Tex\tt\\n" end it "parses syntax checked specification" do src = <<-CODE @(END:json) ["foo", "bar"] |- END CODE parser.evaluate_string(scope, src).should == '["foo", "bar"]' end it "parses syntax checked specification with error and reports it" do src = <<-CODE @(END:json) ['foo', "bar"] |- END CODE expect { parser.evaluate_string(scope, src)}.to raise_error(/Cannot parse invalid JSON string/) end it "parses interpolated heredoc expression" do src = <<-CODE $name = 'Fjodor' @("END") Hello $name |- END CODE parser.evaluate_string(scope, src).should == "Hello Fjodor" end end context "Handles Deprecations and Discontinuations" do it 'of import statements' do source = "\nimport foo" # Error references line 2 position 1, where the discontinued 'import' keyword appears # Set file to nil to make it easier to match with line number (no file name in output) expect { parser.evaluate_string(scope, source) }.to raise_error(/'import' has been discontinued.*line 2:1/) end end context "Detailed Error messages are reported" do it 'for illegal type references' do source = '1+1 { "title": }' # Error references position 5 at the opening '{' # Set file to nil to make it easier to match with line number (no file name in output) expect { parser.evaluate_string(scope, source) }.to raise_error( /Illegal Resource Type expression, expected result to be a type name, or untitled Resource.*line 1:2/) end it 'for non r-value producing <| |>' do expect { parser.parse_string("$a = File <| |>", nil) }.to raise_error(/A Virtual Query does not produce a value at line 1:6/) end it 'for non r-value producing <<| |>>' do expect { parser.parse_string("$a = File <<| |>>", nil) }.to raise_error(/An Exported Query does not produce a value at line 1:6/) end it 'for non r-value producing define' do Puppet::Util::Log.expects(:create).with(has_entries(:level => :err, :message => "Invalid use of expression. A 'define' expression does not produce a value", :line => 1, :pos => 6)) Puppet::Util::Log.expects(:create).with(has_entries(:level => :err, :message => 'Classes, definitions, and nodes may only appear at toplevel or inside other classes', :line => 1, :pos => 6)) expect { parser.parse_string("$a = define foo { }", nil) }.to raise_error(/2 errors/) end it 'for non r-value producing class' do Puppet::Util::Log.expects(:create).with(has_entries(:level => :err, :message => 'Invalid use of expression. 
A Host Class Definition does not produce a value', :line => 1, :pos => 6)) Puppet::Util::Log.expects(:create).with(has_entries(:level => :err, :message => 'Classes, definitions, and nodes may only appear at toplevel or inside other classes', :line => 1, :pos => 6)) expect { parser.parse_string("$a = class foo { }", nil) }.to raise_error(/2 errors/) end it 'for unclosed quote with indication of start position of string' do source = <<-SOURCE.gsub(/^ {6}/,'') $a = "xx yyy SOURCE # first char after opening " reported as being in error. expect { parser.parse_string(source) }.to raise_error(/Unclosed quote after '"' followed by 'xx\\nyy\.\.\.' at line 1:7/) end it 'for multiple errors with a summary exception' do Puppet::Util::Log.expects(:create).with(has_entries(:level => :err, :message => 'Invalid use of expression. A Node Definition does not produce a value', :line => 1, :pos => 6)) Puppet::Util::Log.expects(:create).with(has_entries(:level => :err, :message => 'Classes, definitions, and nodes may only appear at toplevel or inside other classes', :line => 1, :pos => 6)) expect { parser.parse_string("$a = node x { }",nil) }.to raise_error(/2 errors/) end it 'for a bad hostname' do expect { parser.parse_string("node 'macbook+owned+by+name' { }", nil) }.to raise_error(/The hostname 'macbook\+owned\+by\+name' contains illegal characters.*at line 1:6/) end it 'for a hostname with interpolation' do source = <<-SOURCE.gsub(/^ {6}/,'') $name = 'fred' node "macbook-owned-by$name" { } SOURCE expect { parser.parse_string(source, nil) }.to raise_error(/An interpolated expression is not allowed in a hostname of a node at line 2:23/) end end context 'does not leak variables' do it 'local variables are gone when lambda ends' do source = <<-SOURCE [1,2,3].each |$x| { $y = $x} $a = $y SOURCE expect do parser.evaluate_string(scope, source) end.to raise_error(/Unknown variable: 'y'/) end it 'lambda parameters are gone when lambda ends' do source = <<-SOURCE [1,2,3].each |$x| { $y = $x} $a = $x SOURCE expect do parser.evaluate_string(scope, source) end.to raise_error(/Unknown variable: 'x'/) end it 'does not leak match variables' do source = <<-SOURCE if 'xyz' =~ /(x)(y)(z)/ { notice $2 } case 'abc' { /(a)(b)(c)/ : { $x = $2 } } "-$x-$2-" SOURCE expect(parser.evaluate_string(scope, source)).to eq('-b--') end end matcher :have_relationship do |expected| calc = Puppet::Pops::Types::TypeCalculator.new match do |compiler| op_name = {'->' => :relationship, '~>' => :subscription} compiler.relationships.any? 
do | relation | relation.source.type == expected[0] && relation.source.title == expected[1] && relation.type == op_name[expected[2]] && relation.target.type == expected[3] && relation.target.title == expected[4] end end failure_message_for_should do |actual| "Relationship #{expected[0]}[#{expected[1]}] #{expected[2]} #{expected[3]}[#{expected[4]}] but was unknown to compiler" end end end diff --git a/spec/unit/pops/parser/lexer2_spec.rb b/spec/unit/pops/parser/lexer2_spec.rb index 9c915a8f4..0250888da 100644 --- a/spec/unit/pops/parser/lexer2_spec.rb +++ b/spec/unit/pops/parser/lexer2_spec.rb @@ -1,492 +1,618 @@ require 'spec_helper' require 'matchers/match_tokens2' require 'puppet/pops' require 'puppet/pops/parser/lexer2' module EgrammarLexer2Spec def tokens_scanned_from(s) lexer = Puppet::Pops::Parser::Lexer2.new lexer.string = s tokens = lexer.fullscan[0..-2] end def epp_tokens_scanned_from(s) lexer = Puppet::Pops::Parser::Lexer2.new lexer.string = s tokens = lexer.fullscan_epp[0..-2] end end describe 'Lexer2' do include EgrammarLexer2Spec { :LISTSTART => '[', :RBRACK => ']', :LBRACE => '{', :RBRACE => '}', :LPAREN => '(', :RPAREN => ')', :EQUALS => '=', :ISEQUAL => '==', :GREATEREQUAL => '>=', :GREATERTHAN => '>', :LESSTHAN => '<', :LESSEQUAL => '<=', :NOTEQUAL => '!=', :NOT => '!', :COMMA => ',', :DOT => '.', :COLON => ':', :AT => '@', :LLCOLLECT => '<<|', :RRCOLLECT => '|>>', :LCOLLECT => '<|', :RCOLLECT => '|>', :SEMIC => ';', :QMARK => '?', :OTHER => '\\', :FARROW => '=>', :PARROW => '+>', :APPENDS => '+=', :DELETES => '-=', :PLUS => '+', :MINUS => '-', :DIV => '/', :TIMES => '*', :LSHIFT => '<<', :RSHIFT => '>>', :MATCH => '=~', :NOMATCH => '!~', :IN_EDGE => '->', :OUT_EDGE => '<-', :IN_EDGE_SUB => '~>', :OUT_EDGE_SUB => '<~', :PIPE => '|', }.each do |name, string| it "should lex a token named #{name.to_s}" do tokens_scanned_from(string).should match_tokens2(name) end end it "should lex [ in position after non whitespace as LBRACK" do tokens_scanned_from("a[").should match_tokens2(:NAME, :LBRACK) end { "case" => :CASE, "class" => :CLASS, "default" => :DEFAULT, "define" => :DEFINE, # "import" => :IMPORT, # done as a function in egrammar "if" => :IF, "elsif" => :ELSIF, "else" => :ELSE, "inherits" => :INHERITS, "node" => :NODE, "and" => :AND, "or" => :OR, "undef" => :UNDEF, "false" => :BOOLEAN, "true" => :BOOLEAN, "in" => :IN, "unless" => :UNLESS, }.each do |string, name| it "should lex a keyword from '#{string}'" do tokens_scanned_from(string).should match_tokens2(name) end end # TODO: Complete with all edge cases [ 'A', 'A::B', '::A', '::A::B',].each do |string| it "should lex a CLASSREF on the form '#{string}'" do tokens_scanned_from(string).should match_tokens2([:CLASSREF, string]) end end # TODO: Complete with all edge cases [ 'a', 'a::b', '::a', '::a::b',].each do |string| it "should lex a NAME on the form '#{string}'" do tokens_scanned_from(string).should match_tokens2([:NAME, string]) end end [ 'a-b', 'a--b', 'a-b-c', '_x'].each do |string| it "should lex a BARE WORD STRING on the form '#{string}'" do tokens_scanned_from(string).should match_tokens2([:WORD, string]) end end [ '_x::y', 'x::_y'].each do |string| it "should consider the bare word '#{string}' to be a WORD" do tokens_scanned_from(string).should match_tokens2(:WORD) end end { '-a' => [:MINUS, :NAME], '--a' => [:MINUS, :MINUS, :NAME], 'a-' => [:NAME, :MINUS], 'a- b' => [:NAME, :MINUS, :NAME], 'a--' => [:NAME, :MINUS, :MINUS], 'a-$3' => [:NAME, :MINUS, :VARIABLE], }.each do |source, expected| it "should lex leading 
and trailing hyphens from #{source}" do tokens_scanned_from(source).should match_tokens2(*expected) end end { 'false'=>false, 'true'=>true}.each do |string, value| it "should lex a BOOLEAN on the form '#{string}'" do tokens_scanned_from(string).should match_tokens2([:BOOLEAN, value]) end end [ '0', '1', '2982383139'].each do |string| it "should lex a decimal integer NUMBER on the form '#{string}'" do tokens_scanned_from(string).should match_tokens2([:NUMBER, string]) end end { ' 1' => '1', '1 ' => '1', ' 1 ' => '1'}.each do |string, value| it "should lex a NUMBER with surrounding space '#{string}'" do tokens_scanned_from(string).should match_tokens2([:NUMBER, value]) end end [ '0.0', '0.1', '0.2982383139', '29823.235', '10e23', '10e-23', '1.234e23'].each do |string| it "should lex a decimal floating point NUMBER on the form '#{string}'" do tokens_scanned_from(string).should match_tokens2([:NUMBER, string]) end end [ '00', '01', '0123', '0777'].each do |string| it "should lex an octal integer NUMBER on the form '#{string}'" do tokens_scanned_from(string).should match_tokens2([:NUMBER, string]) end end [ '0x0', '0x1', '0xa', '0xA', '0xabcdef', '0xABCDEF'].each do |string| it "should lex an hex integer NUMBER on the form '#{string}'" do tokens_scanned_from(string).should match_tokens2([:NUMBER, string]) end end { "''" => '', "'a'" => 'a', "'a\\'b'" =>"a'b", "'a\\rb'" =>"a\\rb", "'a\\nb'" =>"a\\nb", "'a\\tb'" =>"a\\tb", "'a\\sb'" =>"a\\sb", "'a\\$b'" =>"a\\$b", "'a\\\"b'" =>"a\\\"b", "'a\\\\b'" =>"a\\b", "'a\\\\'" =>"a\\", }.each do |source, expected| it "should lex a single quoted STRING on the form #{source}" do tokens_scanned_from(source).should match_tokens2([:STRING, expected]) end end { "''" => [2, ""], "'a'" => [3, "a"], "'a\\'b'" => [6, "a'b"], }.each do |source, expected| it "should lex a single quoted STRING on the form #{source} as having length #{expected[0]}" do length, value = expected tokens_scanned_from(source).should match_tokens2([:STRING, value, {:line => 1, :pos=>1, :length=> length}]) end end { '""' => '', '"a"' => 'a', '"a\'b"' => "a'b", }.each do |source, expected| it "should lex a double quoted STRING on the form #{source}" do tokens_scanned_from(source).should match_tokens2([:STRING, expected]) end end { '"a$x b"' => [[:DQPRE, 'a', {:line => 1, :pos=>1, :length=>2 }], [:VARIABLE, 'x', {:line => 1, :pos=>3, :length=>2 }], [:DQPOST, ' b', {:line => 1, :pos=>5, :length=>3 }]], '"a$x.b"' => [[:DQPRE, 'a', {:line => 1, :pos=>1, :length=>2 }], [:VARIABLE, 'x', {:line => 1, :pos=>3, :length=>2 }], [:DQPOST, '.b', {:line => 1, :pos=>5, :length=>3 }]], '"$x.b"' => [[:DQPRE, '', {:line => 1, :pos=>1, :length=>1 }], [:VARIABLE, 'x', {:line => 1, :pos=>2, :length=>2 }], [:DQPOST, '.b', {:line => 1, :pos=>4, :length=>3 }]], '"a$x"' => [[:DQPRE, 'a', {:line => 1, :pos=>1, :length=>2 }], [:VARIABLE, 'x', {:line => 1, :pos=>3, :length=>2 }], [:DQPOST, '', {:line => 1, :pos=>5, :length=>1 }]], '"a${x}"' => [[:DQPRE, 'a', {:line => 1, :pos=>1, :length=>4 }], [:VARIABLE, 'x', {:line => 1, :pos=>5, :length=>1 }], [:DQPOST, '', {:line => 1, :pos=>7, :length=>1 }]], '"a${_x}"' => [[:DQPRE, 'a', {:line => 1, :pos=>1, :length=>4 }], [:VARIABLE, '_x', {:line => 1, :pos=>5, :length=>2 }], [:DQPOST, '', {:line => 1, :pos=>8, :length=>1 }]], '"a${y::_x}"' => [[:DQPRE, 'a', {:line => 1, :pos=>1, :length=>4 }], [:VARIABLE, 'y::_x', {:line => 1, :pos=>5, :length=>5 }], [:DQPOST, '', {:line => 1, :pos=>11, :length=>1 }]], }.each do |source, expected| it "should lex an interpolated variable 'x' from 
#{source}" do tokens_scanned_from(source).should match_tokens2(*expected) end end { '"$"' => '$', '"a$"' => 'a$', '"a$%b"' => "a$%b", '"a$$"' => "a$$", '"a$$%"' => "a$$%", }.each do |source, expected| it "should lex interpolation including false starts #{source}" do tokens_scanned_from(source).should match_tokens2([:STRING, expected]) end end it "differentiates between foo[x] and foo [x] (whitespace)" do tokens_scanned_from("$a[1]").should match_tokens2(:VARIABLE, :LBRACK, :NUMBER, :RBRACK) tokens_scanned_from("$a [1]").should match_tokens2(:VARIABLE, :LISTSTART, :NUMBER, :RBRACK) tokens_scanned_from("a[1]").should match_tokens2(:NAME, :LBRACK, :NUMBER, :RBRACK) tokens_scanned_from("a [1]").should match_tokens2(:NAME, :LISTSTART, :NUMBER, :RBRACK) tokens_scanned_from(" if \n\r\t\nif if ").should match_tokens2(:IF, :IF, :IF) end it "skips whitepsace" do tokens_scanned_from(" if if if ").should match_tokens2(:IF, :IF, :IF) tokens_scanned_from(" if \n\r\t\nif if ").should match_tokens2(:IF, :IF, :IF) end it "skips single line comments" do tokens_scanned_from("if # comment\nif").should match_tokens2(:IF, :IF) end ["if /* comment */\nif", "if /* comment\n */\nif", "if /*\n comment\n */\nif", ].each do |source| it "skips multi line comments" do tokens_scanned_from(source).should match_tokens2(:IF, :IF) end end { "=~" => [:MATCH, "=~ /./"], "!~" => [:NOMATCH, "!~ /./"], "," => [:COMMA, ", /./"], "(" => [:LPAREN, "( /./"], "[" => [:LISTSTART, "[ /./"], "[" => [[:NAME, :LBRACK], "a[ /./"], "[" => [[:NAME, :LISTSTART], "a [ /./"], "{" => [:LBRACE, "{ /./"], "+" => [:PLUS, "+ /./"], "-" => [:MINUS, "- /./"], "*" => [:TIMES, "* /./"], ";" => [:SEMIC, "; /./"], }.each do |token, entry| it "should lex regexp after '#{token}'" do expected = [entry[0], :REGEX].flatten tokens_scanned_from(entry[1]).should match_tokens2(*expected) end end it "should lex a simple expression" do tokens_scanned_from('1 + 1').should match_tokens2([:NUMBER, '1'], :PLUS, [:NUMBER, '1']) end { "1" => ["1 /./", [:NUMBER, :DIV, :DOT, :DIV]], "'a'" => ["'a' /./", [:STRING, :DIV, :DOT, :DIV]], "true" => ["true /./", [:BOOLEAN, :DIV, :DOT, :DIV]], "false" => ["false /./", [:BOOLEAN, :DIV, :DOT, :DIV]], "/./" => ["/./ /./", [:REGEX, :DIV, :DOT, :DIV]], "a" => ["a /./", [:NAME, :DIV, :DOT, :DIV]], "A" => ["A /./", [:CLASSREF, :DIV, :DOT, :DIV]], ")" => [") /./", [:RPAREN, :DIV, :DOT, :DIV]], "]" => ["] /./", [:RBRACK, :DIV, :DOT, :DIV]], "|>" => ["|> /./", [:RCOLLECT, :DIV, :DOT, :DIV]], "|>>" => ["|>> /./", [:RRCOLLECT, :DIV, :DOT, :DIV]], "$x" => ["$x /1/", [:VARIABLE, :DIV, :NUMBER, :DIV]], "a-b" => ["a-b /1/", [:WORD, :DIV, :NUMBER, :DIV]], '"a$a"' => ['"a$a" /./', [:DQPRE, :VARIABLE, :DQPOST, :DIV, :DOT, :DIV]], }.each do |token, entry| it "should not lex regexp after '#{token}'" do tokens_scanned_from(entry[ 0 ]).should match_tokens2(*entry[ 1 ]) end end it 'should lex assignment' do tokens_scanned_from("$a = 10").should match_tokens2([:VARIABLE, "a"], :EQUALS, [:NUMBER, '10']) end # TODO: Tricky, and heredoc not supported yet # it "should not lex regexp after heredoc" do # tokens_scanned_from("1 / /./").should match_tokens2(:NUMBER, :DIV, :REGEX) # end it "should lex regexp at beginning of input" do tokens_scanned_from(" /./").should match_tokens2(:REGEX) end it "should lex regexp right of div" do tokens_scanned_from("1 / /./").should match_tokens2(:NUMBER, :DIV, :REGEX) end context 'when lexer lexes heredoc' do it 'lexes tag, syntax and escapes, margin and right trim' do code = <<-CODE @(END:syntax/t) Tex\\tt\\n |- END CODE 
tokens_scanned_from(code).should match_tokens2([:HEREDOC, 'syntax'], :SUBLOCATE, [:STRING, "Tex\tt\\n"]) end it 'lexes "tag", syntax and escapes, margin, right trim and interpolation' do code = <<-CODE @("END":syntax/t) Tex\\tt\\n$var After |- END CODE tokens_scanned_from(code).should match_tokens2( [:HEREDOC, 'syntax'], :SUBLOCATE, [:DQPRE, "Tex\tt\\n"], [:VARIABLE, "var"], [:DQPOST, " After"] ) end + + context 'with bad syntax' do + def expect_issue(code, issue) + expect { tokens_scanned_from(code) }.to raise_error(Puppet::ParseErrorWithIssue) { |e| + expect(e.issue_code).to be(issue.issue_code) + } + end + + it 'detects and reports HEREDOC_UNCLOSED_PARENTHESIS' do + code = <<-CODE + @(END:syntax/t + Text + |- END + CODE + expect_issue(code, Puppet::Pops::Issues::HEREDOC_UNCLOSED_PARENTHESIS) + end + + it 'detects and reports HEREDOC_WITHOUT_END_TAGGED_LINE' do + code = <<-CODE + @(END:syntax/t) + Text + CODE + expect_issue(code, Puppet::Pops::Issues::HEREDOC_WITHOUT_END_TAGGED_LINE) + end + + it 'detects and reports HEREDOC_INVALID_ESCAPE' do + code = <<-CODE + @(END:syntax/x) + Text + |- END + CODE + expect_issue(code, Puppet::Pops::Issues::HEREDOC_INVALID_ESCAPE) + end + + it 'detects and reports HEREDOC_INVALID_SYNTAX' do + code = <<-CODE + @(END:syntax/t/p) + Text + |- END + CODE + expect_issue(code, Puppet::Pops::Issues::HEREDOC_INVALID_SYNTAX) + end + + it 'detects and reports HEREDOC_WITHOUT_TEXT' do + code = '@(END:syntax/t)' + expect_issue(code, Puppet::Pops::Issues::HEREDOC_WITHOUT_TEXT) + end + + it 'detects and reports HEREDOC_MULTIPLE_AT_ESCAPES' do + code = <<-CODE + @(END:syntax/tst) + Tex\\tt\\n + |- END + CODE + expect_issue(code, Puppet::Pops::Issues::HEREDOC_MULTIPLE_AT_ESCAPES) + end + end end context 'when dealing with multi byte characters' do it 'should support unicode characters' do code = <<-CODE "x\\u2713y" CODE if Puppet::Pops::Parser::Locator::RUBYVER < Puppet::Pops::Parser::Locator::RUBY_1_9_3 # Ruby 1.8.7 reports the multibyte char as several octal characters tokens_scanned_from(code).should match_tokens2([:STRING, "x\342\234\223y"]) else # >= Ruby 1.9.3 reports \u tokens_scanned_from(code).should match_tokens2([:STRING, "x\u2713y"]) end end it 'should not select LISTSTART token when preceded by multibyte chars' do # This test is sensitive to the number of multibyte characters and position of the expressions # within the string - it is designed to fail if the position is calculated on the byte offset of the '[' # instead of the char offset. 
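      # In other words, B['dkda'] must still lex as :CLASSREF followed by :LBRACK (not
      # :LISTSTART) even though the preceding string literal contains characters that
      # occupy more bytes than chars, presumably because a byte-based position would
      # appear to leave a gap before the '[' and select the wrong token.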
# code = "$a = '\u00f6\u00fc\u00fc\u00fc\u00fc\u00e4\u00e4\u00f6\u00e4'\nnotify {'x': message => B['dkda'] }\n" tokens_scanned_from(code).should match_tokens2( :VARIABLE, :EQUALS, :STRING, [:NAME, 'notify'], :LBRACE, [:STRING, 'x'], :COLON, :NAME, :FARROW, :CLASSREF, :LBRACK, :STRING, :RBRACK, :RBRACE) end end context 'when lexing epp' do it 'epp can contain just text' do code = <<-CODE This is just text CODE epp_tokens_scanned_from(code).should match_tokens2(:EPP_START, [:RENDER_STRING, " This is just text\n"]) end it 'epp can contain text with interpolated rendered expressions' do code = <<-CODE This is <%= $x %> just text CODE epp_tokens_scanned_from(code).should match_tokens2( :EPP_START, [:RENDER_STRING, " This is "], [:RENDER_EXPR, nil], [:VARIABLE, "x"], [:EPP_END, "%>"], [:RENDER_STRING, " just text\n"] ) end it 'epp can contain text with trimmed interpolated rendered expressions' do code = <<-CODE This is <%= $x -%> just text CODE epp_tokens_scanned_from(code).should match_tokens2( :EPP_START, [:RENDER_STRING, " This is "], [:RENDER_EXPR, nil], [:VARIABLE, "x"], [:EPP_END_TRIM, "-%>"], [:RENDER_STRING, "just text\n"] ) end it 'epp can contain text with expressions that are not rendered' do code = <<-CODE This is <% $x=10 %> just text CODE epp_tokens_scanned_from(code).should match_tokens2( :EPP_START, [:RENDER_STRING, " This is "], [:VARIABLE, "x"], :EQUALS, [:NUMBER, "10"], [:RENDER_STRING, " just text\n"] ) end it 'epp can skip leading space in tail text' do code = <<-CODE This is <% $x=10 -%> just text CODE epp_tokens_scanned_from(code).should match_tokens2( :EPP_START, [:RENDER_STRING, " This is "], [:VARIABLE, "x"], :EQUALS, [:NUMBER, "10"], [:RENDER_STRING, "just text\n"] ) end it 'epp can skip comments' do code = <<-CODE This is <% $x=10 -%> <%# This is an epp comment -%> just text CODE epp_tokens_scanned_from(code).should match_tokens2( :EPP_START, [:RENDER_STRING, " This is "], [:VARIABLE, "x"], :EQUALS, [:NUMBER, "10"], [:RENDER_STRING, "just text\n"] ) end it 'epp can escape epp tags' do code = <<-CODE This is <% $x=10 -%> <%% this is escaped epp %%> CODE epp_tokens_scanned_from(code).should match_tokens2( :EPP_START, [:RENDER_STRING, " This is "], [:VARIABLE, "x"], :EQUALS, [:NUMBER, "10"], [:RENDER_STRING, "<% this is escaped epp %>\n"] ) end + + context 'with bad epp syntax' do + def expect_issue(code, issue) + expect { epp_tokens_scanned_from(code) }.to raise_error(Puppet::ParseErrorWithIssue) { |e| + expect(e.issue_code).to be(issue.issue_code) + } + end + + it 'detects and reports EPP_UNBALANCED_TAG' do + expect_issue('<% asf', Puppet::Pops::Issues::EPP_UNBALANCED_TAG) + end + + it 'detects and reports EPP_UNBALANCED_COMMENT' do + expect_issue('<%# asf', Puppet::Pops::Issues::EPP_UNBALANCED_COMMENT) + end + + it 'detects and reports EPP_UNBALANCED_EXPRESSION' do + expect_issue('asf <%', Puppet::Pops::Issues::EPP_UNBALANCED_EXPRESSION) + end + end end -end + context 'when parsing bad code' do + def expect_issue(code, issue) + expect { tokens_scanned_from(code) }.to raise_error(Puppet::ParseErrorWithIssue) do |e| + expect(e.issue_code).to be(issue.issue_code) + end + end + + it 'detects and reports issue ILLEGAL_CLASS_REFERENCE' do + expect_issue('A::3', Puppet::Pops::Issues::ILLEGAL_CLASS_REFERENCE) + end + + it 'detects and reports issue ILLEGAL_FULLY_QUALIFIED_CLASS_REFERENCE' do + expect_issue('::A::3', Puppet::Pops::Issues::ILLEGAL_FULLY_QUALIFIED_CLASS_REFERENCE) + end + + it 'detects and reports issue ILLEGAL_FULLY_QUALIFIED_NAME' do + expect_issue('::a::3', 
Puppet::Pops::Issues::ILLEGAL_FULLY_QUALIFIED_NAME) + end + + it 'detects and reports issue ILLEGAL_NAME_OR_BARE_WORD' do + expect_issue('a::3', Puppet::Pops::Issues::ILLEGAL_NAME_OR_BARE_WORD) + end + + it 'detects and reports issue ILLEGAL_NUMBER' do + expect_issue('3g', Puppet::Pops::Issues::ILLEGAL_NUMBER) + end + + it 'detects and reports issue INVALID_HEX_NUMBER' do + expect_issue('0x3g', Puppet::Pops::Issues::INVALID_HEX_NUMBER) + end + + it 'detects and reports issue INVALID_OCTAL_NUMBER' do + expect_issue('038', Puppet::Pops::Issues::INVALID_OCTAL_NUMBER) + end + + it 'detects and reports issue INVALID_DECIMAL_NUMBER' do + expect_issue('4.3g', Puppet::Pops::Issues::INVALID_DECIMAL_NUMBER) + end + + it 'detects and reports issue NO_INPUT_TO_LEXER' do + expect { Puppet::Pops::Parser::Lexer2.new.fullscan }.to raise_error(Puppet::ParseErrorWithIssue) { |e| + expect(e.issue_code).to be(Puppet::Pops::Issues::NO_INPUT_TO_LEXER.issue_code) + } + end + + it 'detects and reports issue UNCLOSED_QUOTE' do + expect_issue('"asd', Puppet::Pops::Issues::UNCLOSED_QUOTE) + end + end +end diff --git a/spec/unit/util/log_spec.rb b/spec/unit/util/log_spec.rb index bc6ecfcf0..6fd1db3a2 100755 --- a/spec/unit/util/log_spec.rb +++ b/spec/unit/util/log_spec.rb @@ -1,378 +1,491 @@ #! /usr/bin/env ruby require 'spec_helper' require 'puppet/util/log' describe Puppet::Util::Log do include PuppetSpec::Files def log_notice(message) Puppet::Util::Log.new(:level => :notice, :message => message) end it "should write a given message to the specified destination" do arraydest = [] Puppet::Util::Log.newdestination(Puppet::Test::LogCollector.new(arraydest)) Puppet::Util::Log.new(:level => :notice, :message => "foo") message = arraydest.last.message message.should == "foo" end describe ".setup_default" do it "should default to :syslog" do Puppet.features.stubs(:syslog?).returns(true) Puppet::Util::Log.expects(:newdestination).with(:syslog) Puppet::Util::Log.setup_default end it "should fall back to :eventlog" do Puppet.features.stubs(:syslog?).returns(false) Puppet.features.stubs(:eventlog?).returns(true) Puppet::Util::Log.expects(:newdestination).with(:eventlog) Puppet::Util::Log.setup_default end it "should fall back to :file" do Puppet.features.stubs(:syslog?).returns(false) Puppet.features.stubs(:eventlog?).returns(false) Puppet::Util::Log.expects(:newdestination).with(Puppet[:puppetdlog]) Puppet::Util::Log.setup_default end end describe "#with_destination" do it "does nothing when nested" do logs = [] destination = Puppet::Test::LogCollector.new(logs) Puppet::Util::Log.with_destination(destination) do Puppet::Util::Log.with_destination(destination) do log_notice("Inner block") end log_notice("Outer block") end log_notice("Outside") expect(logs.collect(&:message)).to include("Inner block", "Outer block") expect(logs.collect(&:message)).not_to include("Outside") end it "logs when called a second time" do logs = [] destination = Puppet::Test::LogCollector.new(logs) Puppet::Util::Log.with_destination(destination) do log_notice("First block") end log_notice("Between blocks") Puppet::Util::Log.with_destination(destination) do log_notice("Second block") end expect(logs.collect(&:message)).to include("First block", "Second block") expect(logs.collect(&:message)).not_to include("Between blocks") end it "doesn't close the destination if already set manually" do logs = [] destination = Puppet::Test::LogCollector.new(logs) Puppet::Util::Log.newdestination(destination) Puppet::Util::Log.with_destination(destination) do 
log_notice "Inner block" end log_notice "Outer block" Puppet::Util::Log.close(destination) expect(logs.collect(&:message)).to include("Inner block", "Outer block") end + + it 'includes backtrace for RuntimeError in log message when trace is enabled' do + logs = [] + destination = Puppet::Test::LogCollector.new(logs) + + Puppet::Util::Log.newdestination(destination) + Puppet::Util::Log.with_destination(destination) do + begin + raise RuntimeError, 'Oops' + rescue RuntimeError => e + Puppet.log_exception(e, :default, :trace => true) + end + end + expect(logs.size).to eq(1) + log = logs[0] + expect(log.message).to match('/log_spec.rb') + expect(log.backtrace).to be_nil + end + + it 'excludes backtrace for RuntimeError in log message when trace is disabled' do + logs = [] + destination = Puppet::Test::LogCollector.new(logs) + + Puppet::Util::Log.newdestination(destination) + Puppet::Util::Log.with_destination(destination) do + begin + raise RuntimeError, 'Oops' + rescue RuntimeError => e + Puppet.log_exception(e) + end + end + expect(logs.size).to eq(1) + log = logs[0] + expect(log.message).to_not match('/log_spec.rb') + expect(log.backtrace).to be_nil + end + + it "backtrace is Array in 'backtrace' and excluded from 'message' when logging ParseErrorWithIssue with trace enabled" do + logs = [] + destination = Puppet::Test::LogCollector.new(logs) + + Puppet::Util::Log.newdestination(destination) + Puppet::Util::Log.with_destination(destination) do + begin + raise Puppet::ParseErrorWithIssue.new('Oops', '/tmp/test.pp', 30, 15, nil, :SYNTAX_ERROR) + rescue RuntimeError => e + Puppet.log_exception(e, :default, :trace => true) + end + end + expect(logs.size).to eq(1) + log = logs[0] + expect(log.message).to_not match('/log_spec.rb') + expect(log.backtrace).to be_a(Array) + end + + it "backtrace is excluded when logging ParseErrorWithIssue with trace disabled" do + logs = [] + destination = Puppet::Test::LogCollector.new(logs) + + Puppet::Util::Log.newdestination(destination) + Puppet::Util::Log.with_destination(destination) do + begin + raise Puppet::ParseErrorWithIssue.new('Oops', '/tmp/test.pp', 30, 15, nil, :SYNTAX_ERROR) + rescue RuntimeError => e + Puppet.log_exception(e) + end + end + expect(logs.size).to eq(1) + log = logs[0] + expect(log.message).to_not match('/log_spec.rb') + expect(log.backtrace).to be_nil + end + + it 'includes position details for ParseError in log message' do + logs = [] + destination = Puppet::Test::LogCollector.new(logs) + + Puppet::Util::Log.newdestination(destination) + Puppet::Util::Log.with_destination(destination) do + begin + raise Puppet::ParseError.new('Oops', '/tmp/test.pp', 30, 15) + rescue RuntimeError => e + Puppet.log_exception(e) + end + end + expect(logs.size).to eq(1) + log = logs[0] + expect(log.message).to match(/ at \/tmp\/test\.pp:30:15/) + expect(log.message).to be(log.to_s) + end + + it 'excludes position details for ParseErrorWithIssue from log message' do + logs = [] + destination = Puppet::Test::LogCollector.new(logs) + + Puppet::Util::Log.newdestination(destination) + Puppet::Util::Log.with_destination(destination) do + begin + raise Puppet::ParseErrorWithIssue.new('Oops', '/tmp/test.pp', 30, 15, nil, :SYNTAX_ERROR) + rescue RuntimeError => e + Puppet.log_exception(e) + end + end + expect(logs.size).to eq(1) + log = logs[0] + expect(log.message).to_not match(/ at \/tmp\/test\.pp:30:15/) + expect(log.to_s).to match(/ at \/tmp\/test\.pp:30:15/) + expect(log.issue_code).to eq(:SYNTAX_ERROR) + expect(log.file).to eq('/tmp/test.pp') + 
expect(log.line).to eq(30) + expect(log.pos).to eq(15) + end end + describe Puppet::Util::Log::DestConsole do before do @console = Puppet::Util::Log::DestConsole.new @console.stubs(:console_has_color?).returns(true) end it "should colorize if Puppet[:color] is :ansi" do Puppet[:color] = :ansi @console.colorize(:alert, "abc").should == "\e[0;31mabc\e[0m" end it "should colorize if Puppet[:color] is 'yes'" do Puppet[:color] = "yes" @console.colorize(:alert, "abc").should == "\e[0;31mabc\e[0m" end it "should htmlize if Puppet[:color] is :html" do Puppet[:color] = :html @console.colorize(:alert, "abc").should == "abc" end it "should do nothing if Puppet[:color] is false" do Puppet[:color] = false @console.colorize(:alert, "abc").should == "abc" end it "should do nothing if Puppet[:color] is invalid" do Puppet[:color] = "invalid option" @console.colorize(:alert, "abc").should == "abc" end end describe Puppet::Util::Log::DestSyslog do before do @syslog = Puppet::Util::Log::DestSyslog.new end end describe Puppet::Util::Log::DestEventlog, :if => Puppet.features.eventlog? do before :each do Win32::EventLog.stubs(:open).returns(mock 'mylog') Win32::EventLog.stubs(:report_event) Win32::EventLog.stubs(:close) Puppet.features.stubs(:eventlog?).returns(true) end it "should restrict its suitability" do Puppet.features.expects(:eventlog?).returns(false) Puppet::Util::Log::DestEventlog.suitable?('whatever').should == false end it "should open the 'Application' event log" do Win32::EventLog.expects(:open).with('Application') Puppet::Util::Log.newdestination(:eventlog) end it "should close the event log" do log = mock('myeventlog') log.expects(:close) Win32::EventLog.expects(:open).returns(log) Puppet::Util::Log.newdestination(:eventlog) Puppet::Util::Log.close(:eventlog) end it "should handle each puppet log level" do log = Puppet::Util::Log::DestEventlog.new Puppet::Util::Log.eachlevel do |level| log.to_native(level).should be_is_a(Array) end end end describe "instances" do before do Puppet::Util::Log.stubs(:newmessage) end [:level, :message, :time, :remote].each do |attr| it "should have a #{attr} attribute" do log = Puppet::Util::Log.new :level => :notice, :message => "A test message" log.should respond_to(attr) log.should respond_to(attr.to_s + "=") end end it "should fail if created without a level" do lambda { Puppet::Util::Log.new(:message => "A test message") }.should raise_error(ArgumentError) end it "should fail if created without a message" do lambda { Puppet::Util::Log.new(:level => :notice) }.should raise_error(ArgumentError) end it "should make available the level passed in at initialization" do Puppet::Util::Log.new(:level => :notice, :message => "A test message").level.should == :notice end it "should make available the message passed in at initialization" do Puppet::Util::Log.new(:level => :notice, :message => "A test message").message.should == "A test message" end # LAK:NOTE I don't know why this behavior is here, I'm just testing what's in the code, # at least at first. 
it "should always convert messages to strings" do Puppet::Util::Log.new(:level => :notice, :message => :foo).message.should == "foo" end it "should flush the log queue when the first destination is specified" do Puppet::Util::Log.close_all Puppet::Util::Log.expects(:flushqueue) Puppet::Util::Log.newdestination(:console) end it "should convert the level to a symbol if it's passed in as a string" do Puppet::Util::Log.new(:level => "notice", :message => :foo).level.should == :notice end it "should fail if the level is not a symbol or string" do lambda { Puppet::Util::Log.new(:level => 50, :message => :foo) }.should raise_error(ArgumentError) end it "should fail if the provided level is not valid" do Puppet::Util::Log.expects(:validlevel?).with(:notice).returns false lambda { Puppet::Util::Log.new(:level => :notice, :message => :foo) }.should raise_error(ArgumentError) end it "should set its time to the initialization time" do time = mock 'time' Time.expects(:now).returns time Puppet::Util::Log.new(:level => "notice", :message => :foo).time.should equal(time) end it "should make available any passed-in tags" do log = Puppet::Util::Log.new(:level => "notice", :message => :foo, :tags => %w{foo bar}) log.tags.should be_include("foo") log.tags.should be_include("bar") end it "should use a passed-in source" do Puppet::Util::Log.any_instance.expects(:source=).with "foo" Puppet::Util::Log.new(:level => "notice", :message => :foo, :source => "foo") end [:file, :line].each do |attr| it "should use #{attr} if provided" do Puppet::Util::Log.any_instance.expects(attr.to_s + "=").with "foo" Puppet::Util::Log.new(:level => "notice", :message => :foo, attr => "foo") end end it "should default to 'Puppet' as its source" do Puppet::Util::Log.new(:level => "notice", :message => :foo).source.should == "Puppet" end it "should register itself with Log" do Puppet::Util::Log.expects(:newmessage) Puppet::Util::Log.new(:level => "notice", :message => :foo) end it "should update Log autoflush when Puppet[:autoflush] is set" do Puppet::Util::Log.expects(:autoflush=).once.with(true) Puppet[:autoflush] = true end it "should have a method for determining if a tag is present" do Puppet::Util::Log.new(:level => "notice", :message => :foo).should respond_to(:tagged?) end it "should match a tag if any of the tags are equivalent to the passed tag as a string" do Puppet::Util::Log.new(:level => "notice", :message => :foo, :tags => %w{one two}).should be_tagged(:one) end it "should tag itself with its log level" do Puppet::Util::Log.new(:level => "notice", :message => :foo).should be_tagged(:notice) end it "should return its message when converted to a string" do Puppet::Util::Log.new(:level => "notice", :message => :foo).to_s.should == "foo" end it "should include its time, source, level, and message when prepared for reporting" do log = Puppet::Util::Log.new(:level => "notice", :message => :foo) report = log.to_report report.should be_include("notice") report.should be_include("foo") report.should be_include(log.source) report.should be_include(log.time.to_s) end it "should not create unsuitable log destinations" do Puppet.features.stubs(:syslog?).returns(false) Puppet::Util::Log::DestSyslog.expects(:suitable?) 
Puppet::Util::Log::DestSyslog.expects(:new).never Puppet::Util::Log.newdestination(:syslog) end describe "when setting the source as a RAL object" do let(:path) { File.expand_path('/foo/bar') } it "should tag itself with any tags the source has" do source = Puppet::Type.type(:file).new :path => path log = Puppet::Util::Log.new(:level => "notice", :message => :foo, :source => source) source.tags.each do |tag| log.tags.should be_include(tag) end end it "should set the source to 'path', when available" do source = Puppet::Type.type(:file).new :path => path source.tags = ["tag", "tag2"] log = Puppet::Util::Log.new(:level => "notice", :message => :foo) log.expects(:tag).with("file") log.expects(:tag).with("tag") log.expects(:tag).with("tag2") log.source = source log.source.should == "/File[#{path}]" end it "should copy over any file and line information" do source = Puppet::Type.type(:file).new :path => path source.file = "/my/file" source.line = 50 log = Puppet::Util::Log.new(:level => "notice", :message => :foo, :source => source) log.line.should == 50 log.file.should == "/my/file" end end describe "when setting the source as a non-RAL object" do it "should not try to copy over file, version, line, or tag information" do source = mock source.expects(:file).never log = Puppet::Util::Log.new(:level => "notice", :message => :foo, :source => source) end end end describe "to_yaml" do it "should not include the @version attribute" do log = Puppet::Util::Log.new(:level => "notice", :message => :foo, :version => 100) log.to_yaml_properties.should_not include('@version') end it "should include attributes @level, @message, @source, @tags, and @time" do log = Puppet::Util::Log.new(:level => "notice", :message => :foo, :version => 100) log.to_yaml_properties.should =~ [:@level, :@message, :@source, :@tags, :@time] end it "should include attributes @file and @line if specified" do log = Puppet::Util::Log.new(:level => "notice", :message => :foo, :file => "foo", :line => 35) log.to_yaml_properties.should include(:@file) log.to_yaml_properties.should include(:@line) end end it "should round trip through pson" do log = Puppet::Util::Log.new(:level => 'notice', :message => 'hooray', :file => 'thefile', :line => 1729, :source => 'specs', :tags => ['a', 'b', 'c']) tripped = Puppet::Util::Log.from_data_hash(PSON.parse(log.to_pson)) tripped.file.should == log.file tripped.line.should == log.line tripped.level.should == log.level tripped.message.should == log.message tripped.source.should == log.source tripped.tags.should == log.tags tripped.time.should == log.time end end