diff --git a/lib/puppet/external/nagios/parser.rb b/lib/puppet/external/nagios/parser.rb index 5504f5818..17db5e307 100644 --- a/lib/puppet/external/nagios/parser.rb +++ b/lib/puppet/external/nagios/parser.rb @@ -1,775 +1,775 @@ # # DO NOT MODIFY!!!! # This file is automatically generated by racc 1.4.5 # from racc grammer file "grammar.ry". # # # parser.rb: generated by racc (runtime embedded) # ###### racc/parser.rb begin unless $LOADED_FEATURES.index 'racc/parser.rb' $LOADED_FEATURES.push 'racc/parser.rb' self.class.module_eval <<'..end racc/parser.rb modeval..id5256434e8a', 'racc/parser.rb', 1 # # $Id: parser.rb,v 1.7 2005/11/20 17:31:32 aamine Exp $ # # Copyright (c) 1999-2005 Minero Aoki # # This program is free software. # You can distribute/modify this program under the same terms of ruby. # # As a special exception, when this code is copied by Racc # into a Racc output file, you may use that output file # without restriction. # NotImplementedError = NotImplementError unless defined?(NotImplementedError) module Racc class ParseError < StandardError; end end ParseError = Racc::ParseError unless defined?(::ParseError) module Racc Racc_No_Extentions = false unless defined?(Racc_No_Extentions) class Parser Racc_Runtime_Version = '1.4.5' Racc_Runtime_Revision = '$Revision: 1.7 $'.split[1] Racc_Runtime_Core_Version_R = '1.4.5' Racc_Runtime_Core_Revision_R = '$Revision: 1.7 $'.split[1] begin require 'racc/cparse' # Racc_Runtime_Core_Version_C = (defined in extention) Racc_Runtime_Core_Revision_C = Racc_Runtime_Core_Id_C.split[2] raise LoadError, 'old cparse.so' unless new.respond_to?(:_racc_do_parse_c, true) raise LoadError, 'selecting ruby version of racc runtime core' if Racc_No_Extentions Racc_Main_Parsing_Routine = :_racc_do_parse_c Racc_YY_Parse_Method = :_racc_yyparse_c Racc_Runtime_Core_Version = Racc_Runtime_Core_Version_C Racc_Runtime_Core_Revision = Racc_Runtime_Core_Revision_C Racc_Runtime_Type = 'c' rescue LoadError Racc_Main_Parsing_Routine = :_racc_do_parse_rb Racc_YY_Parse_Method = :_racc_yyparse_rb Racc_Runtime_Core_Version = Racc_Runtime_Core_Version_R Racc_Runtime_Core_Revision = Racc_Runtime_Core_Revision_R Racc_Runtime_Type = 'ruby' end def Parser.racc_runtime_type Racc_Runtime_Type end private def _racc_setup @yydebug = false unless self.class::Racc_debug_parser @yydebug ||= false if @yydebug @racc_debug_out ||= $stderr @racc_debug_out ||= $stderr end arg = self.class::Racc_arg arg[13] = true if arg.size < 14 arg end def _racc_init_sysvars @racc_state = [0] @racc_tstack = [] @racc_vstack = [] @racc_t = nil @racc_val = nil @racc_read_next = true @racc_user_yyerror = false @racc_error_status = 0 end ### ### do_parse ### def do_parse __send__(Racc_Main_Parsing_Routine, _racc_setup, false) end def next_token raise NotImplementedError, "#{self.class}\#next_token is not defined" end def _racc_do_parse_rb(arg, in_debug) action_table, action_check, action_default, action_pointer, goto_table, goto_check, goto_default, goto_pointer, nt_base, reduce_table, token_table, shift_n, reduce_n, use_result, * = arg _racc_init_sysvars tok = act = i = nil nerr = 0 catch(:racc_end_parse) { while true if i = action_pointer[@racc_state[-1]] if @racc_read_next if @racc_t != 0 # not EOF tok, @racc_val = next_token unless tok # EOF @racc_t = 0 else @racc_t = (token_table[tok] or 1) # error token end racc_read_token(@racc_t, tok, @racc_val) if @yydebug @racc_read_next = false end end i += @racc_t unless i >= 0 and act = action_table[i] and action_check[i] == @racc_state[-1] act = 
action_default[@racc_state[-1]] end else act = action_default[@racc_state[-1]] end while act = _racc_evalact(act, arg) ; end end } end ### ### yyparse ### def yyparse(recv, mid) __send__(Racc_YY_Parse_Method, recv, mid, _racc_setup, true) end def _racc_yyparse_rb(recv, mid, arg, c_debug) action_table, action_check, action_default, action_pointer, goto_table, goto_check, goto_default, goto_pointer, nt_base, reduce_table, token_table, shift_n, reduce_n, use_result, * = arg _racc_init_sysvars tok = nil act = nil i = nil nerr = 0 catch(:racc_end_parse) { until i = action_pointer[@racc_state[-1]] while act = _racc_evalact(action_default[@racc_state[-1]], arg) ; end end recv.__send__(mid) do |tok, val| unless tok @racc_t = 0 else @racc_t = (token_table[tok] or 1) # error token end @racc_val = val @racc_read_next = false i += @racc_t unless i >= 0 and act = action_table[i] and action_check[i] == @racc_state[-1] act = action_default[@racc_state[-1]] end while act = _racc_evalact(act, arg) ; end while not (i = action_pointer[@racc_state[-1]]) or not @racc_read_next or @racc_t == 0 # $ unless i and i += @racc_t and i >= 0 and act = action_table[i] and action_check[i] == @racc_state[-1] act = action_default[@racc_state[-1]] end while act = _racc_evalact(act, arg) ; end end end } end ### ### common ### def _racc_evalact(act, arg) action_table, action_check, action_default, action_pointer, goto_table, goto_check, goto_default, goto_pointer, nt_base, reduce_table, token_table, shift_n, reduce_n, use_result, * = arg nerr = 0 # tmp if act > 0 and act < shift_n # # shift # if @racc_error_status > 0 @racc_error_status -= 1 unless @racc_t == 1 # error token end @racc_vstack.push @racc_val @racc_state.push act @racc_read_next = true if @yydebug @racc_tstack.push @racc_t racc_shift @racc_t, @racc_tstack, @racc_vstack end elsif act < 0 and act > -reduce_n # # reduce # code = catch(:racc_jump) { @racc_state.push _racc_do_reduce(arg, act) false } if code case code when 1 # yyerror @racc_user_yyerror = true # user_yyerror return -reduce_n when 2 # yyaccept return shift_n else raise '[Racc Bug] unknown jump code' end end elsif act == shift_n # # accept # racc_accept if @yydebug throw :racc_end_parse, @racc_vstack[0] elsif act == -reduce_n # # error # case @racc_error_status when 0 unless arg[21] # user_yyerror nerr += 1 on_error @racc_t, @racc_val, @racc_vstack end when 3 if @racc_t == 0 # is $ throw :racc_end_parse, nil end @racc_read_next = true end @racc_user_yyerror = false @racc_error_status = 3 while true if i = action_pointer[@racc_state[-1]] i += 1 # error token if i >= 0 and (act = action_table[i]) and action_check[i] == @racc_state[-1] break end end throw :racc_end_parse, nil if @racc_state.size <= 1 @racc_state.pop @racc_vstack.pop if @yydebug @racc_tstack.pop racc_e_pop @racc_state, @racc_tstack, @racc_vstack end end return act else raise "[Racc Bug] unknown action #{act.inspect}" end racc_next_state(@racc_state[-1], @racc_state) if @yydebug nil end def _racc_do_reduce(arg, act) action_table, action_check, action_default, action_pointer, goto_table, goto_check, goto_default, goto_pointer, nt_base, reduce_table, token_table, shift_n, reduce_n, use_result, * = arg state = @racc_state vstack = @racc_vstack tstack = @racc_tstack i = act * -3 len = reduce_table[i] reduce_to = reduce_table[i+1] method_id = reduce_table[i+2] void_array = [] tmp_t = tstack[-len, len] if @yydebug tmp_v = vstack[-len, len] tstack[-len, len] = void_array if @yydebug vstack[-len, len] = void_array state[-len, len] = void_array # 
tstack must be updated AFTER method call if use_result vstack.push __send__(method_id, tmp_v, vstack, tmp_v[0]) else vstack.push __send__(method_id, tmp_v, vstack) end tstack.push reduce_to racc_reduce(tmp_t, reduce_to, tstack, vstack) if @yydebug k1 = reduce_to - nt_base if i = goto_pointer[k1] i += state[-1] if i >= 0 and (curstate = goto_table[i]) and goto_check[i] == k1 return curstate end end goto_default[k1] end def on_error(t, val, vstack) raise ParseError, sprintf("\nparse error on value %s (%s)", val.inspect, token_to_str(t) || '?') end def yyerror throw :racc_jump, 1 end def yyaccept throw :racc_jump, 2 end def yyerrok @racc_error_status = 0 end # # for debugging output # def racc_read_token(t, tok, val) @racc_debug_out.print 'read ' @racc_debug_out.print tok.inspect, '(', racc_token2str(t), ') ' @racc_debug_out.puts val.inspect @racc_debug_out.puts end def racc_shift(tok, tstack, vstack) @racc_debug_out.puts "shift #{racc_token2str tok}" racc_print_stacks tstack, vstack @racc_debug_out.puts end def racc_reduce(toks, sim, tstack, vstack) out = @racc_debug_out out.print 'reduce ' if toks.empty? out.print ' ' else toks.each {|t| out.print ' ', racc_token2str(t) } end out.puts " --> #{racc_token2str(sim)}" racc_print_stacks tstack, vstack @racc_debug_out.puts end def racc_accept @racc_debug_out.puts 'accept' @racc_debug_out.puts end def racc_e_pop(state, tstack, vstack) @racc_debug_out.puts 'error recovering mode: pop token' racc_print_states state racc_print_stacks tstack, vstack @racc_debug_out.puts end def racc_next_state(curstate, state) @racc_debug_out.puts "goto #{curstate}" racc_print_states state @racc_debug_out.puts end def racc_print_stacks(t, v) out = @racc_debug_out out.print ' [' t.each_index do |i| out.print ' (', racc_token2str(t[i]), ' ', v[i].inspect, ')' end out.puts ' ]' end def racc_print_states(s) out = @racc_debug_out out.print ' [' s.each {|st| out.print ' ', st } out.puts ' ]' end def racc_token2str(tok) self.class::Racc_token_to_s_table[tok] or raise "[Racc Bug] can't convert token #{tok} to string" end def token_to_str(t) self.class::Racc_token_to_s_table[t] end end end ..end racc/parser.rb modeval..id5256434e8a end ###### racc/parser.rb end module Nagios class Parser < Racc::Parser module_eval <<'..end grammar.ry modeval..idcb2ea30b34', 'grammar.ry', 57 class ::Nagios::Parser::SyntaxError < RuntimeError; end def parse(src) @src = src # state variables @invar = false @inobject = false @done = false @line = 0 @yydebug = true do_parse end # The lexer. Very simple. 
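# A hedged sketch of the token stream this lexer produces: for input such as
# "define host {\n", successive calls to token return roughly
#   [:DEFINE, "define"], [:NAME, "host"], [:LCURLY, "{"], [:RETURN, "\n"]
# before the parser starts reading PARAM/VALUE pairs inside the object body.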
def token @src.sub!(/\A\n/,'') if $MATCH @line += 1 return [ :RETURN, "\n" ] end return nil if @done yytext = String.new # remove comments from this line @src.sub!(/\A[ \t]*;.*\n/,"\n") return [:INLINECOMMENT, ""] if $MATCH @src.sub!(/\A#.*\n/,"\n") return [:COMMENT, ""] if $MATCH @src.sub!(/#.*/,'') if @src.length == 0 @done = true return [false, '$'] end if @invar @src.sub!(/\A[ \t]+/,'') @src.sub!(/\A([^;\n]+)(\n|;)/,'\2') if $1 yytext += $1 end @invar = false return [:VALUE, yytext] else @src.sub!(/\A[\t ]*(\S+)([\t ]*|$)/,'') if $1 yytext = $1 case yytext when 'define' #puts "got define" return [:DEFINE, yytext] when '{' #puts "got {" @inobject = true return [:LCURLY, yytext] else unless @inobject #puts "got type: #{yytext}" if yytext =~ /\W/ giveback = yytext.dup giveback.sub!(/^\w+/,'') #puts "giveback #{giveback}" #puts "yytext #{yytext}" yytext.sub!(/\W.*$/,'') #puts "yytext #{yytext}" #puts "all [#{giveback} #{yytext} #{orig}]" @src = giveback + @src end return [:NAME, yytext] else if yytext == '}' #puts "got closure: #{yytext}" @inobject = false return [:RCURLY, '}'] end unless @invar @invar = true return [:PARAM, $1] else end end end end end end def next_token token end def yydebug 1 end def yywrap 0 end def on_error(token, value, vstack ) msg = "" unless value.nil? msg = "line #{@line}: syntax error at '#{value}'" else msg = "line #{@line}: syntax error at '#{token}'" end msg = "line #{@line}: Unexpected end of file" unless @src.size > 0 if token == '$end'.intern puts "okay, this is silly" else raise ::Nagios::Parser::SyntaxError, msg end end ..end grammar.ry modeval..idcb2ea30b34 ##### racc 1.4.5 generates ### racc_reduce_table = [ 0, 0, :racc_error, 1, 13, :_reduce_1, 2, 13, :_reduce_2, 1, 14, :_reduce_3, 1, 14, :_reduce_4, 1, 14, :_reduce_none, 2, 16, :_reduce_6, 6, 15, :_reduce_7, 1, 17, :_reduce_none, 2, 17, :_reduce_9, 4, 18, :_reduce_10, 1, 20, :_reduce_none, 2, 20, :_reduce_none, 0, 19, :_reduce_none, 1, 19, :_reduce_none ] racc_reduce_n = 15 racc_shift_n = 26 racc_action_table = [ 9, 15, 1, 20, 1, 14, 12, 13, 11, 6, 7, 6, 7, 15, 18, 8, 21, 23, 25 ] racc_action_check = [ 2, 16, 2, 16, 0, 12, 8, 9, 7, 2, 2, 0, 0, 14, 15, 1, 18, 22, 24 ] racc_action_pointer = [ 2, 12, 0, nil, nil, nil, nil, -1, 0, 7, nil, nil, -4, nil, 8, 6, -4, nil, 5, nil, nil, nil, 8, nil, 9, nil ] racc_action_default = [ -15, -15, -15, -1, -3, -5, -4, -15, -15, -15, -2, -6, -15, 26, -15, -15, -15, -8, -13, -9, -7, -14, -15, -11, -10, -12 ] racc_goto_table = [ 17, 3, 19, 10, 2, 16, 22, 24 ] racc_goto_check = [ 6, 2, 6, 2, 1, 5, 7, 8 ] racc_goto_pointer = [ nil, 4, 1, nil, nil, -9, -14, -12, -15 ] racc_goto_default = [ nil, nil, nil, 4, 5, nil, nil, nil, nil ] racc_token_table = { false => 0, Object.new => 1, :DEFINE => 2, :NAME => 3, :STRING => 4, :PARAM => 5, :LCURLY => 6, :RCURLY => 7, :VALUE => 8, :RETURN => 9, :COMMENT => 10, :INLINECOMMENT => 11 } racc_use_result_var = true racc_nt_base = 12 Racc_arg = [ racc_action_table, racc_action_check, racc_action_default, racc_action_pointer, racc_goto_table, racc_goto_check, racc_goto_default, racc_goto_pointer, racc_nt_base, racc_reduce_table, racc_token_table, racc_shift_n, racc_reduce_n, racc_use_result_var ] Racc_token_to_s_table = [ '$end', 'error', 'DEFINE', 'NAME', 'STRING', 'PARAM', 'LCURLY', 'RCURLY', 'VALUE', 'RETURN', 'COMMENT', 'INLINECOMMENT', '$start', 'decls', 'decl', 'object', 'comment', 'vars', 'var', 'icomment', 'returns'] Racc_debug_parser = false ##### racc system variables end ##### # reduce 0 omitted module_eval <<'.,.,', 
'grammar.ry', 6 def _reduce_1( val, _values, result ) return val[0] if val[0] result end .,., module_eval <<'.,.,', 'grammar.ry', 18 def _reduce_2( val, _values, result ) if val[1].nil? result = val[0] else if val[0].nil? result = val[1] else result = [ val[0], val[1] ].flatten end end result end .,., module_eval <<'.,.,', 'grammar.ry', 20 def _reduce_3( val, _values, result ) result = [val[0]] result end .,., module_eval <<'.,.,', 'grammar.ry', 21 def _reduce_4( val, _values, result ) result = nil result end .,., # reduce 5 omitted module_eval <<'.,.,', 'grammar.ry', 25 def _reduce_6( val, _values, result ) result = nil result end .,., module_eval <<'.,.,', 'grammar.ry', 31 def _reduce_7( val, _values, result ) result = Nagios::Base.create(val[1],val[4]) result end .,., # reduce 8 omitted module_eval <<'.,.,', 'grammar.ry', 40 def _reduce_9( val, _values, result ) val[1].each {|p,v| val[0][p] = v } result = val[0] result end .,., module_eval <<'.,.,', 'grammar.ry', 42 def _reduce_10( val, _values, result ) -result = {val[0],val[1]} +result = {val[0] => val[1]} result end .,., # reduce 11 omitted # reduce 12 omitted # reduce 13 omitted # reduce 14 omitted def _reduce_none( val, _values, result ) result end end end diff --git a/lib/puppet/provider/service/daemontools.rb b/lib/puppet/provider/service/daemontools.rb index bbb962a71..f5a073329 100644 --- a/lib/puppet/provider/service/daemontools.rb +++ b/lib/puppet/provider/service/daemontools.rb @@ -1,194 +1,194 @@ # Daemontools service management # # author Brice Figureau Puppet::Type.type(:service).provide :daemontools, :parent => :base do desc "Daemontools service management. This provider manages daemons running supervised by D.J.Bernstein daemontools. It tries to detect the service directory, with by order of preference: * /service * /etc/service * /var/lib/svscan The daemon directory should be placed in a directory that can be by default in: * /var/lib/service * /etc or this can be overriden in the service resource parameters:: service { \"myservice\": provider => \"daemontools\", path => \"/path/to/daemons\", } This provider supports out of the box: * start/stop (mapped to enable/disable) * enable/disable * restart * status If a service has `ensure => \"running\"`, it will link /path/to/daemon to /path/to/service, which will automatically enable the service. If a service has `ensure => \"stopped\"`, it will only down the service, not remove the /path/to/service link. " commands :svc => "/usr/bin/svc", :svstat => "/usr/bin/svstat" class << self attr_writer :defpath # Determine the daemon path. def defpath(dummy_argument=:work_arround_for_ruby_GC_bug) unless @defpath ["/var/lib/service", "/etc"].each do |path| if FileTest.exist?(path) @defpath = path break end end raise "Could not find the daemon directory (tested [/var/lib/service,/etc])" unless @defpath end @defpath end end attr_writer :servicedir # returns all providers for all existing services in @defpath # ie enabled or not def self.instances path = self.defpath unless FileTest.directory?(path) Puppet.notice "Service path #{path} does not exist" - next + return end # reject entries that aren't either a directory # or don't contain a run file Dir.entries(path).reject { |e| fullpath = File.join(path, e) e =~ /^\./ or ! FileTest.directory?(fullpath) or ! 
FileTest.exist?(File.join(fullpath,"run")) }.collect do |name| new(:name => name, :path => path) end end # returns the daemon dir on this node def self.daemondir self.defpath end # find the service dir on this node def servicedir unless @servicedir ["/service", "/etc/service","/var/lib/svscan"].each do |path| if FileTest.exist?(path) @servicedir = path break end end raise "Could not find service directory" unless @servicedir end @servicedir end # returns the full path of this service when enabled # (ie in the service directory) def service File.join(self.servicedir, resource[:name]) end # returns the full path to the current daemon directory # note that this path can be overriden in the resource # definition def daemon File.join(resource[:path], resource[:name]) end def status begin output = svstat self.service if output =~ /:\s+up \(/ return :running end rescue Puppet::ExecutionFailure => detail raise Puppet::Error.new( "Could not get status for service #{resource.ref}: #{detail}" ) end :stopped end def setupservice if resource[:manifest] Puppet.notice "Configuring #{resource[:name]}" command = [ resource[:manifest], resource[:name] ] #texecute("setupservice", command) rv = system("#{command}") end rescue Puppet::ExecutionFailure => detail raise Puppet::Error.new( "Cannot config #{self.service} to enable it: #{detail}" ) end def enabled? case self.status when :running # obviously if the daemon is running then it is enabled return :true else # the service is enabled if it is linked return FileTest.symlink?(self.service) ? :true : :false end end def enable if ! FileTest.directory?(self.daemon) Puppet.notice "No daemon dir, calling setupservice for #{resource[:name]}" self.setupservice end if self.daemon if ! FileTest.symlink?(self.service) Puppet.notice "Enabling #{self.service}: linking #{self.daemon} -> #{self.service}" File.symlink(self.daemon, self.service) end end rescue Puppet::ExecutionFailure => detail raise Puppet::Error.new( "No daemon directory found for #{self.service}") end def disable begin if ! FileTest.directory?(self.daemon) Puppet.notice "No daemon dir, calling setupservice for #{resource[:name]}" self.setupservice end if self.daemon if FileTest.symlink?(self.service) Puppet.notice "Disabling #{self.service}: removing link #{self.daemon} -> #{self.service}" File.unlink(self.service) end end rescue Puppet::ExecutionFailure => detail raise Puppet::Error.new( "No daemon directory found for #{self.service}") end self.stop end def restart svc "-t", self.service end def start enable unless enabled? == :true svc "-u", self.service end def stop svc "-d", self.service end end diff --git a/lib/puppet/type/file.rb b/lib/puppet/type/file.rb index 16b1f962d..1a6d0c3ac 100644 --- a/lib/puppet/type/file.rb +++ b/lib/puppet/type/file.rb @@ -1,793 +1,793 @@ require 'digest/md5' require 'cgi' require 'etc' require 'uri' require 'fileutils' require 'puppet/network/handler' require 'puppet/util/diff' require 'puppet/util/checksums' require 'puppet/network/client' require 'puppet/util/backups' Puppet::Type.newtype(:file) do include Puppet::Util::MethodHelper include Puppet::Util::Checksums include Puppet::Util::Backups @doc = "Manages local files, including setting ownership and permissions, creation of both files and directories, and retrieving entire files from remote servers. As Puppet matures, it expected that the `file` resource will be used less and less to manage content, and instead native resources will be used to do so. 
If you find that you are often copying files in from a central location, rather than using native resources, please contact Puppet Labs and we can hopefully work with you to develop a native resource to support what you are doing. **Autorequires:** If Puppet is managing the user or group that owns a file, the file resource will autorequire them. If Puppet is managing any parent directories of a file, the file resource will autorequire them." def self.title_patterns [ [ /^(.*?)\/*\Z/m, [ [ :path, lambda{|x| x} ] ] ] ] end newparam(:path) do desc "The path to the file to manage. Must be fully qualified." isnamevar validate do |value| # accept various path syntaxes: lone slash, posix, win32, unc unless (Puppet.features.posix? and value =~ /^\//) or (Puppet.features.microsoft_windows? and (value =~ /^.:\// or value =~ /^\/\/[^\/]+\/[^\/]+/)) fail Puppet::Error, "File paths must be fully qualified, not '#{value}'" end end # convert the current path in an index into the collection and the last # path name. The aim is to use less storage for all common paths in a hierarchy munge do |value| - path, name = File.split(value.gsub(/\/+/,'/')) + path, name = ::File.split(value.gsub(/\/+/,'/')) { :index => Puppet::FileCollection.collection.index(path), :name => name } end # and the reverse unmunge do |value| basedir = Puppet::FileCollection.collection.path(value[:index]) # a lone slash as :name indicates a root dir on windows if value[:name] == '/' basedir else - File.join( basedir, value[:name] ) + ::File.join( basedir, value[:name] ) end end end newparam(:backup) do desc "Whether files should be backed up before being replaced. The preferred method of backing files up is via a `filebucket`, which stores files by their MD5 sums and allows easy retrieval without littering directories with backups. You can specify a local filebucket or a network-accessible server-based filebucket by setting `backup => bucket-name`. Alternatively, if you specify any value that begins with a `.` (e.g., `.puppet-bak`), then Puppet will use copy the file in the same directory with that value as the extension of the backup. Setting `backup => false` disables all backups of the file in question. Puppet automatically creates a local filebucket named `puppet` and defaults to backing up there. To use a server-based filebucket, you must specify one in your configuration filebucket { main: server => puppet } The `puppet master` daemon creates a filebucket by default, so you can usually back up to your main server with this configuration. Once you've described the bucket in your configuration, you can use it in any file file { \"/my/file\": source => \"/path/in/nfs/or/something\", backup => main } This will back the file up to the central server. At this point, the benefits of using a filebucket are that you do not have backup files lying around on each of your machines, a given version of a file is only backed up once, and you can restore any given file manually, no matter how old. Eventually, transactional support will be able to automatically restore filebucketed files. " defaultto "puppet" munge do |value| # I don't really know how this is happening. value = value.shift if value.is_a?(Array) case value when false, "false", :false false when true, "true", ".puppet-bak", :true ".puppet-bak" when String value else self.fail "Invalid backup type #{value.inspect}" end end end newparam(:recurse) do desc "Whether and how deeply to do recursive management." 
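# A hedged illustration of the munge below: a legacy numeric recurse value is
# folded into recurselimit plus boolean recursion (with a deprecation warning),
# so, roughly,
#   Puppet::Type.type(:file).new(:path => "/tmp/example", :recurse => "2")
# behaves like
#   Puppet::Type.type(:file).new(:path => "/tmp/example",
#                                :recurse => true, :recurselimit => 2)
# ("/tmp/example" is just a placeholder path).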
newvalues(:true, :false, :inf, :remote, /^[0-9]+$/) # Replace the validation so that we allow numbers in # addition to string representations of them. validate { |arg| } munge do |value| newval = super(value) case newval when :true, :inf; true when :false; false when :remote; :remote when Integer, Fixnum, Bignum self.warning "Setting recursion depth with the recurse parameter is now deprecated, please use recurselimit" # recurse == 0 means no recursion return false if value == 0 resource[:recurselimit] = value true when /^\d+$/ self.warning "Setting recursion depth with the recurse parameter is now deprecated, please use recurselimit" value = Integer(value) # recurse == 0 means no recursion return false if value == 0 resource[:recurselimit] = value true else self.fail "Invalid recurse value #{value.inspect}" end end end newparam(:recurselimit) do desc "How deeply to do recursive management." newvalues(/^[0-9]+$/) munge do |value| newval = super(value) case newval when Integer, Fixnum, Bignum; value when /^\d+$/; Integer(value) else self.fail "Invalid recurselimit value #{value.inspect}" end end end newparam(:replace, :boolean => true) do desc "Whether or not to replace a file that is sourced but exists. This is useful for using file sources purely for initialization." newvalues(:true, :false) aliasvalue(:yes, :true) aliasvalue(:no, :false) defaultto :true end newparam(:force, :boolean => true) do desc "Force the file operation. Currently only used when replacing directories with links." newvalues(:true, :false) defaultto false end newparam(:ignore) do desc "A parameter which omits action on files matching specified patterns during recursion. Uses Ruby's builtin globbing engine, so shell metacharacters are fully supported, e.g. `[a-z]*`. Matches that would descend into the directory structure are ignored, e.g., `*/*`." validate do |value| unless value.is_a?(Array) or value.is_a?(String) or value == false self.devfail "Ignore must be a string or an Array" end end end newparam(:links) do desc "How to handle links during file actions. During file copying, `follow` will copy the target file instead of the link, `manage` will copy the link itself, and `ignore` will just pass it by. When not copying, `manage` and `ignore` behave equivalently (because you cannot really ignore links entirely during local recursion), and `follow` will manage the file to which the link points." newvalues(:follow, :manage) defaultto :manage end newparam(:purge, :boolean => true) do desc "Whether unmanaged files should be purged. If you have a filebucket configured the purged files will be uploaded, but if you do not, this will destroy data. Only use this option for generated files unless you really know what you are doing. This option only makes sense when recursively managing directories. Note that when using `purge` with `source`, Puppet will purge any files that are not on the remote system." defaultto :false newvalues(:true, :false) end newparam(:sourceselect) do desc "Whether to copy all valid sources, or just the first one. This parameter is only used in recursive copies; by default, the first valid source is the only one used as a recursive source, but if this parameter is set to `all`, then all valid sources will have all of their contents copied to the local host, and for sources that have the same file, the source earlier in the list will be used." defaultto :first newvalues(:first, :all) end # Autorequire any parent directories. 
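# (Hedged example of the effect: ::File.dirname("/var/www/html/index.html")
# is "/var/www/html", so a file resource managing that child path will
# autorequire a file resource managing "/var/www/html" if one is present in
# the catalog; the paths here are placeholders.)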
autorequire(:file) do - basedir = File.dirname(self[:path]) + basedir = ::File.dirname(self[:path]) if basedir != self[:path] basedir else nil end end # Autorequire the owner and group of the file. {:user => :owner, :group => :group}.each do |type, property| autorequire(type) do if @parameters.include?(property) # The user/group property automatically converts to IDs next unless should = @parameters[property].shouldorig val = should[0] if val.is_a?(Integer) or val =~ /^\d+$/ nil else val end end end end CREATORS = [:content, :source, :target] SOURCE_ONLY_CHECKSUMS = [:none, :ctime, :mtime] validate do creator_count = 0 CREATORS.each do |param| creator_count += 1 if self.should(param) end creator_count += 1 if @parameters.include?(:source) self.fail "You cannot specify more than one of #{CREATORS.collect { |p| p.to_s}.join(", ")}" if creator_count > 1 self.fail "You cannot specify a remote recursion without a source" if !self[:source] and self[:recurse] == :remote self.fail "You cannot specify source when using checksum 'none'" if self[:checksum] == :none && !self[:source].nil? SOURCE_ONLY_CHECKSUMS.each do |checksum_type| self.fail "You cannot specify content when using checksum '#{checksum_type}'" if self[:checksum] == checksum_type && !self[:content].nil? end self.warning "Possible error: recurselimit is set but not recurse, no recursion will happen" if !self[:recurse] and self[:recurselimit] end def self.[](path) return nil unless path super(path.gsub(/\/+/, '/').sub(/\/$/, '')) end def self.instances(base = '/') return self.new(:name => base, :recurse => true, :recurselimit => 1, :audit => :all).recurse_local.values end @depthfirst = false # Determine the user to write files as. def asuser if self.should(:owner) and ! self.should(:owner).is_a?(Symbol) writeable = Puppet::Util::SUIDManager.asuser(self.should(:owner)) { - FileTest.writable?(File.dirname(self[:path])) + FileTest.writable?(::File.dirname(self[:path])) } # If the parent directory is writeable, then we execute # as the user in question. Otherwise we'll rely on # the 'owner' property to do things. asuser = self.should(:owner) if writeable end asuser end def bucket return @bucket if @bucket backup = self[:backup] return nil unless backup return nil if backup =~ /^\./ unless catalog or backup == "puppet" fail "Can not find filebucket for backups without a catalog" end unless catalog and filebucket = catalog.resource(:filebucket, backup) or backup == "puppet" fail "Could not find filebucket #{backup} specified in backup" end return default_bucket unless filebucket @bucket = filebucket.bucket @bucket end def default_bucket Puppet::Type.type(:filebucket).mkdefaultbucket.bucket end # Does the file currently exist? Just checks for whether # we have a stat def exist? stat ? true : false end # We have to do some extra finishing, to retrieve our bucket if # there is one. def finish # Look up our bucket, if there is one bucket super end # Create any children via recursion or whatever. def eval_generate return [] unless self.recurse? recurse #recurse.reject do |resource| # catalog.resource(:file, resource[:path]) #end.each do |child| # catalog.add_resource child # catalog.relationship_graph.add_edge self, child #end end def flush # We want to make sure we retrieve metadata anew on each transaction. @parameters.each do |name, param| param.flush if param.respond_to?(:flush) end @stat = nil end def initialize(hash) # Used for caching clients @clients = {} super # If they've specified a source, we get our 'should' values # from it. 
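# When no ensure is given explicitly, a target implies a symlink and content
# implies a plain file; e.g. (a hedged sketch, "/tmp/x" is a placeholder)
#   Puppet::Type.type(:file).new(:path => "/tmp/x", :content => "hi")[:ensure]
# comes back as :file.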
unless self[:ensure] if self[:target] self[:ensure] = :symlink elsif self[:content] self[:ensure] = :file end end @stat = nil end # Configure discovered resources to be purged. def mark_children_for_purging(children) children.each do |name, child| next if child[:source] child[:ensure] = :absent end end # Create a new file or directory object as a child to the current # object. def newchild(path) - full_path = File.join(self[:path], path) + full_path = ::File.join(self[:path], path) # Add some new values to our original arguments -- these are the ones # set at initialization. We specifically want to exclude any param # values set by the :source property or any default values. # LAK:NOTE This is kind of silly, because the whole point here is that # the values set at initialization should live as long as the resource # but values set by default or by :source should only live for the transaction # or so. Unfortunately, we don't have a straightforward way to manage # the different lifetimes of this data, so we kludge it like this. # The right-side hash wins in the merge. options = @original_parameters.merge(:path => full_path).reject { |param, value| value.nil? } # These should never be passed to our children. [:parent, :ensure, :recurse, :recurselimit, :target, :alias, :source].each do |param| options.delete(param) if options.include?(param) end self.class.new(options) end # Files handle paths specially, because they just lengthen their # path names, rather than including the full parent's title each # time. def pathbuilder # We specifically need to call the method here, so it looks # up our parent in the catalog graph. if parent = parent() # We only need to behave specially when our parent is also # a file if parent.is_a?(self.class) # Remove the parent file name list = parent.pathbuilder list.pop # remove the parent's path info return list << self.ref else return super end else return [self.ref] end end # Should we be purging? def purge? @parameters.include?(:purge) and (self[:purge] == :true or self[:purge] == "true") end # Recursively generate a list of file resources, which will # be used to copy remote files, manage local files, and/or make links # to map to another directory. def recurse children = {} children = recurse_local if self[:recurse] != :remote if self[:target] recurse_link(children) elsif self[:source] recurse_remote(children) end # If we're purging resources, then delete any resource that isn't on the # remote system. mark_children_for_purging(children) if self.purge? result = children.values.sort { |a, b| a[:path] <=> b[:path] } remove_less_specific_files(result) end # This is to fix bug #2296, where two files recurse over the same # set of files. It's a rare case, and when it does happen you're # not likely to have many actual conflicts, which is good, because # this is a pretty inefficient implementation. def remove_less_specific_files(files) - mypath = self[:path].split(File::Separator) + mypath = self[:path].split(::File::Separator) other_paths = catalog.vertices. select { |r| r.is_a?(self.class) and r[:path] != self[:path] }. - collect { |r| r[:path].split(File::Separator) }. + collect { |r| r[:path].split(::File::Separator) }. select { |p| p[0,mypath.length] == mypath } return files if other_paths.empty? files.reject { |file| - path = file[:path].split(File::Separator) + path = file[:path].split(::File::Separator) other_paths.any? { |p| path[0,p.length] == p } } end # A simple method for determining whether we should be recursing. def recurse? 
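# Recursion is considered on only for recurse => true or :remote; purely
# numeric legacy values have already been folded into recurselimit by the
# parameter's munge, so only a boolean or :remote reaches this point.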
return false unless @parameters.include?(:recurse) val = @parameters[:recurse].value !!(val and (val == true or val == :remote)) end # Recurse the target of the link. def recurse_link(children) perform_recursion(self[:target]).each do |meta| if meta.relative_path == "." self[:ensure] = :directory next end children[meta.relative_path] ||= newchild(meta.relative_path) if meta.ftype == "directory" children[meta.relative_path][:ensure] = :directory else children[meta.relative_path][:ensure] = :link children[meta.relative_path][:target] = meta.full_path end end children end # Recurse the file itself, returning a Metadata instance for every found file. def recurse_local result = perform_recursion(self[:path]) return {} unless result result.inject({}) do |hash, meta| next hash if meta.relative_path == "." hash[meta.relative_path] = newchild(meta.relative_path) hash end end # Recurse against our remote file. def recurse_remote(children) sourceselect = self[:sourceselect] total = self[:source].collect do |source| next unless result = perform_recursion(source) return if top = result.find { |r| r.relative_path == "." } and top.ftype != "directory" result.each { |data| data.source = "#{source}/#{data.relative_path}" } break result if result and ! result.empty? and sourceselect == :first result end.flatten # This only happens if we have sourceselect == :all unless sourceselect == :first found = [] total.reject! do |data| result = found.include?(data.relative_path) found << data.relative_path unless found.include?(data.relative_path) result end end total.each do |meta| if meta.relative_path == "." parameter(:source).metadata = meta next end children[meta.relative_path] ||= newchild(meta.relative_path) children[meta.relative_path][:source] = meta.source children[meta.relative_path][:checksum] = :md5 if meta.ftype == "file" children[meta.relative_path].parameter(:source).metadata = meta end children end def perform_recursion(path) Puppet::FileServing::Metadata.indirection.search( path, :links => self[:links], :recurse => (self[:recurse] == :remote ? true : self[:recurse]), :recurselimit => self[:recurselimit], :ignore => self[:ignore], :checksum_type => (self[:source] || self[:content]) ? self[:checksum] : :none ) end # Remove any existing data. This is only used when dealing with # links or directories. def remove_existing(should) return unless s = stat self.fail "Could not back up; will not replace" unless perform_backup unless should.to_s == "link" return if s.ftype.to_s == should.to_s end case s.ftype when "directory" if self[:force] == :true debug "Removing existing directory for replacement with #{should}" FileUtils.rmtree(self[:path]) else notice "Not removing directory; use 'force' to override" end when "link", "file" debug "Removing existing #{s.ftype} for replacement with #{should}" - File.unlink(self[:path]) + ::File.unlink(self[:path]) else self.fail "Could not back up files of type #{s.ftype}" end expire end def retrieve if source = parameter(:source) source.copy_source_values end super end # Set the checksum, from another property. There are multiple # properties that modify the contents of a file, and they need the # ability to make sure that the checksum value is in sync. def setchecksum(sum = nil) if @parameters.include? :checksum if sum @parameters[:checksum].checksum = sum else # If they didn't pass in a sum, then tell checksum to # figure it out. 
currentvalue = @parameters[:checksum].retrieve @parameters[:checksum].checksum = currentvalue end end end # Should this thing be a normal file? This is a relatively complex # way of determining whether we're trying to create a normal file, # and it's here so that the logic isn't visible in the content property. def should_be_file? return true if self[:ensure] == :file # I.e., it's set to something like "directory" return false if e = self[:ensure] and e != :present # The user doesn't really care, apparently if self[:ensure] == :present return true unless s = stat return(s.ftype == "file" ? true : false) end # If we've gotten here, then :ensure isn't set return true if self[:content] return true if stat and stat.ftype == "file" false end # Stat our file. Depending on the value of the 'links' attribute, we # use either 'stat' or 'lstat', and we expect the properties to use the # resulting stat object accordingly (mostly by testing the 'ftype' # value). cached_attr(:stat) do method = :stat # Files are the only types that support links if (self.class.name == :file and self[:links] != :follow) or self.class.name == :tidy method = :lstat end path = self[:path] begin - File.send(method, self[:path]) + ::File.send(method, self[:path]) rescue Errno::ENOENT => error return nil rescue Errno::EACCES => error warning "Could not stat; permission denied" return nil end end # We have to hack this just a little bit, because otherwise we'll get # an error when the target and the contents are created as properties on # the far side. def to_trans(retrieve = true) obj = super obj.delete(:target) if obj[:target] == :notlink obj end # Write out the file. Requires the property name for logging. # Write will be done by the content property, along with checksum computation def write(property) remove_existing(:file) use_temporary_file = write_temporary_file? if use_temporary_file path = "#{self[:path]}.puppettmp_#{rand(10000)}" - path = "#{self[:path]}.puppettmp_#{rand(10000)}" while File.exists?(path) or File.symlink?(path) + path = "#{self[:path]}.puppettmp_#{rand(10000)}" while ::File.exists?(path) or ::File.symlink?(path) else path = self[:path] end mode = self.should(:mode) # might be nil umask = mode ? 000 : 022 mode_int = mode ? mode.to_i(8) : nil - content_checksum = Puppet::Util.withumask(umask) { File.open(path, 'w', mode_int ) { |f| write_content(f) } } + content_checksum = Puppet::Util.withumask(umask) { ::File.open(path, 'w', mode_int ) { |f| write_content(f) } } # And put our new file in place if use_temporary_file # This is only not true when our file is empty. begin fail_if_checksum_is_wrong(path, content_checksum) if validate_checksum? - File.rename(path, self[:path]) + ::File.rename(path, self[:path]) rescue => detail fail "Could not rename temporary file #{path} to #{self[:path]}: #{detail}" ensure # Make sure the created file gets removed - File.unlink(path) if FileTest.exists?(path) + ::File.unlink(path) if FileTest.exists?(path) end end # make sure all of the modes are actually correct property_fix end private # Should we validate the checksum of the file we're writing? def validate_checksum? self[:checksum] !~ /time/ end # Make sure the file we wrote out is what we think it is. def fail_if_checksum_is_wrong(path, content_checksum) newsum = parameter(:checksum).sum_file(path) return if [:absent, nil, content_checksum].include?(newsum) self.fail "File written to disk did not match checksum; discarding changes (#{content_checksum} vs #{newsum})" end # write the current content. 
Note that if there is no content property # simply opening the file with 'w' as done in write is enough to truncate # or write an empty length file. def write_content(file) (content = property(:content)) && content.write(file) end private def write_temporary_file? # unfortunately we don't know the source file size before fetching it # so let's assume the file won't be empty (c = property(:content) and c.length) || (s = @parameters[:source] and 1) end # There are some cases where all of the work does not get done on # file creation/modification, so we have to do some extra checking. def property_fix properties.each do |thing| next unless [:mode, :owner, :group, :seluser, :selrole, :seltype, :selrange].include?(thing.name) # Make sure we get a new stat objct expire currentvalue = thing.retrieve thing.sync unless thing.safe_insync?(currentvalue) end end end # We put all of the properties in separate files, because there are so many # of them. The order these are loaded is important, because it determines # the order they are in the property lit. require 'puppet/type/file/checksum' require 'puppet/type/file/content' # can create the file require 'puppet/type/file/source' # can create the file require 'puppet/type/file/target' # creates a different type of file require 'puppet/type/file/ensure' # can create the file require 'puppet/type/file/owner' require 'puppet/type/file/group' require 'puppet/type/file/mode' require 'puppet/type/file/type' require 'puppet/type/file/selcontext' # SELinux file context require 'puppet/type/file/ctime' require 'puppet/type/file/mtime' diff --git a/lib/puppet/util/classgen.rb b/lib/puppet/util/classgen.rb index ed69c5878..1e99aa873 100644 --- a/lib/puppet/util/classgen.rb +++ b/lib/puppet/util/classgen.rb @@ -1,197 +1,209 @@ module Puppet class ConstantAlreadyDefined < Error; end class SubclassAlreadyDefined < Error; end end module Puppet::Util::ClassGen include Puppet::Util::MethodHelper include Puppet::Util # Create a new subclass. Valid options are: # * :array: An array of existing classes. If specified, the new # class is added to this array. # * :attributes: A hash of attributes to set before the block is # evaluated. # * :block: The block to evaluate in the context of the class. # You can also just pass the block normally, but it will still be evaluated # with class_eval. # * :constant: What to set the constant as. Defaults to the # capitalized name. # * :hash: A hash of existing classes. If specified, the new # class is added to this hash, and it is also used for overwrite tests. # * :overwrite: Whether to overwrite an existing class. # * :parent: The parent class for the generated class. Defaults to # self. # * :prefix: The constant prefix. Default to nothing; if specified, # the capitalized name is appended and the result is set as the constant. def genclass(name, options = {}, &block) genthing(name, Class, options, block) end # Create a new module. Valid options are: # * :array: An array of existing classes. If specified, the new # class is added to this array. # * :attributes: A hash of attributes to set before the block is # evaluated. # * :block: The block to evaluate in the context of the class. # You can also just pass the block normally, but it will still be evaluated # with class_eval. # * :constant: What to set the constant as. Defaults to the # capitalized name. # * :hash: A hash of existing classes. If specified, the new # class is added to this hash, and it is also used for overwrite tests. 
# * :overwrite: Whether to overwrite an existing class. # * :prefix: The constant prefix. Default to nothing; if specified, # the capitalized name is appended and the result is set as the constant. def genmodule(name, options = {}, &block) genthing(name, Module, options, block) end # Remove an existing class def rmclass(name, options) options = symbolize_options(options) const = genconst_string(name, options) retval = false if const_defined?(const) remove_const(const) retval = true end if hash = options[:hash] and hash.include? name hash.delete(name) retval = true end # Let them know whether we did actually delete a subclass. retval end private # Generate the constant to create or remove. def genconst_string(name, options) unless const = options[:constant] prefix = options[:prefix] || "" const = prefix + name2const(name) end const end # This does the actual work of creating our class or module. It's just a # slightly abstract version of genclass. def genthing(name, type, options, block) options = symbolize_options(options) name = symbolize(name.to_s.downcase) if type == Module #evalmethod = :module_eval evalmethod = :class_eval # Create the class, with the correct name. klass = Module.new do class << self attr_reader :name end @name = name end else options[:parent] ||= self evalmethod = :class_eval # Create the class, with the correct name. klass = Class.new(options[:parent]) do @name = name end end # Create the constant as appropriation. handleclassconst(klass, name, options) # Initialize any necessary variables. initclass(klass, options) block ||= options[:block] # Evaluate the passed block if there is one. This should usually # define all of the work. klass.send(evalmethod, &block) if block klass.postinit if klass.respond_to? :postinit # Store the class in hashes or arrays or whatever. storeclass(klass, name, options) klass end + # const_defined? in Ruby 1.9 behaves differently in terms + # of which class hierarchy it polls for nested namespaces + # + # See http://redmine.ruby-lang.org/issues/show/1915 + def is_constant_defined?(const) + if ::RUBY_VERSION =~ /1.9/ + const_defined?(const, false) + else + const_defined?(const) + end + end + # Handle the setting and/or removing of the associated constant. def handleclassconst(klass, name, options) const = genconst_string(name, options) - if const_defined?(const) + if is_constant_defined?(const) if options[:overwrite] Puppet.info "Redefining #{name} in #{self}" remove_const(const) else raise Puppet::ConstantAlreadyDefined, "Class #{const} is already defined in #{self}" end end const_set(const, klass) const end # Perform the initializations on the class. def initclass(klass, options) klass.initvars if klass.respond_to? :initvars if attrs = options[:attributes] attrs.each do |param, value| method = param.to_s + "=" klass.send(method, value) if klass.respond_to? method end end [:include, :extend].each do |method| if set = options[method] set = [set] unless set.is_a?(Array) set.each do |mod| klass.send(method, mod) end end end klass.preinit if klass.respond_to? :preinit end # Convert our name to a constant. def name2const(name) name.to_s.capitalize end # Store the class in the appropriate places. def storeclass(klass, klassname, options) if hash = options[:hash] if hash.include? klassname and ! options[:overwrite] raise Puppet::SubclassAlreadyDefined, "Already a generated class named #{klassname}" end hash[klassname] = klass end # If we were told to stick it in a hash, then do so if array = options[:array] if (klass.respond_to? 
:name and array.find { |c| c.name == klassname } and ! options[:overwrite]) raise Puppet::SubclassAlreadyDefined, "Already a generated class named #{klassname}" end array << klass end end end diff --git a/lib/puppet/util/rdoc/parser.rb b/lib/puppet/util/rdoc/parser.rb index 0f746e2ea..762ce25f0 100644 --- a/lib/puppet/util/rdoc/parser.rb +++ b/lib/puppet/util/rdoc/parser.rb @@ -1,487 +1,493 @@ # Puppet "parser" for the rdoc system # The parser uses puppet parser and traverse the AST to instruct RDoc about # our current structures. It also parses ruby files that could contain # either custom facts or puppet plugins (functions, types...) # rdoc mandatory includes require "rdoc/code_objects" require "puppet/util/rdoc/code_objects" require "rdoc/tokenstream" -require "rdoc/markup/simple_markup/preprocess" -require "rdoc/parsers/parserfactory" + +if ::RUBY_VERSION =~ /1.9/ + require "rdoc/markup/preprocess" + require "rdoc/parser" +else + require "rdoc/markup/simple_markup/preprocess" + require "rdoc/parsers/parserfactory" +end module RDoc class Parser - extend ParserFactory + extend ParserFactory unless ::RUBY_VERSION =~ /1.9/ SITE = "__site__" attr_accessor :input_file_name, :top_level # parser registration into RDoc parse_files_matching(/\.(rb|pp)$/) # called with the top level file def initialize(top_level, file_name, content, options, stats) @options = options @stats = stats @input_file_name = file_name @top_level = PuppetTopLevel.new(top_level) @progress = $stderr unless options.quiet end # main entry point def scan environment = Puppet::Node::Environment.new @known_resource_types = environment.known_resource_types unless environment.known_resource_types.watching_file?(@input_file_name) Puppet.info "rdoc: scanning #{@input_file_name}" if @input_file_name =~ /\.pp$/ @parser = Puppet::Parser::Parser.new(environment) @parser.file = @input_file_name @parser.parse.instantiate('').each do |type| @known_resource_types.add type end end end scan_top_level(@top_level) @top_level end # Due to a bug in RDoc, we need to roll our own find_module_named # The issue is that RDoc tries harder by asking the parent for a class/module # of the name. But by doing so, it can mistakenly use a module of same name # but from which we are not descendant. def find_object_named(container, name) return container if container.name == name container.each_classmodule do |m| return m if m.name == name end nil end # walk down the namespace and lookup/create container as needed def get_class_or_module(container, name) # class ::A -> A is in the top level if name =~ /^::/ container = @top_level end names = name.split('::') final_name = names.pop names.each do |name| prev_container = container container = find_object_named(container, name) container ||= prev_container.add_class(PuppetClass, name, nil) end [container, final_name] end # split_module tries to find if +path+ belongs to the module path # if it does, it returns the module name, otherwise if we are sure # it is part of the global manifest path, "__site__" is returned. # And finally if this path couldn't be mapped anywhere, nil is returned. def split_module(path) # find a module fullpath = File.expand_path(path) Puppet.debug "rdoc: testing #{fullpath}" if fullpath =~ /(.*)\/([^\/]+)\/(?:manifests|plugins|lib)\/.+\.(pp|rb)$/ modpath = $1 name = $2 Puppet.debug "rdoc: module #{name} into #{modpath} ?" 
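# (Hedged example of what the regexp above extracts: for a hypothetical
# /etc/puppet/modules/apache/manifests/init.pp, modpath is
# /etc/puppet/modules and name is apache; the loop below then checks that
# modpath really is one of the configured module paths.)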
Puppet::Module.modulepath.each do |mp| if File.identical?(modpath,mp) Puppet.debug "rdoc: found module #{name}" return name end end end if fullpath =~ /\.(pp|rb)$/ # there can be paths we don't want to scan under modules # imagine a ruby or manifest that would be distributed as part as a module # but we don't want those to be hosted under Puppet::Module.modulepath.each do |mp| # check that fullpath is a descendant of mp dirname = fullpath while (dirname = File.dirname(dirname)) != '/' return nil if File.identical?(dirname,mp) end end end # we are under a global manifests Puppet.debug "rdoc: global manifests" SITE end # create documentation for the top level +container+ def scan_top_level(container) # use the module README as documentation for the module comment = "" readme = File.join(File.dirname(File.dirname(@input_file_name)), "README") comment = File.open(readme,"r") { |f| f.read } if FileTest.readable?(readme) look_for_directives_in(container, comment) unless comment.empty? # infer module name from directory name = split_module(@input_file_name) if name.nil? # skip .pp files that are not in manifests directories as we can't guarantee they're part # of a module or the global configuration. container.document_self = false return end Puppet.debug "rdoc: scanning for #{name}" container.module_name = name container.global=true if name == SITE @stats.num_modules += 1 container, name = get_class_or_module(container,name) mod = container.add_module(PuppetModule, name) mod.record_location(@top_level) mod.comment = comment if @input_file_name =~ /\.pp$/ parse_elements(mod) elsif @input_file_name =~ /\.rb$/ parse_plugins(mod) end end # create documentation for include statements we can find in +code+ # and associate it with +container+ def scan_for_include_or_require(container, code) code = [code] unless code.is_a?(Array) code.each do |stmt| scan_for_include_or_require(container,stmt.children) if stmt.is_a?(Puppet::Parser::AST::ASTArray) if stmt.is_a?(Puppet::Parser::AST::Function) and ['include','require'].include?(stmt.name) stmt.arguments.each do |included| Puppet.debug "found #{stmt.name}: #{included}" container.send("add_#{stmt.name}",Include.new(included.to_s, stmt.doc)) end end end end # create documentation for realize statements we can find in +code+ # and associate it with +container+ def scan_for_realize(container, code) code = [code] unless code.is_a?(Array) code.each do |stmt| scan_for_realize(container,stmt.children) if stmt.is_a?(Puppet::Parser::AST::ASTArray) if stmt.is_a?(Puppet::Parser::AST::Function) and stmt.name == 'realize' stmt.arguments.each do |realized| Puppet.debug "found #{stmt.name}: #{realized}" container.add_realize(Include.new(realized.to_s, stmt.doc)) end end end end # create documentation for global variables assignements we can find in +code+ # and associate it with +container+ def scan_for_vardef(container, code) code = [code] unless code.is_a?(Array) code.each do |stmt| scan_for_vardef(container,stmt.children) if stmt.is_a?(Puppet::Parser::AST::ASTArray) if stmt.is_a?(Puppet::Parser::AST::VarDef) Puppet.debug "rdoc: found constant: #{stmt.name} = #{stmt.value}" container.add_constant(Constant.new(stmt.name.to_s, stmt.value.to_s, stmt.doc)) end end end # create documentation for resources we can find in +code+ # and associate it with +container+ def scan_for_resource(container, code) code = [code] unless code.is_a?(Array) code.each do |stmt| scan_for_resource(container,stmt.children) if stmt.is_a?(Puppet::Parser::AST::ASTArray) if 
stmt.is_a?(Puppet::Parser::AST::Resource) and !stmt.type.nil? begin type = stmt.type.split("::").collect { |s| s.capitalize }.join("::") stmt.instances.each do |inst| title = inst.title.is_a?(Puppet::Parser::AST::ASTArray) ? inst.title.to_s.gsub(/\[(.*)\]/,'\1') : inst.title.to_s Puppet.debug "rdoc: found resource: #{type}[#{title}]" param = [] inst.parameters.children.each do |p| res = {} res["name"] = p.param res["value"] = "#{p.value.to_s}" unless p.value.nil? param << res end container.add_resource(PuppetResource.new(type, title, stmt.doc, param)) end rescue => detail raise Puppet::ParseError, "impossible to parse resource in #{stmt.file} at line #{stmt.line}: #{detail}" end end end end def resource_stmt_to_ref(stmt) type = stmt.type.split("::").collect { |s| s.capitalize }.join("::") title = stmt.title.is_a?(Puppet::Parser::AST::ASTArray) ? stmt.title.to_s.gsub(/\[(.*)\]/,'\1') : stmt.title.to_s param = stmt.params.children.collect do |p| {"name" => p.param, "value" => p.value.to_s} end PuppetResource.new(type, title, stmt.doc, param) end # create documentation for a class named +name+ def document_class(name, klass, container) Puppet.debug "rdoc: found new class #{name}" container, name = get_class_or_module(container, name) superclass = klass.parent superclass = "" if superclass.nil? or superclass.empty? @stats.num_classes += 1 comment = klass.doc look_for_directives_in(container, comment) unless comment.empty? cls = container.add_class(PuppetClass, name, superclass) # it is possible we already encountered this class, while parsing some namespaces # from other classes of other files. But at that time we couldn't know this class superclass # so, now we know it and force it. cls.superclass = superclass cls.record_location(@top_level) # scan class code for include code = klass.code.children if klass.code.is_a?(Puppet::Parser::AST::ASTArray) code ||= klass.code unless code.nil? scan_for_include_or_require(cls, code) scan_for_realize(cls, code) scan_for_resource(cls, code) if Puppet.settings[:document_all] end cls.comment = comment rescue => detail raise Puppet::ParseError, "impossible to parse class '#{name}' in #{klass.file} at line #{klass.line}: #{detail}" end # create documentation for a node def document_node(name, node, container) Puppet.debug "rdoc: found new node #{name}" superclass = node.parent superclass = "" if superclass.nil? or superclass.empty? comment = node.doc look_for_directives_in(container, comment) unless comment.empty? n = container.add_node(name, superclass) n.record_location(@top_level) code = node.code.children if node.code.is_a?(Puppet::Parser::AST::ASTArray) code ||= node.code unless code.nil? scan_for_include_or_require(n, code) scan_for_realize(n, code) scan_for_vardef(n, code) scan_for_resource(n, code) if Puppet.settings[:document_all] end n.comment = comment rescue => detail raise Puppet::ParseError, "impossible to parse node '#{name}' in #{node.file} at line #{node.line}: #{detail}" end # create documentation for a define def document_define(name, define, container) Puppet.debug "rdoc: found new definition #{name}" # find superclas if any @stats.num_methods += 1 # find the parent # split define name by :: to find the complete module hierarchy container, name = get_class_or_module(container,name) # build up declaration declaration = "" define.arguments.each do |arg,value| declaration << "\$#{arg}" unless value.nil? 
  # create documentation for a define
  def document_define(name, define, container)
    Puppet.debug "rdoc: found new definition #{name}"
    # find superclass if any
    @stats.num_methods += 1

    # find the parent
    # split define name by :: to find the complete module hierarchy
    container, name = get_class_or_module(container,name)

    # build up declaration
    declaration = ""
    define.arguments.each do |arg,value|
      declaration << "\$#{arg}"
      unless value.nil?
        declaration << " => "
        case value
        when Puppet::Parser::AST::Leaf
          declaration << "'#{value.value}'"
        when Puppet::Parser::AST::ASTArray
          declaration << "[#{value.children.collect { |v| "'#{v}'" }.join(", ")}]"
        else
          declaration << "#{value.to_s}"
        end
      end
      declaration << ", "
    end
    declaration.chop!.chop! if declaration.size > 1

    # register method into the container
    meth = AnyMethod.new(declaration, name)
    meth.comment = define.doc
    container.add_method(meth)
    look_for_directives_in(container, meth.comment) unless meth.comment.empty?
    meth.params = "( #{declaration} )"
    meth.visibility = :public
    meth.document_self = true
    meth.singleton = false
  rescue => detail
    raise Puppet::ParseError, "impossible to parse definition '#{name}' in #{define.file} at line #{define.line}: #{detail}"
  end

  # Traverse the AST tree and produce code objects that contain the documentation
  def parse_elements(container)
    Puppet.debug "rdoc: scanning manifest"

    @known_resource_types.hostclasses.values.sort { |a,b| a.name <=> b.name }.each do |klass|
      name = klass.name
      if klass.file == @input_file_name
        unless name.empty?
          document_class(name,klass,container)
        else # on main class document vardefs
          code = klass.code.children if klass.code.is_a?(Puppet::Parser::AST::ASTArray)
          code ||= klass.code
          scan_for_vardef(container, code) unless code.nil?
        end
      end
    end

    @known_resource_types.definitions.each do |name, define|
      if define.file == @input_file_name
        document_define(name,define,container)
      end
    end

    @known_resource_types.nodes.each do |name, node|
      if node.file == @input_file_name
        document_node(name.to_s,node,container)
      end
    end
  end

  # create documentation for plugins
  def parse_plugins(container)
    Puppet.debug "rdoc: scanning plugin or fact"
    if @input_file_name =~ /\/facter\/[^\/]+\.rb$/
      parse_fact(container)
    else
      parse_puppet_plugin(container)
    end
  end

  # this is a poor man's custom fact parser :-)
  def parse_fact(container)
    comments = ""
    current_fact = nil
    File.open(@input_file_name) do |of|
      of.each do |line|
        # fetch comments
        if line =~ /^[ \t]*# ?(.*)$/
          comments += $1 + "\n"
        elsif line =~ /^[ \t]*Facter.add\(['"](.*?)['"]\)/
          current_fact = Fact.new($1,{})
          look_for_directives_in(container, comments) unless comments.empty?
          current_fact.comment = comments
          container.add_fact(current_fact)
          current_fact.record_location(@top_level)
          comments = ""
          Puppet.debug "rdoc: found custom fact #{current_fact.name}"
        elsif line =~ /^[ \t]*confine[ \t]*:(.*?)[ \t]*=>[ \t]*(.*)$/
          current_fact.confine = { :type => $1, :value => $2 } unless current_fact.nil?
        else # unknown line type
          comments = ""
        end
      end
    end
  end
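  # Illustrative sketch (editorial addition, not part of the patched file): the
  # line-oriented regexes in parse_fact are aimed at custom facts written in the
  # usual Facter style; the fact name below is hypothetical.
  #
  #   # Returns the example flavour of this node.
  #   Facter.add('example_flavour') do
  #     confine :kernel => 'Linux'
  #     setcode { 'vanilla' }
  #   end
  #
  # The leading comment becomes the fact's documentation and the confine line is
  # recorded as its :type/:value pair.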
  # this is a poor man's puppet plugin parser :-)
  # it doesn't extract doc or desc :-(
  def parse_puppet_plugin(container)
    comments = ""
    current_plugin = nil

    File.open(@input_file_name) do |of|
      of.each do |line|
        # fetch comments
        if line =~ /^[ \t]*# ?(.*)$/
          comments += $1 + "\n"
        elsif line =~ /^[ \t]*newfunction[ \t]*\([ \t]*:(.*?)[ \t]*,[ \t]*:type[ \t]*=>[ \t]*(:rvalue|:lvalue)\)/
          current_plugin = Plugin.new($1, "function")
          container.add_plugin(current_plugin)
          look_for_directives_in(container, comments) unless comments.empty?
          current_plugin.comment = comments
          current_plugin.record_location(@top_level)
          comments = ""
          Puppet.debug "rdoc: found new function plugins #{current_plugin.name}"
        elsif line =~ /^[ \t]*Puppet::Type.newtype[ \t]*\([ \t]*:(.*?)\)/
          current_plugin = Plugin.new($1, "type")
          container.add_plugin(current_plugin)
          look_for_directives_in(container, comments) unless comments.empty?
          current_plugin.comment = comments
          current_plugin.record_location(@top_level)
          comments = ""
          Puppet.debug "rdoc: found new type plugins #{current_plugin.name}"
        elsif line =~ /module Puppet::Parser::Functions/
          # skip
        else # unknown line type
          comments = ""
        end
      end
    end
  end

  # look_for_directives_in scans the current +comment+ for RDoc directives
  def look_for_directives_in(context, comment)
    preprocess = SM::PreProcess.new(@input_file_name, @options.rdoc_include)

    preprocess.handle(comment) do |directive, param|
      case directive
      when "stopdoc"
        context.stop_doc
        ""
      when "startdoc"
        context.start_doc
        context.force_documentation = true
        ""
      when "enddoc"
        #context.done_documenting = true
        #""
        throw :enddoc
      when "main"
        options = Options.instance
        options.main_page = param
        ""
      when "title"
        options = Options.instance
        options.title = param
        ""
      when "section"
        context.set_current_section(param, comment)
        comment.replace("") # 1.8 doesn't support #clear
        break
      else
        warn "Unrecognized directive '#{directive}'"
        break
      end
    end

    remove_private_comments(comment)
  end

  def remove_private_comments(comment)
    comment.gsub!(/^#--.*?^#\+\+/m, '')
    comment.sub!(/^#--.*/m, '')
  end
end
end
diff --git a/spec/unit/network/authstore_spec.rb b/spec/unit/network/authstore_spec.rb
index 0b4dd21de..d477ee301 100644
--- a/spec/unit/network/authstore_spec.rb
+++ b/spec/unit/network/authstore_spec.rb
@@ -1,370 +1,370 @@
#!/usr/bin/env ruby

require File.expand_path(File.dirname(__FILE__) + '/../../spec_helper')

require 'puppet/network/authconfig'

describe Puppet::Network::AuthStore do
  describe "when checking if the acl has some entries" do
    before :each do
      @authstore = Puppet::Network::AuthStore.new
    end

    it "should be empty if no ACE have been entered" do
      @authstore.should be_empty
    end

    it "should not be empty if it is a global allow" do
      @authstore.allow('*')

      @authstore.should_not be_empty
    end

    it "should not be empty if at least one allow has been entered" do
      @authstore.allow('1.1.1.*')

      @authstore.should_not be_empty
    end

    it "should not be empty if at least one deny has been entered" do
      @authstore.deny('1.1.1.*')

      @authstore.should_not be_empty
    end
  end
end

describe Puppet::Network::AuthStore::Declaration do

  ['100.101.99.98','100.100.100.100','1.2.3.4','11.22.33.44'].each { |ip|
    describe "when the pattern is a simple numeric IP such as #{ip}" do
      before :each do
        @declaration = Puppet::Network::AuthStore::Declaration.new(:allow,ip)
      end

      it "should match the specified IP" do
        @declaration.should be_match('www.testsite.org',ip)
      end

      it "should not match other IPs" do
        @declaration.should_not be_match('www.testsite.org','200.101.99.98')
      end
    end
    (1..3).each { |n|
      describe "when the pattern is a IP mask with #{n} numeric segments and a *" do
        before :each do
          @ip_pattern = ip.split('.')[0,n].join('.')+'.*'
          @declaration = Puppet::Network::AuthStore::Declaration.new(:allow,@ip_pattern)
        end

        it "should match an IP in the range" do
          @declaration.should be_match('www.testsite.org',ip)
        end

        it "should not match other IPs" do
          @declaration.should_not be_match('www.testsite.org','200.101.99.98')
        end

        it "should not match IPs that differ in the last non-wildcard segment" do
          other = ip.split('.')
          other[n-1].succ!
          @declaration.should_not be_match('www.testsite.org',other.join('.'))
        end
      end
    }
  }

  describe "when the pattern is a numeric IP with a back reference" do
    before :each do
      @ip = '100.101.$1'
      @declaration = Puppet::Network::AuthStore::Declaration.new(:allow,@ip).interpolate('12.34'.match(/(.*)/))
    end

    it "should match an IP with the appropriate interpolation" do
      @declaration.should be_match('www.testsite.org',@ip.sub(/\$1/,'12.34'))
    end

    it "should not match other IPs" do
      @declaration.should_not be_match('www.testsite.org',@ip.sub(/\$1/,'66.34'))
    end
  end

  [
    "02001:0000:1234:0000:0000:C1C0:ABCD:0876",
    "2001:0000:1234:0000:00001:C1C0:ABCD:0876",
-    " 2001:0000:1234:0000:0000:C1C0:ABCD:0876  0",
+    " 2001:0000:1234:0000:0000:C1C0:ABCD:0876 0",
    "2001:0000:1234: 0000:0000:C1C0:ABCD:0876",
    "3ffe:0b00:0000:0001:0000:0000:000a",
    "FF02:0000:0000:0000:0000:0000:0000:0000:0001",
    "3ffe:b00::1::a",
    "1:2:3::4:5::7:8",
    "12345::6:7:8",
    "1::5:400.2.3.4", "1::5:260.2.3.4", "1::5:256.2.3.4", "1::5:1.256.3.4", "1::5:1.2.256.4", "1::5:1.2.3.256",
    "1::5:300.2.3.4", "1::5:1.300.3.4", "1::5:1.2.300.4", "1::5:1.2.3.300",
    "1::5:900.2.3.4", "1::5:1.900.3.4", "1::5:1.2.900.4", "1::5:1.2.3.900",
    "1::5:300.300.300.300", "1::5:3000.30.30.30",
    "1::400.2.3.4", "1::260.2.3.4", "1::256.2.3.4", "1::1.256.3.4", "1::1.2.256.4", "1::1.2.3.256",
    "1::300.2.3.4", "1::1.300.3.4", "1::1.2.300.4", "1::1.2.3.300",
    "1::900.2.3.4", "1::1.900.3.4", "1::1.2.900.4", "1::1.2.3.900",
    "1::300.300.300.300", "1::3000.30.30.30",
    "::400.2.3.4", "::260.2.3.4", "::256.2.3.4", "::1.256.3.4", "::1.2.256.4", "::1.2.3.256",
    "::300.2.3.4", "::1.300.3.4", "::1.2.300.4", "::1.2.3.300",
    "::900.2.3.4", "::1.900.3.4", "::1.2.900.4", "::1.2.3.900",
    "::300.300.300.300", "::3000.30.30.30",
    "2001:DB8:0:0:8:800:200C:417A:221", # unicast, full
    "FF01::101::2" # multicast, compressed
  ].each { |invalid_ip|
    describe "when the pattern is an invalid IPv6 address such as #{invalid_ip}" do
      it "should raise an exception" do
        lambda { Puppet::Network::AuthStore::Declaration.new(:allow,invalid_ip) }.should raise_error
      end
    end
  }
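  # Illustrative sketch (editorial addition, not part of the patch): roughly how
  # these declarations behave, assuming the match?(name, ip) predicate that the
  # be_match matcher above exercises. Host names below are hypothetical.
  #
  #   decl = Puppet::Network::AuthStore::Declaration.new(:allow, '100.101.99.*')
  #   decl.match?('agent.example.com', '100.101.99.98')  # => truthy (IP within the mask)
  #   decl.match?('agent.example.com', '200.101.99.98')  # => falsy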
"0:0:0:0:0:FFFF:129.144.52.38", # IPv4-mapped IPv6 address, full "::13.1.68.3", # IPv4-compatible IPv6 address, compressed, deprecated "::FFFF:129.144.52.38", # IPv4-mapped IPv6 address, compressed "2001:0DB8:0000:CD30:0000:0000:0000:0000/60", # full, with prefix "2001:0DB8::CD30:0:0:0:0/60", # compressed, with prefix "2001:0DB8:0:CD30::/60", # compressed, with prefix #2 "::/128", # compressed, unspecified address type, non-routable "::1/128", # compressed, loopback address type, non-routable "FF00::/8", # compressed, multicast address type "FE80::/10", # compressed, link-local unicast, non-routable "FEC0::/10", # compressed, site-local unicast, deprecated "127.0.0.1", # standard IPv4, loopback, non-routable "0.0.0.0", # standard IPv4, unspecified, non-routable "255.255.255.255", # standard IPv4 "fe80:0000:0000:0000:0204:61ff:fe9d:f156", "fe80:0:0:0:204:61ff:fe9d:f156", "fe80::204:61ff:fe9d:f156", "fe80:0000:0000:0000:0204:61ff:254.157.241.086", "fe80:0:0:0:204:61ff:254.157.241.86", "fe80::204:61ff:254.157.241.86", "::1", "fe80::", "fe80::1" ].each { |ip| describe "when the pattern is a valid IP such as #{ip}" do before :each do @declaration = Puppet::Network::AuthStore::Declaration.new(:allow,ip) end it "should match the specified IP" do @declaration.should be_match('www.testsite.org',ip) end it "should not match other IPs" do @declaration.should_not be_match('www.testsite.org','200.101.99.98') end end unless ip =~ /:.*\./ # Hybrid IPs aren't supported by ruby's ipaddr } { 'spirit.mars.nasa.gov' => 'a PQDN', 'ratchet.2ndsiteinc.com' => 'a PQDN with digits', 'a.c.ru' => 'a PQDN with short segments', }.each {|pqdn,desc| describe "when the pattern is #{desc}" do before :each do @host = pqdn @declaration = Puppet::Network::AuthStore::Declaration.new(:allow,@host) end it "should match the specified PQDN" do @declaration.should be_match(@host,'200.101.99.98') end it "should not match a similar FQDN" do pending "FQDN consensus" @declaration.should_not be_match(@host+'.','200.101.99.98') end end } ['abc.12seps.edu.phisher.biz','www.google.com','slashdot.org'].each { |host| (1...(host.split('.').length)).each { |n| describe "when the pattern is #{"*."+host.split('.')[-n,n].join('.')}" do before :each do @pattern = "*."+host.split('.')[-n,n].join('.') @declaration = Puppet::Network::AuthStore::Declaration.new(:allow,@pattern) end it "should match #{host}" do @declaration.should be_match(host,'1.2.3.4') end it "should not match www.testsite.gov" do @declaration.should_not be_match('www.testsite.gov','200.101.99.98') end it "should not match hosts that differ in the first non-wildcard segment" do other = host.split('.') other[-n].succ! @declaration.should_not be_match(other.join('.'),'1.2.3.4') end end } } describe "when the pattern is a FQDN" do before :each do @host = 'spirit.mars.nasa.gov.' 
  describe "when the pattern is a FQDN" do
    before :each do
      @host = 'spirit.mars.nasa.gov.'
      @declaration = Puppet::Network::AuthStore::Declaration.new(:allow,@host)
    end

    it "should match the specified FQDN" do
      pending "FQDN consensus"
      @declaration.should be_match(@host,'200.101.99.98')
    end

    it "should not match a similar PQDN" do
      @declaration.should_not be_match(@host[0..-2],'200.101.99.98')
    end
  end

  describe "when the pattern is an opaque string with a back reference" do
    before :each do
      @host = 'c216f41a-f902-4bfb-a222-850dd957bebb'
      @item = "/catalog/#{@host}"
      @pattern = %{^/catalog/([^/]+)$}
      @declaration = Puppet::Network::AuthStore::Declaration.new(:allow,'$1')
    end

    it "should match an IP with the appropriate interpolation" do
      @declaration.interpolate(@item.match(@pattern)).should be_match(@host,'10.0.0.5')
    end
  end

  describe "when the pattern is an opaque string with a back reference and the matched data contains dots" do
    before :each do
      @host = 'admin.mgmt.nym1'
      @item = "/catalog/#{@host}"
      @pattern = %{^/catalog/([^/]+)$}
      @declaration = Puppet::Network::AuthStore::Declaration.new(:allow,'$1')
    end

    it "should match a name with the appropriate interpolation" do
      @declaration.interpolate(@item.match(@pattern)).should be_match(@host,'10.0.0.5')
    end
  end

  describe "when the pattern is an opaque string with a back reference and the matched data contains dots with an initial prefix that looks like an IP address" do
    before :each do
      @host = '01.admin.mgmt.nym1'
      @item = "/catalog/#{@host}"
      @pattern = %{^/catalog/([^/]+)$}
      @declaration = Puppet::Network::AuthStore::Declaration.new(:allow,'$1')
    end

    it "should match a name with the appropriate interpolation" do
      @declaration.interpolate(@item.match(@pattern)).should be_match(@host,'10.0.0.5')
    end
  end

  describe "when comparing patterns" do
    before :each do
      @ip        = Puppet::Network::AuthStore::Declaration.new(:allow,'127.0.0.1')
      @host_name = Puppet::Network::AuthStore::Declaration.new(:allow,'www.hard_knocks.edu')
      @opaque    = Puppet::Network::AuthStore::Declaration.new(:allow,'hey_dude')
    end

    it "should consider ip addresses before host names" do
      (@ip < @host_name).should be_true
    end

    it "should consider ip addresses before opaque strings" do
      (@ip < @opaque).should be_true
    end

    it "should consider host_names before opaque strings" do
      (@host_name < @opaque).should be_true
    end
  end
end
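# Illustrative sketch (editorial addition, not part of the patch): the comparison
# examples above imply that declarations sort with IP patterns first, then host
# names, then opaque strings, assuming Declaration is comparable in the way the
# specs use it with <.
#
#   decls = [
#     Puppet::Network::AuthStore::Declaration.new(:allow, 'hey_dude'),
#     Puppet::Network::AuthStore::Declaration.new(:allow, 'www.hard_knocks.edu'),
#     Puppet::Network::AuthStore::Declaration.new(:allow, '127.0.0.1'),
#   ]
#   decls.sort  # => IP declaration first, opaque string last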