diff --git a/lib/puppet/file_bucket/dipper.rb b/lib/puppet/file_bucket/dipper.rb
index 4adab5df0..8e35bde67 100644
--- a/lib/puppet/file_bucket/dipper.rb
+++ b/lib/puppet/file_bucket/dipper.rb
@@ -1,107 +1,107 @@
+require 'pathname'
 require 'puppet/file_bucket'
 require 'puppet/file_bucket/file'
 require 'puppet/indirector/request'
 
 class Puppet::FileBucket::Dipper
   # This is a transitional implementation that uses REST
   # to access remote filebucket files.
 
   attr_accessor :name
 
   # Create our bucket client
   def initialize(hash = {})
     # Emulate the XMLRPC client
     server = hash[:Server]
     port = hash[:Port] || Puppet[:masterport]
     environment = Puppet[:environment]
 
     if hash.include?(:Path)
       @local_path = hash[:Path]
       @rest_path = nil
     else
       @local_path = nil
       @rest_path = "https://#{server}:#{port}/#{environment}/file_bucket_file/"
     end
   end
 
   def local?
     !! @local_path
   end
 
   # Back up a file to our bucket
   def backup(file)
     raise(ArgumentError, "File #{file} does not exist") unless ::File.exist?(file)
     contents = IO.binread(file)
     begin
       file_bucket_file = Puppet::FileBucket::File.new(contents, :bucket_path => @local_path)
       files_original_path = absolutize_path(file)
       dest_path = "#{@rest_path}#{file_bucket_file.name}/#{files_original_path}"
       file_bucket_path = "#{@rest_path}#{file_bucket_file.checksum_type}/#{file_bucket_file.checksum_data}/#{files_original_path}"
 
       # Make a HEAD request for the file so that we don't waste time
       # uploading it if it already exists in the bucket.
       unless Puppet::FileBucket::File.indirection.head(file_bucket_path)
         Puppet::FileBucket::File.indirection.save(file_bucket_file, dest_path)
       end
 
       return file_bucket_file.checksum_data
     rescue => detail
       puts detail.backtrace if Puppet[:trace]
       raise Puppet::Error, "Could not back up #{file}: #{detail}"
     end
   end
 
   # Retrieve a file by sum.
   def getfile(sum)
     source_path = "#{@rest_path}md5/#{sum}"
     file_bucket_file = Puppet::FileBucket::File.indirection.find(source_path, :bucket_path => @local_path)
 
     raise Puppet::Error, "File not found" unless file_bucket_file
     file_bucket_file.to_s
   end
 
   # Restore the file
   def restore(file,sum)
     restore = true
     if FileTest.exists?(file)
       cursum = Digest::MD5.hexdigest(IO.binread(file))
 
       # if the checksum has changed...
       # this might be extra effort
       if cursum == sum
         restore = false
       end
     end
 
     if restore
       if newcontents = getfile(sum)
         tmp = ""
         newsum = Digest::MD5.hexdigest(newcontents)
         changed = nil
         if FileTest.exists?(file) and ! FileTest.writable?(file)
           changed = ::File.stat(file).mode
           ::File.chmod(changed | 0200, file)
         end
         ::File.open(file, ::File::WRONLY|::File::TRUNC|::File::CREAT) { |of|
           of.binmode
           of.print(newcontents)
         }
         ::File.chmod(changed, file) if changed
       else
         Puppet.err "Could not find file with checksum #{sum}"
         return nil
       end
       return newsum
     else
       return nil
     end
   end
 
   private
   def absolutize_path( path )
-    require 'pathname'
     Pathname.new(path).realpath
   end
 
 end
diff --git a/lib/puppet/interface/action_manager.rb b/lib/puppet/interface/action_manager.rb
index 5c9af4f96..1226507f5 100644
--- a/lib/puppet/interface/action_manager.rb
+++ b/lib/puppet/interface/action_manager.rb
@@ -1,75 +1,74 @@
 require 'puppet/interface/action'
+require 'puppet/interface/action_builder'
 
 module Puppet::Interface::ActionManager
   # Declare that this app can take a specific action, and provide
   # the code to do so.
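For orientation, a usage sketch for the Dipper class whose hunk closes above; it is illustrative only and not part of the patch, and the bucket path and file names are hypothetical. The behaviour shown (backup returning the MD5 checksum, restore taking that checksum back) matches the method bodies above.

    # Local bucket; a remote one would be built with :Server/:Port instead.
    dipper = Puppet::FileBucket::Dipper.new(:Path => "/var/lib/puppet/clientbucket")

    sum = dipper.backup("/etc/hosts")            # => MD5 checksum of the saved content
    dipper.restore("/etc/hosts.recovered", sum)  # writes the bucketed content back out
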
def action(name, &block) - require 'puppet/interface/action_builder' - @actions ||= {} raise "Action #{name} already defined for #{self}" if action?(name) action = Puppet::Interface::ActionBuilder.build(self, name, &block) if action.default and current = get_default_action raise "Actions #{current.name} and #{name} cannot both be default" end @actions[action.name] = action end # This is the short-form of an action definition; it doesn't use the # builder, just creates the action directly from the block. def script(name, &block) @actions ||= {} raise "Action #{name} already defined for #{self}" if action?(name) @actions[name] = Puppet::Interface::Action.new(self, name, :when_invoked => block) end def actions @actions ||= {} result = @actions.keys if self.is_a?(Class) and superclass.respond_to?(:actions) result += superclass.actions elsif self.class.respond_to?(:actions) result += self.class.actions end # We need to uniq the result, because we duplicate actions when they are # fetched to ensure that they have the correct bindings; they shadow the # parent, and uniq implements that. --daniel 2011-06-01 result.uniq.sort end def get_action(name) @actions ||= {} result = @actions[name.to_sym] if result.nil? if self.is_a?(Class) and superclass.respond_to?(:get_action) found = superclass.get_action(name) elsif self.class.respond_to?(:get_action) found = self.class.get_action(name) end if found then # This is not the nicest way to make action equivalent to the Ruby # Method object, rather than UnboundMethod, but it will do for now, # and we only have to make this change in *one* place. --daniel 2011-04-12 result = @actions[name.to_sym] = found.__dup_and_rebind_to(self) end end return result end def get_default_action default = actions.map {|x| get_action(x) }.select {|x| x.default } if default.length > 1 raise "The actions #{default.map(&:name).join(", ")} cannot all be default" end default.first end def action?(name) actions.include?(name.to_sym) end end diff --git a/lib/puppet/network/handler/fileserver.rb b/lib/puppet/network/handler/fileserver.rb index 5548f40fb..c0d5756f1 100755 --- a/lib/puppet/network/handler/fileserver.rb +++ b/lib/puppet/network/handler/fileserver.rb @@ -1,732 +1,732 @@ require 'puppet' require 'puppet/network/authstore' require 'webrick/httpstatus' require 'cgi' require 'delegate' require 'sync' require 'puppet/network/handler' require 'puppet/network/handler' require 'puppet/network/xmlrpc/server' require 'puppet/file_serving' require 'puppet/file_serving/metadata' require 'puppet/network/handler' class Puppet::Network::Handler AuthStoreError = Puppet::AuthStoreError class FileServerError < Puppet::Error; end class FileServer < Handler desc "The interface to Puppet's fileserving abilities." attr_accessor :local CHECKPARAMS = [:mode, :type, :owner, :group, :checksum] # Special filserver module for puppet's module system MODULES = "modules" PLUGINS = "plugins" @interface = XMLRPC::Service::Interface.new("fileserver") { |iface| iface.add_method("string describe(string, string)") iface.add_method("string list(string, string, boolean, array)") iface.add_method("string retrieve(string, string)") } def self.params CHECKPARAMS.dup end # If the configuration file exists, then create (if necessary) a LoadedFile # object to manage it; else, return nil. def configuration # Short-circuit the default case. 
return @configuration if defined?(@configuration) config_path = @passed_configuration_path || Puppet[:fileserverconfig] return nil unless FileTest.exist?(config_path) # The file exists but we don't have a LoadedFile instance for it. @configuration = Puppet::Util::LoadedFile.new(config_path) end # Create our default mounts for modules and plugins. This is duplicated code, # but I'm not really worried about that. def create_default_mounts @mounts = {} Puppet.debug "No file server configuration file; autocreating #{MODULES} mount with default permissions" mount = Mount.new(MODULES) mount.allow("*") @mounts[MODULES] = mount Puppet.debug "No file server configuration file; autocreating #{PLUGINS} mount with default permissions" mount = PluginMount.new(PLUGINS) mount.allow("*") @mounts[PLUGINS] = mount end # Describe a given file. This returns all of the manageable aspects # of that file. def describe(url, links = :follow, client = nil, clientip = nil) links = links.intern if links.is_a? String mount, path = convert(url, client, clientip) mount.debug("Describing #{url} for #{client}") if client # use the mount to resolve the path for us. return "" unless full_path = mount.file_path(path, client) metadata = Puppet::FileServing::Metadata.new(url, :path => full_path, :links => links) return "" unless metadata.exist? begin metadata.collect rescue => detail puts detail.backtrace if Puppet[:trace] Puppet.err detail return "" end metadata.attributes_with_tabs end # Create a new fileserving module. def initialize(hash = {}) @mounts = {} @files = {} @local = hash[:Local] @noreadconfig = true if hash[:Config] == false @passed_configuration_path = hash[:Config] if hash.include?(:Mount) @passedconfig = true raise Puppet::DevError, "Invalid mount hash #{hash[:Mount].inspect}" unless hash[:Mount].is_a?(Hash) hash[:Mount].each { |dir, name| self.mount(dir, name) if FileTest.exists?(dir) } self.mount(nil, MODULES) self.mount(nil, PLUGINS) else @passedconfig = false if configuration readconfig(false) # don't check the file the first time. else create_default_mounts end end end # List a specific directory's contents. def list(url, links = :ignore, recurse = false, ignore = false, client = nil, clientip = nil) mount, path = convert(url, client, clientip) mount.debug "Listing #{url} for #{client}" if client return "" unless mount.path_exists?(path, client) desc = mount.list(path, recurse, ignore, client) if desc.length == 0 mount.notice "Got no information on //#{mount}/#{path}" return "" end desc.collect { |sub| sub.join("\t") }.join("\n") end def local? self.local end # Is a given mount available? def mounted?(name) @mounts.include?(name) end # Mount a new directory with a name. def mount(path, name) if @mounts.include?(name) if @mounts[name] != path raise FileServerError, "#{@mounts[name].path} is already mounted at #{name}" else # it's already mounted; no problem return end end # Let the mounts do their own error-checking. @mounts[name] = Mount.new(name, path) @mounts[name].info "Mounted #{path}" @mounts[name] end # Retrieve a file from the local disk and pass it to the remote # client. def retrieve(url, links = :ignore, client = nil, clientip = nil) links = links.intern if links.is_a? String mount, path = convert(url, client, clientip) mount.info "Sending #{url} to #{client}" if client unless mount.path_exists?(path, client) mount.debug "#{mount} reported that #{path} does not exist" return "" end links = links.intern if links.is_a? 
String if links == :ignore and FileTest.symlink?(path) mount.debug "I think that #{path} is a symlink and we're ignoring them" return "" end str = mount.read_file(path, client) if @local return str else return CGI.escape(str) end end def umount(name) @mounts.delete(name) if @mounts.include? name end private def authcheck(file, mount, client, clientip) # If we're local, don't bother passing in information. if local? client = nil clientip = nil end unless mount.allowed?(client, clientip) mount.warning "#{client} cannot access #{file}" raise Puppet::AuthorizationError, "Cannot access #{mount}" end end # Take a URL and some client info and return a mount and relative # path pair. # def convert(url, client, clientip) readconfig url = URI.unescape(url) mount, stub = splitpath(url, client) authcheck(url, mount, client, clientip) return mount, stub end # Return the mount for the Puppet modules; allows file copying from # the modules. def modules_mount(module_name, client) # Find our environment, if we have one. unless hostname = (client || Facter.value("hostname")) raise ArgumentError, "Could not find hostname" end env = (node = Puppet::Node.indirection.find(hostname)) ? node.environment : nil # And use the environment to look up the module. (mod = Puppet::Node::Environment.new(env).module(module_name) and mod.files?) ? @mounts[MODULES].copy(mod.name, mod.file_directory) : nil end # Read the configuration file. def readconfig(check = true) return if @noreadconfig return unless configuration return if check and ! @configuration.changed? newmounts = {} begin File.open(@configuration.file) { |f| mount = nil count = 1 f.each_line { |line| case line when /^\s*#/; next # skip comments when /^\s*$/; next # skip blank lines when /\[([-\w]+)\]/ name = $1 raise FileServerError, "#{newmounts[name]} is already mounted as #{name} in #{@configuration.file}" if newmounts.include?(name) mount = Mount.new(name) newmounts[name] = mount when /^\s*(\w+)\s+(.+)$/ var = $1 value = $2 case var when "path" raise FileServerError.new("No mount specified for argument #{var} #{value}") unless mount if mount.name == MODULES Puppet.warning "The '#{mount.name}' module can not have a path. 
Ignoring attempt to set it" else begin mount.path = value rescue FileServerError => detail Puppet.err "Removing mount #{mount.name}: #{detail}" newmounts.delete(mount.name) end end when "allow" raise FileServerError.new("No mount specified for argument #{var} #{value}") unless mount value.split(/\s*,\s*/).each { |val| begin mount.info "allowing #{val} access" mount.allow(val) rescue AuthStoreError => detail puts detail.backtrace if Puppet[:trace] raise FileServerError.new( detail.to_s, count, @configuration.file) end } when "deny" raise FileServerError.new("No mount specified for argument #{var} #{value}") unless mount value.split(/\s*,\s*/).each { |val| begin mount.info "denying #{val} access" mount.deny(val) rescue AuthStoreError => detail raise FileServerError.new( detail.to_s, count, @configuration.file) end } else raise FileServerError.new("Invalid argument '#{var}'", count, @configuration.file) end else raise FileServerError.new("Invalid line '#{line.chomp}'", count, @configuration.file) end count += 1 } } rescue Errno::EACCES => detail Puppet.err "FileServer error: Cannot read #{@configuration}; cannot serve" #raise Puppet::Error, "Cannot read #{@configuration}" rescue Errno::ENOENT => detail Puppet.err "FileServer error: '#{@configuration}' does not exist; cannot serve" end unless newmounts[MODULES] Puppet.debug "No #{MODULES} mount given; autocreating with default permissions" mount = Mount.new(MODULES) mount.allow("*") newmounts[MODULES] = mount end unless newmounts[PLUGINS] Puppet.debug "No #{PLUGINS} mount given; autocreating with default permissions" mount = PluginMount.new(PLUGINS) mount.allow("*") newmounts[PLUGINS] = mount end unless newmounts[PLUGINS].valid? Puppet.debug "No path given for #{PLUGINS} mount; creating a special PluginMount" # We end up here if the user has specified access rules for # the plugins mount, without specifying a path (which means # they want to have the default behaviour for the mount, but # special access control). So we need to move all the # user-specified access controls into the new PluginMount # object... mount = PluginMount.new(PLUGINS) # Yes, you're allowed to hate me for this. mount.instance_variable_set( :@declarations, newmounts[PLUGINS].instance_variable_get(:@declarations) ) newmounts[PLUGINS] = mount end # Verify each of the mounts are valid. # We let the check raise an error, so that it can raise an error # pointing to the specific problem. newmounts.each { |name, mount| raise FileServerError, "Invalid mount #{name}" unless mount.valid? } @mounts = newmounts end # Split the path into the separate mount point and path. def splitpath(dir, client) # the dir is based on one of the mounts # so first retrieve the mount path mount = nil path = nil if dir =~ %r{/([-\w]+)} # Strip off the mount name. mount_name, path = dir.sub(%r{^/}, '').split(File::Separator, 2) unless mount = modules_mount(mount_name, client) unless mount = @mounts[mount_name] raise FileServerError, "Fileserver module '#{mount_name}' not mounted" end end else raise FileServerError, "Fileserver error: Invalid path '#{dir}'" end if path.nil? or path == '' path = '/' elsif path # Remove any double slashes that might have occurred path = URI.unescape(path.gsub(/\/\//, "/")) end return mount, path end def to_s "fileserver" end # A simple class for wrapping mount points. Instances of this class # don't know about the enclosing object; they're mainly just used for # authorization. 
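As context for the readconfig parser above (which this patch does not change), the fileserver.conf format it accepts looks like the following sketch; the mount name, path, and host patterns are made up for illustration.

    [extra_files]
      path  /srv/puppet/extra_files
      allow *.example.com
      deny  badhost.example.com
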
class Mount < Puppet::Network::AuthStore attr_reader :name @@syncs = {} @@files = {} Puppet::Util.logmethods(self, true) # Create a map for a specific client. def clientmap(client) { "h" => client.sub(/\..*$/, ""), "H" => client, "d" => client.sub(/[^.]+\./, "") # domain name } end # Replace % patterns as appropriate. def expand(path, client = nil) # This map should probably be moved into a method. map = nil if client map = clientmap(client) else Puppet.notice "No client; expanding '#{path}' with local host" # Else, use the local information map = localmap end path.gsub(/%(.)/) do |v| key = $1 if key == "%" "%" else map[key] || v end end end # Do we have any patterns in our path, yo? def expandable? if defined?(@expandable) @expandable else false end end # Return a fully qualified path, given a short path and # possibly a client name. def file_path(relative_path, node = nil) full_path = path(node) unless full_path p self raise ArgumentError.new("Mounts without paths are not usable") unless full_path end # If there's no relative path name, then we're serving the mount itself. return full_path unless relative_path and relative_path != "/" File.join(full_path, relative_path) end # Create out object. It must have a name. def initialize(name, path = nil) unless name =~ %r{^[-\w]+$} raise FileServerError, "Invalid name format '#{name}'" end @name = name if path self.path = path else @path = nil end @files = {} super() end def fileobj(path, links, client) obj = nil if obj = @files[file_path(path, client)] # This can only happen in local fileserving, but it's an # important one. It'd be nice if we didn't just set # the check params every time, but I'm not sure it's worth # the effort. obj[:audit] = CHECKPARAMS else obj = Puppet::Type.type(:file).new( :name => file_path(path, client), :audit => CHECKPARAMS ) @files[file_path(path, client)] = obj end if links == :manage links = :follow end # This, ah, might be completely redundant obj[:links] = links unless obj[:links] == links obj end # Read the contents of the file at the relative path given. def read_file(relpath, client) File.read(file_path(relpath, client)) end # Cache this manufactured map, since if it's used it's likely # to get used a lot. def localmap unless defined?(@@localmap) @@localmap = { "h" => Facter.value("hostname"), "H" => [Facter.value("hostname"), Facter.value("domain")].join("."), "d" => Facter.value("domain") } end @@localmap end # Return the path as appropriate, expanding as necessary. def path(client = nil) if expandable? return expand(@path, client) else return @path end end # Set the path. def path=(path) # FIXME: For now, just don't validate paths with replacement # patterns in them. if path =~ /%./ # Mark that we're expandable. @expandable = true else raise FileServerError, "#{path} does not exist" unless FileTest.exists?(path) raise FileServerError, "#{path} is not a directory" unless FileTest.directory?(path) raise FileServerError, "#{path} is not readable" unless FileTest.readable?(path) @expandable = false end @path = path end # Verify that the path given exists within this mount's subtree. # def path_exists?(relpath, client = nil) File.exists?(file_path(relpath, client)) end # Return the current values for the object. def properties(obj) obj.retrieve.inject({}) { |props, ary| props[ary[0].name] = ary[1]; props } end # Retrieve a specific directory relative to a mount point. # If they pass in a client, then expand as necessary. 
def subdir(dir = nil, client = nil) basedir = self.path(client) dirname = if dir File.join(basedir, *dir.split("/")) else basedir end dirname end def sync(path) @@syncs[path] ||= Sync.new @@syncs[path] end def to_s "mount[#{@name}]" end # Verify our configuration is valid. This should really check to # make sure at least someone will be allowed, but, eh. def valid? if name == MODULES return @path.nil? else return ! @path.nil? end end # Return a new mount with the same properties as +self+, except # with a different name and path. def copy(name, path) result = self.clone result.path = path result.instance_variable_set(:@name, name) result end # List the contents of the relative path +relpath+ of this mount. # # +recurse+ is the number of levels to recurse into the tree, # or false to provide no recursion or true if you just want to # go for broke. # # +ignore+ is an array of filenames to ignore when traversing # the list. # # The return value of this method is a complex nest of arrays, # which describes a directory tree. Each file or directory is # represented by an array, where the first element is the path # of the file (relative to the root of the mount), and the # second element is the type. A directory is represented by an # array as well, where the first element is a "directory" array, # while the remaining elements are other file or directory # arrays. Confusing? Hell yes. As an added bonus, all names # must start with a slash, because... well, I'm fairly certain # a complete explanation would involve the words "crack pipe" # and "bad batch". # def list(relpath, recurse, ignore, client = nil) abspath = file_path(relpath, client) if FileTest.exists?(abspath) if FileTest.directory?(abspath) and recurse return reclist(abspath, recurse, ignore) else return [["/", File.stat(abspath).ftype]] end end nil end + require 'puppet/file_serving' + require 'puppet/file_serving/fileset' def reclist(abspath, recurse, ignore) - require 'puppet/file_serving' - require 'puppet/file_serving/fileset' if recurse.is_a?(Fixnum) args = { :recurse => true, :recurselimit => recurse, :links => :follow } else args = { :recurse => recurse, :links => :follow } end args[:ignore] = ignore if ignore fs = Puppet::FileServing::Fileset.new(abspath, args) ary = fs.files.collect do |file| if file == "." file = "/" else file = File.join("/", file ) end stat = fs.stat(File.join(abspath, file)) next if stat.nil? [ file, stat.ftype ] end ary.compact end end # A special mount class specifically for the plugins mount -- just # has some magic to effectively do a union mount of the 'plugins' # directory of all modules. # class PluginMount < Mount def path(client) '' end def mod_path_exists?(mod, relpath, client = nil) ! mod.plugin(relpath).nil? end def path_exists?(relpath, client = nil) !valid_modules(client).find { |mod| mod.plugin(relpath) }.nil? end def valid? 
true end def mod_file_path(mod, relpath, client = nil) File.join(mod, PLUGINS, relpath) end def file_path(relpath, client = nil) return nil unless mod = valid_modules(client).find { |m| m.plugin(relpath) } mod.plugin(relpath) end # create a list of files by merging all modules def list(relpath, recurse, ignore, client = nil) result = [] valid_modules(client).each do |mod| if modpath = mod.plugin(relpath) if FileTest.directory?(modpath) and recurse ary = reclist(modpath, recurse, ignore) ary ||= [] result += ary else result += [["/", File.stat(modpath).ftype]] end end end result end private def valid_modules(client) Puppet::Node::Environment.new.modules.find_all { |mod| mod.exist? } end def add_to_filetree(f, filetree) first, rest = f.split(File::SEPARATOR, 2) end end end end diff --git a/lib/puppet/parser/functions/fqdn_rand.rb b/lib/puppet/parser/functions/fqdn_rand.rb index 93ab98bcd..dd8d56800 100644 --- a/lib/puppet/parser/functions/fqdn_rand.rb +++ b/lib/puppet/parser/functions/fqdn_rand.rb @@ -1,12 +1,13 @@ +require 'digest/md5' + Puppet::Parser::Functions::newfunction(:fqdn_rand, :type => :rvalue, :doc => "Generates random numbers based on the node's fqdn. Generated random values will be a range from 0 up to and excluding n, where n is the first parameter. The second argument specifies a number to add to the seed and is optional, for example: $random_number = fqdn_rand(30) $random_number_seed = fqdn_rand(30,30)") do |args| - require 'digest/md5' max = args.shift srand(Digest::MD5.hexdigest([lookupvar('::fqdn'),args].join(':')).hex) rand(max).to_s end diff --git a/lib/puppet/parser/functions/md5.rb b/lib/puppet/parser/functions/md5.rb index f7a4f7222..864aae048 100644 --- a/lib/puppet/parser/functions/md5.rb +++ b/lib/puppet/parser/functions/md5.rb @@ -1,5 +1,5 @@ -Puppet::Parser::Functions::newfunction(:md5, :type => :rvalue, :doc => "Returns a MD5 hash value from a provided string.") do |args| - require 'md5' +require 'md5' +Puppet::Parser::Functions::newfunction(:md5, :type => :rvalue, :doc => "Returns a MD5 hash value from a provided string.") do |args| Digest::MD5.hexdigest(args[0]) end diff --git a/lib/puppet/parser/functions/sha1.rb b/lib/puppet/parser/functions/sha1.rb index 1e7d5abe4..c52df4d28 100644 --- a/lib/puppet/parser/functions/sha1.rb +++ b/lib/puppet/parser/functions/sha1.rb @@ -1,5 +1,5 @@ -Puppet::Parser::Functions::newfunction(:sha1, :type => :rvalue, :doc => "Returns a SHA1 hash value from a provided string.") do |args| - require 'digest/sha1' +require 'digest/sha1' +Puppet::Parser::Functions::newfunction(:sha1, :type => :rvalue, :doc => "Returns a SHA1 hash value from a provided string.") do |args| Digest::SHA1.hexdigest(args[0]) end diff --git a/lib/puppet/parser/functions/template.rb b/lib/puppet/parser/functions/template.rb index 8105e2cac..5e4b00e1e 100644 --- a/lib/puppet/parser/functions/template.rb +++ b/lib/puppet/parser/functions/template.rb @@ -1,25 +1,23 @@ Puppet::Parser::Functions::newfunction(:template, :type => :rvalue, :doc => "Evaluate a template and return its value. See [the templating docs](http://docs.puppetlabs.com/guides/templating.html) for more information. Note that if multiple templates are specified, their output is all concatenated and returned as the output of the function.") do |vals| - require 'erb' - vals.collect do |file| # Use a wrapper, so the template can't get access to the full # Scope object. 
debug "Retrieving template #{file}" wrapper = Puppet::Parser::TemplateWrapper.new(self) wrapper.file = file begin wrapper.result rescue => detail info = detail.backtrace.first.split(':') raise Puppet::ParseError, "Failed to parse template #{file}:\n Filepath: #{info[0]}\n Line: #{info[1]}\n Detail: #{detail}\n" end end.join("") end diff --git a/lib/puppet/parser/type_loader.rb b/lib/puppet/parser/type_loader.rb index 65579a820..861a098a5 100644 --- a/lib/puppet/parser/type_loader.rb +++ b/lib/puppet/parser/type_loader.rb @@ -1,173 +1,172 @@ +require 'find' require 'puppet/node/environment' class Puppet::Parser::TypeLoader include Puppet::Node::Environment::Helper # Helper class that makes sure we don't try to import the same file # more than once from either the same thread or different threads. class Helper include MonitorMixin def initialize super # These hashes are indexed by filename @state = {} # :doing or :done @thread = {} # if :doing, thread that's doing the parsing @cond_var = {} # if :doing, condition var that will be signaled when done. end # Execute the supplied block exactly once per file, no matter how # many threads have asked for it to run. If another thread is # already executing it, wait for it to finish. If this thread is # already executing it, return immediately without executing the # block. # # Note: the reason for returning immediately if this thread is # already executing the block is to handle the case of a circular # import--when this happens, we attempt to recursively re-parse a # file that we are already in the process of parsing. To prevent # an infinite regress we need to simply do nothing when the # recursive import is attempted. def do_once(file) need_to_execute = synchronize do case @state[file] when :doing if @thread[file] != Thread.current @cond_var[file].wait end false when :done false else @state[file] = :doing @thread[file] = Thread.current @cond_var[file] = new_cond true end end if need_to_execute begin yield ensure synchronize do @state[file] = :done @thread.delete(file) @cond_var.delete(file).broadcast end end end end end # Import our files. def import(file, current_file = nil) return if Puppet[:ignoreimport] # use a path relative to the file doing the importing if current_file dir = current_file.sub(%r{[^/]+$},'').sub(/\/$/, '') else dir = "." end if dir == "" dir = "." end pat = file modname, files = Puppet::Parser::Files.find_manifests(pat, :cwd => dir, :environment => environment) if files.size == 0 raise Puppet::ImportError.new("No file(s) found for import of '#{pat}'") end loaded_asts = [] files.each do |file| unless Puppet::Util.absolute_path?(file) file = File.join(dir, file) end @loading_helper.do_once(file) do loaded_asts << parse_file(file) end end loaded_asts.inject([]) do |loaded_types, ast| loaded_types + known_resource_types.import_ast(ast, modname) end end def import_all - require 'find' - module_names = [] # Collect the list of all known modules environment.modulepath.each do |path| Dir.chdir(path) do Dir.glob("*").each do |dir| next unless FileTest.directory?(dir) module_names << dir end end end module_names.uniq! # And then load all files from each module, but (relying on system # behavior) only load files from the first module of a given name. E.g., # given first/foo and second/foo, only files from first/foo will be loaded. 
module_names.each do |name| mod = Puppet::Module.new(name, :environment => environment) Find.find(File.join(mod.path, "manifests")) do |path| if path =~ /\.pp$/ or path =~ /\.rb$/ import(path) end end end end def known_resource_types environment.known_resource_types end def initialize(env) self.environment = env @loading_helper = Helper.new end # Try to load the object with the given fully qualified name. def try_load_fqname(type, fqname) return nil if fqname == "" # special-case main. name2files(fqname).each do |filename| begin imported_types = import(filename) if result = imported_types.find { |t| t.type == type and t.name == fqname } Puppet.debug "Automatically imported #{fqname} from #{filename} into #{environment}" return result end rescue Puppet::ImportError => detail # We couldn't load the item # I'm not convienced we should just drop these errors, but this # preserves existing behaviours. end end # Nothing found. return nil end def parse_file(file) Puppet.debug("importing '#{file}' in environment #{environment}") parser = Puppet::Parser::Parser.new(environment) parser.file = file return parser.parse end private # Return a list of all file basenames that should be tried in order # to load the object with the given fully qualified name. def name2files(fqname) result = [] ary = fqname.split("::") while ary.length > 0 result << ary.join(File::SEPARATOR) ary.pop end return result end end diff --git a/lib/puppet/rails/benchmark.rb b/lib/puppet/rails/benchmark.rb index e1e92bb79..741b6d5bd 100644 --- a/lib/puppet/rails/benchmark.rb +++ b/lib/puppet/rails/benchmark.rb @@ -1,63 +1,63 @@ require 'benchmark' +require 'yaml' + module Puppet::Rails::Benchmark $benchmarks = {:accumulated => {}} def time_debug? Puppet::Rails::TIME_DEBUG end def railsmark(message) result = nil seconds = Benchmark.realtime { result = yield } Puppet.debug(message + " in %0.2f seconds" % seconds) $benchmarks[message] = seconds if time_debug? result end def debug_benchmark(message) return yield unless Puppet::Rails::TIME_DEBUG railsmark(message) { yield } end # Collect partial benchmarks to be logged when they're # all done. # These are always low-level debugging so we only # print them if time_debug is enabled. def accumulate_benchmark(message, label) return yield unless time_debug? $benchmarks[:accumulated][message] ||= Hash.new(0) $benchmarks[:accumulated][message][label] += Benchmark.realtime { yield } end # Log the accumulated marks. def log_accumulated_marks(message) return unless time_debug? return if $benchmarks[:accumulated].empty? or $benchmarks[:accumulated][message].nil? or $benchmarks[:accumulated][message].empty? $benchmarks[:accumulated][message].each do |label, value| Puppet.debug(message + ("(#{label})") + (" in %0.2f seconds" % value)) end end def write_benchmarks return unless time_debug? 
branch = %x{git branch}.split("\n").find { |l| l =~ /^\*/ }.sub("* ", '') file = "/tmp/time_debugging.yaml" - require 'yaml' - if FileTest.exist?(file) data = YAML.load_file(file) else data = {} end data[branch] = $benchmarks Puppet::Util.replace_file(file, 0644) { |f| f.print YAML.dump(data) } end end diff --git a/lib/puppet/resource/type_collection.rb b/lib/puppet/resource/type_collection.rb index 89b0a16ed..e432d52a0 100644 --- a/lib/puppet/resource/type_collection.rb +++ b/lib/puppet/resource/type_collection.rb @@ -1,214 +1,215 @@ +require 'puppet/parser/type_loader' + class Puppet::Resource::TypeCollection attr_reader :environment attr_accessor :parse_failed def clear @hostclasses.clear @definitions.clear @nodes.clear @watched_files.clear end def initialize(env) @environment = env.is_a?(String) ? Puppet::Node::Environment.new(env) : env @hostclasses = {} @definitions = {} @nodes = {} # So we can keep a list and match the first-defined regex @node_list = [] @watched_files = {} end def import_ast(ast, modname) ast.instantiate(modname).each do |instance| add(instance) end end def inspect "TypeCollection" + { :hostclasses => @hostclasses.keys, :definitions => @definitions.keys, :nodes => @nodes.keys }.inspect end def <<(thing) add(thing) self end def add(instance) if instance.type == :hostclass and other = @hostclasses[instance.name] and other.type == :hostclass other.merge(instance) return other end method = "add_#{instance.type}" send(method, instance) instance.resource_type_collection = self instance end def add_hostclass(instance) dupe_check(instance, @hostclasses) { |dupe| "Class '#{instance.name}' is already defined#{dupe.error_context}; cannot redefine" } dupe_check(instance, @definitions) { |dupe| "Definition '#{instance.name}' is already defined#{dupe.error_context}; cannot be redefined as a class" } @hostclasses[instance.name] = instance instance end def hostclass(name) @hostclasses[munge_name(name)] end def add_node(instance) dupe_check(instance, @nodes) { |dupe| "Node '#{instance.name}' is already defined#{dupe.error_context}; cannot redefine" } @node_list << instance @nodes[instance.name] = instance instance end def loader - require 'puppet/parser/type_loader' @loader ||= Puppet::Parser::TypeLoader.new(environment) end def node(name) name = munge_name(name) if node = @nodes[name] return node end @node_list.each do |node| next unless node.name_is_regex? return node if node.match(name) end nil end def node_exists?(name) @nodes[munge_name(name)] end def nodes? @nodes.length > 0 end def add_definition(instance) dupe_check(instance, @hostclasses) { |dupe| "'#{instance.name}' is already defined#{dupe.error_context} as a class; cannot redefine as a definition" } dupe_check(instance, @definitions) { |dupe| "Definition '#{instance.name}' is already defined#{dupe.error_context}; cannot be redefined" } @definitions[instance.name] = instance end def definition(name) @definitions[munge_name(name)] end def find_node(namespaces, name) @nodes[munge_name(name)] end def find_hostclass(namespaces, name) find_or_load(namespaces, name, :hostclass) end def find_definition(namespaces, name) find_or_load(namespaces, name, :definition) end [:hostclasses, :nodes, :definitions].each do |m| define_method(m) do instance_variable_get("@#{m}").dup end end def require_reparse? @parse_failed || stale? end def stale? @watched_files.values.detect { |file| file.changed? 
} end def version return @version if defined?(@version) if environment[:config_version] == "" @version = Time.now.to_i return @version end @version = Puppet::Util.execute([environment[:config_version]]).strip rescue Puppet::ExecutionFailure => e raise Puppet::ParseError, "Unable to set config_version: #{e.message}" end def watch_file(file) @watched_files[file] = Puppet::Util::LoadedFile.new(file) end def watching_file?(file) @watched_files.include?(file) end private # Return a list of all possible fully-qualified names that might be # meant by the given name, in the context of namespaces. def resolve_namespaces(namespaces, name) name = name.downcase if name =~ /^::/ # name is explicitly fully qualified, so just return it, sans # initial "::". return [name.sub(/^::/, '')] end if name == "" # The name "" has special meaning--it always refers to a "main" # hostclass which contains all toplevel resources. return [""] end namespaces = [namespaces] unless namespaces.is_a?(Array) namespaces = namespaces.collect { |ns| ns.downcase } result = [] namespaces.each do |namespace| ary = namespace.split("::") # Search each namespace nesting in innermost-to-outermost order. while ary.length > 0 result << "#{ary.join("::")}::#{name}" ary.pop end # Finally, search the toplevel namespace. result << name end return result.uniq end # Resolve namespaces and find the given object. Autoload it if # necessary. def find_or_load(namespaces, name, type) resolve_namespaces(namespaces, name).each do |fqname| if result = send(type, fqname) || loader.try_load_fqname(type, fqname) return result end end # Nothing found. return nil end def munge_name(name) name.to_s.downcase end def dupe_check(instance, hash) return unless dupe = hash[instance.name] message = yield dupe instance.fail Puppet::ParseError, message end end diff --git a/lib/puppet/ssl/base.rb b/lib/puppet/ssl/base.rb index f9bbcac64..a5a94a7a1 100644 --- a/lib/puppet/ssl/base.rb +++ b/lib/puppet/ssl/base.rb @@ -1,79 +1,78 @@ +require 'openssl/digest' require 'puppet/ssl' # The base class for wrapping SSL instances. class Puppet::SSL::Base # For now, use the YAML separator. SEPARATOR = "\n---\n" def self.from_multiple_s(text) text.split(SEPARATOR).collect { |inst| from_s(inst) } end def self.to_multiple_s(instances) instances.collect { |inst| inst.to_s }.join(SEPARATOR) end def self.wraps(klass) @wrapped_class = klass end def self.wrapped_class raise(Puppet::DevError, "#{self} has not declared what class it wraps") unless defined?(@wrapped_class) @wrapped_class end attr_accessor :name, :content # Is this file for the CA? def ca? name == Puppet::SSL::Host.ca_name end def generate raise Puppet::DevError, "#{self.class} did not override 'generate'" end def initialize(name) @name = name.to_s.downcase end # Read content from disk appropriately. def read(path) @content = wrapped_class.new(File.read(path)) end # Convert our thing to pem. def to_s return "" unless content content.to_pem end # Provide the full text of the thing we're dealing with. 
def to_text return "" unless content content.to_text end def fingerprint(md = :MD5) - require 'openssl/digest' - # ruby 1.8.x openssl digest constants are string # but in 1.9.x they are symbols mds = md.to_s.upcase if OpenSSL::Digest.constants.include?(mds) md = mds elsif OpenSSL::Digest.constants.include?(mds.to_sym) md = mds.to_sym else raise ArgumentError, "#{md} is not a valid digest algorithm for fingerprinting certificate #{name}" end OpenSSL::Digest.const_get(md).hexdigest(content.to_der).scan(/../).join(':').upcase end private def wrapped_class self.class.wrapped_class end end diff --git a/lib/puppet/util/autoload.rb b/lib/puppet/util/autoload.rb index 8051d01af..246ff5fc3 100644 --- a/lib/puppet/util/autoload.rb +++ b/lib/puppet/util/autoload.rb @@ -1,156 +1,158 @@ require 'puppet/util/warnings' # Autoload paths, either based on names or all at once. class Puppet::Util::Autoload require 'puppet/util/autoload/file_cache' include Puppet::Util include Puppet::Util::Warnings include Puppet::Util::Autoload::FileCache @autoloaders = {} @loaded = [] class << self attr_reader :autoloaders private :autoloaders end # Send [], []=, and :clear to the @autloaders hash Puppet::Util.classproxy self, :autoloaders, "[]", "[]=" # List all loaded files. def self.list_loaded @loaded.sort { |a,b| a[0] <=> b[0] }.collect do |path, hash| "#{path}: #{hash[:file]}" end end # Has a given path been loaded? This is used for testing whether a # changed file should be loaded or just ignored. This is only # used in network/client/master, when downloading plugins, to # see if a given plugin is currently loaded and thus should be # reloaded. def self.loaded?(path) path = path.to_s.sub(/\.rb$/, '') @loaded.include?(path) end # Save the fact that a given path has been loaded. This is so # we can load downloaded plugins if they've already been loaded # into memory. def self.loaded(file) $" << file + ".rb" unless $".include?(file) @loaded << file unless @loaded.include?(file) end attr_accessor :object, :path, :objwarn, :wrap def initialize(obj, path, options = {}) @path = path.to_s raise ArgumentError, "Autoload paths cannot be fully qualified" if absolute_path?(@path) @object = obj self.class[obj] = self options.each do |opt, value| opt = opt.intern if opt.is_a? String begin self.send(opt.to_s + "=", value) rescue NoMethodError raise ArgumentError, "#{opt} is not a valid option" end end @wrap = true unless defined?(@wrap) end # Load a single plugin by name. We use 'load' here so we can reload a # given plugin. def load(name,env=nil) path = name.to_s + ".rb" searchpath(env).each do |dir| file = File.join(dir, path) next unless file_exist?(file) begin Kernel.load file, @wrap name = symbolize(name) loaded name, file return true rescue SystemExit,NoMemoryError raise rescue Exception => detail puts detail.backtrace if Puppet[:trace] raise Puppet::Error, "Could not autoload #{name}: #{detail}" end end false end # Mark the named object as loaded. Note that this supports unqualified # queries, while we store the result as a qualified query in the class. def loaded(name, file) self.class.loaded(File.join(@path, name.to_s)) end # Indicate whether the specfied plugin has been loaded. def loaded?(name) self.class.loaded?(File.join(@path, name.to_s)) end # Load all instances that we can. This uses require, rather than load, # so that already-loaded files don't get reloaded unnecessarily. def loadall # Load every instance of everything we can find. 
files_to_load.each do |file| name = File.basename(file).chomp(".rb").intern next if loaded?(name) begin Kernel.require file loaded(name, file) rescue SystemExit,NoMemoryError raise rescue Exception => detail puts detail.backtrace if Puppet[:trace] raise Puppet::Error, "Could not autoload #{file}: #{detail}" end end end def files_to_load searchpath.map { |dir| Dir.glob("#{dir}/*.rb") }.flatten end # The list of directories to search through for loadable plugins. def searchpath(env=nil) search_directories(env).uniq.collect { |d| File.join(d, @path) }.find_all { |d| FileTest.directory?(d) } end def module_directories(env=nil) - # We have to require this late in the process because otherwise we might have - # load order issues. - require 'puppet/node/environment' + # We have to require this late in the process because otherwise we might + # have load order issues. Since require is much slower than defined?, we + # can skip that - and save some 2,155 invocations of require in my real + # world testing. --daniel 2012-07-10 + require 'puppet/node/environment' unless defined?(Puppet::Node::Environment) real_env = Puppet::Node::Environment.new(env) # We're using a per-thread cache of said module directories, so that # we don't scan the filesystem each time we try to load something with # this autoload instance. But since we don't want to cache for the eternity # this env_module_directories gets reset after the compilation on the master. # This is also reset after an agent ran. # One of the side effect of this change is that this module directories list will be # shared among all autoload that we have running at a time. But that won't be an issue # as by definition those directories are shared by all autoload. Thread.current[:env_module_directories] ||= {} Thread.current[:env_module_directories][real_env] ||= real_env.modulepath.collect do |dir| Dir.entries(dir).reject { |f| f =~ /^\./ }.collect { |f| File.join(dir, f) } end.flatten.collect { |d| [File.join(d, "plugins"), File.join(d, "lib")] }.flatten.find_all do |d| FileTest.directory?(d) end end def search_directories(env=nil) [module_directories(env), Puppet[:libdir].split(File::PATH_SEPARATOR), $LOAD_PATH].flatten end end diff --git a/lib/puppet/util/checksums.rb b/lib/puppet/util/checksums.rb index dd90caa5f..a505bfc72 100644 --- a/lib/puppet/util/checksums.rb +++ b/lib/puppet/util/checksums.rb @@ -1,148 +1,143 @@ +require 'digest/md5' +require 'digest/sha1' + # A stand-alone module for calculating checksums # in a generic way. module Puppet::Util::Checksums class FakeChecksum def <<(*args) self end end # Is the provided string a checksum? def checksum?(string) string =~ /^\{(\w{3,5})\}\S+/ end # Strip the checksum type from an existing checksum def sumdata(checksum) checksum =~ /^\{(\w+)\}(.+)/ ? $2 : nil end # Strip the checksum type from an existing checksum def sumtype(checksum) checksum =~ /^\{(\w+)\}/ ? $1 : nil end # Calculate a checksum using Digest::MD5. def md5(content) - require 'digest/md5' Digest::MD5.hexdigest(content) end # Calculate a checksum of the first 500 chars of the content using Digest::MD5. def md5lite(content) md5(content[0..511]) end # Calculate a checksum of a file's content using Digest::MD5. def md5_file(filename, lite = false) - require 'digest/md5' - digest = Digest::MD5.new checksum_file(digest, filename, lite) end # Calculate a checksum of the first 500 chars of a file's content using Digest::MD5. 
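The comment added in module_directories above states the motivation for this whole patch: a require that has already succeeded still has to rescan loaded features, whereas a defined? guard is a cheap constant lookup. A rough standalone illustration of that claim, not part of the patch; the iteration count and the digest/md5 example are incidental.

    require 'benchmark'
    require 'digest/md5'

    n = 10_000
    Benchmark.bm(20) do |bm|
      # Every call re-checks the already-loaded feature.
      bm.report("redundant require") { n.times { require 'digest/md5' } }
      # The guard used in autoload.rb above: skip require once the constant exists.
      bm.report("defined? guard")    { n.times { require 'digest/md5' unless defined?(Digest::MD5) } }
    end
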
def md5lite_file(filename) md5_file(filename, true) end def md5_stream(&block) - require 'digest/md5' digest = Digest::MD5.new yield digest digest.hexdigest end alias :md5lite_stream :md5_stream # Return the :mtime timestamp of a file. def mtime_file(filename) File.stat(filename).send(:mtime) end # by definition this doesn't exist # but we still need to execute the block given def mtime_stream noop_digest = FakeChecksum.new yield noop_digest nil end def mtime(content) "" end # Calculate a checksum using Digest::SHA1. def sha1(content) - require 'digest/sha1' Digest::SHA1.hexdigest(content) end # Calculate a checksum of the first 500 chars of the content using Digest::SHA1. def sha1lite(content) sha1(content[0..511]) end # Calculate a checksum of a file's content using Digest::SHA1. def sha1_file(filename, lite = false) - require 'digest/sha1' - digest = Digest::SHA1.new checksum_file(digest, filename, lite) end # Calculate a checksum of the first 500 chars of a file's content using Digest::SHA1. def sha1lite_file(filename) sha1_file(filename, true) end def sha1_stream - require 'digest/sha1' digest = Digest::SHA1.new yield digest digest.hexdigest end alias :sha1lite_stream :sha1_stream # Return the :ctime of a file. def ctime_file(filename) File.stat(filename).send(:ctime) end alias :ctime_stream :mtime_stream def ctime(content) "" end # Return a "no checksum" def none_file(filename) "" end def none_stream noop_digest = FakeChecksum.new yield noop_digest "" end def none(content) "" end private # Perform an incremental checksum on a file. def checksum_file(digest, filename, lite = false) buffer = lite ? 512 : 4096 File.open(filename, 'rb') do |file| while content = file.read(buffer) digest << content break if lite end end digest.hexdigest end end diff --git a/lib/puppet/util/diff.rb b/lib/puppet/util/diff.rb index b8bbbe6af..fa20de1b9 100644 --- a/lib/puppet/util/diff.rb +++ b/lib/puppet/util/diff.rb @@ -1,76 +1,77 @@ +require 'tempfile' + # Provide a diff between two strings. module Puppet::Util::Diff include Puppet::Util require 'tempfile' def diff(old, new) return '' unless diff_cmd = Puppet[:diff] and diff_cmd != "" command = [diff_cmd] if args = Puppet[:diff_args] and args != "" command << args end command << old << new execute(command, :failonfail => false) end module_function :diff # return diff string of two input strings # format defaults to unified # context defaults to 3 lines def lcs_diff(data_old, data_new, format=:unified, context_lines=3) unless Puppet.features.diff? Puppet.warning "Cannot provide diff without the diff/lcs Ruby library" return "" end data_old = data_old.split(/\n/).map! { |e| e.chomp } data_new = data_new.split(/\n/).map! { |e| e.chomp } output = "" diffs = ::Diff::LCS.diff(data_old, data_new) return output if diffs.empty? oldhunk = hunk = nil file_length_difference = 0 diffs.each do |piece| begin hunk = ::Diff::LCS::Hunk.new( data_old, data_new, piece, context_lines, file_length_difference) file_length_difference = hunk.file_length_difference next unless oldhunk # Hunks may overlap, which is why we need to be careful when our # diff includes lines of context. Otherwise, we might print # redundant lines. 
if (context_lines > 0) and hunk.overlaps?(oldhunk) hunk.unshift(oldhunk) else output << oldhunk.diff(format) end ensure oldhunk = hunk output << "\n" end end # Handle the last remaining hunk output << oldhunk.diff(format) << "\n" end def string_file_diff(path, string) - require 'tempfile' tempfile = Tempfile.new("puppet-diffing") tempfile.open tempfile.print string tempfile.close notice "\n" + diff(path, tempfile.path) tempfile.delete end end diff --git a/lib/puppet/util/filetype.rb b/lib/puppet/util/filetype.rb index fdc6ccd9b..51b0d9175 100755 --- a/lib/puppet/util/filetype.rb +++ b/lib/puppet/util/filetype.rb @@ -1,283 +1,281 @@ # Basic classes for reading, writing, and emptying files. Not much # to see here. require 'puppet/util/selinux' +require 'tempfile' require 'fileutils' class Puppet::Util::FileType attr_accessor :loaded, :path, :synced include Puppet::Util::SELinux class << self attr_accessor :name include Puppet::Util::ClassGen end # Create a new filetype. def self.newfiletype(name, &block) @filetypes ||= {} klass = genclass( name, :block => block, :prefix => "FileType", :hash => @filetypes ) # Rename the read and write methods, so that we're sure they # maintain the stats. klass.class_eval do # Rename the read method define_method(:real_read, instance_method(:read)) define_method(:read) do begin val = real_read @loaded = Time.now if val return val.gsub(/# HEADER.*\n/,'') else return "" end rescue Puppet::Error => detail raise rescue => detail puts detail.backtrace if Puppet[:trace] raise Puppet::Error, "#{self.class} could not read #{@path}: #{detail}" end end # And then the write method define_method(:real_write, instance_method(:write)) define_method(:write) do |text| begin val = real_write(text) @synced = Time.now return val rescue Puppet::Error => detail raise rescue => detail puts detail.backtrace if Puppet[:debug] raise Puppet::Error, "#{self.class} could not write #{@path}: #{detail}" end end end end def self.filetype(type) @filetypes[type] end # Pick or create a filebucket to use. def bucket @bucket ||= Puppet::Type.type(:filebucket).mkdefaultbucket.bucket end def initialize(path) raise ArgumentError.new("Path is nil") if path.nil? @path = path end # Operate on plain files. newfiletype(:flat) do # Back the file up before replacing it. def backup bucket.backup(@path) if File.exists?(@path) end # Read the file. def read if File.exist?(@path) File.read(@path) else return nil end end # Remove the file. def remove File.unlink(@path) if File.exist?(@path) end # Overwrite the file. def write(text) - require "tempfile" tf = Tempfile.new("puppet") tf.print text; tf.flush FileUtils.cp(tf.path, @path) tf.close # If SELinux is present, we need to ensure the file has its expected context set_selinux_default_context(@path) end end # Operate on plain files. newfiletype(:ram) do @@tabs = {} def self.clear @@tabs.clear end def initialize(path) super @@tabs[@path] ||= "" end # Read the file. def read Puppet.info "Reading #{@path} from RAM" @@tabs[@path] end # Remove the file. def remove Puppet.info "Removing #{@path} from RAM" @@tabs[@path] = "" end # Overwrite the file. def write(text) Puppet.info "Writing #{@path} to RAM" @@tabs[@path] = text end end # Handle Linux-style cron tabs. 
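A usage sketch for the :flat filetype defined above, now relying on the file-level require 'tempfile'; the scratch path is hypothetical. write stages the text in a Tempfile and copies it into place, exactly as in the method body, and the wrapped read strips any "# HEADER" line.

    flat = Puppet::Util::FileType.filetype(:flat).new("/tmp/filetype-demo")
    flat.write("managed content\n")   # staged in a Tempfile, then copied into place
    puts flat.read                    # returns "" if the file does not exist
    flat.remove
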
newfiletype(:crontab) do def initialize(user) self.path = user end def path=(user) begin @uid = Puppet::Util.uid(user) rescue Puppet::Error => detail raise Puppet::Error, "Could not retrieve user #{user}" end # XXX We have to have the user name, not the uid, because some # systems *cough*linux*cough* require it that way @path = user end # Read a specific @path's cron tab. def read %x{#{cmdbase} -l 2>/dev/null} end # Remove a specific @path's cron tab. def remove if %w{Darwin FreeBSD}.include?(Facter.value("operatingsystem")) %x{/bin/echo yes | #{cmdbase} -r 2>/dev/null} else %x{#{cmdbase} -r 2>/dev/null} end end # Overwrite a specific @path's cron tab; must be passed the @path name # and the text with which to create the cron tab. def write(text) IO.popen("#{cmdbase()} -", "w") { |p| p.print text } end private # Only add the -u flag when the @path is different. Fedora apparently # does not think I should be allowed to set the @path to my own user name def cmdbase cmd = nil if @uid == Puppet::Util::SUIDManager.uid || Facter.value(:operatingsystem) == "HP-UX" return "crontab" else return "crontab -u #{@path}" end end end # SunOS has completely different cron commands; this class implements # its versions. newfiletype(:suntab) do # Read a specific @path's cron tab. def read output = Puppet::Util.execute(%w{crontab -l}, :uid => @path) return "" if output.include?("can't open your crontab") raise Puppet::Error, "User #{@path} not authorized to use cron" if output.include?("you are not authorized to use cron") return output rescue => detail raise Puppet::Error, "Could not read crontab for #{@path}: #{detail}" end # Remove a specific @path's cron tab. def remove Puppet::Util.execute(%w{crontab -r}, :uid => @path) rescue => detail raise Puppet::Error, "Could not remove crontab for #{@path}: #{detail}" end # Overwrite a specific @path's cron tab; must be passed the @path name # and the text with which to create the cron tab. def write(text) puts text - require "tempfile" output_file = Tempfile.new("puppet") fh = output_file.open fh.print text fh.close # We have to chown the stupid file to the user. File.chown(Puppet::Util.uid(@path), nil, output_file.path) begin Puppet::Util.execute(["crontab", output_file.path], :uid => @path) rescue => detail raise Puppet::Error, "Could not write crontab for #{@path}: #{detail}" end output_file.delete end end # Support for AIX crontab with output different than suntab's crontab command. newfiletype(:aixtab) do # Read a specific @path's cron tab. def read output = Puppet::Util.execute(%w{crontab -l}, :uid => @path) raise Puppet::Error, "User #{@path} not authorized to use cron" if output.include?("You are not authorized to use the cron command") return output rescue => detail raise Puppet::Error, "Could not read crontab for #{@path}: #{detail}" end # Remove a specific @path's cron tab. def remove Puppet::Util.execute(%w{crontab -r}, :uid => @path) rescue => detail raise Puppet::Error, "Could not remove crontab for #{@path}: #{detail}" end # Overwrite a specific @path's cron tab; must be passed the @path name # and the text with which to create the cron tab. def write(text) - require "tempfile" output_file = Tempfile.new("puppet") fh = output_file.open fh.print text fh.close # We have to chown the stupid file to the user. 
File.chown(Puppet::Util.uid(@path), nil, output_file.path) begin Puppet::Util.execute(["crontab", output_file.path], :uid => @path) rescue => detail raise Puppet::Error, "Could not write crontab for #{@path}: #{detail}" ensure output_file.delete end end end end diff --git a/lib/puppet/util/run_mode.rb b/lib/puppet/util/run_mode.rb index 6028aef29..5395a8ef9 100644 --- a/lib/puppet/util/run_mode.rb +++ b/lib/puppet/util/run_mode.rb @@ -1,81 +1,82 @@ +require 'etc' + module Puppet module Util class RunMode def initialize(name) @name = name.to_sym end @@run_modes = Hash.new {|h, k| h[k] = RunMode.new(k)} attr :name def self.[](name) @@run_modes[name] end def master? name == :master end def agent? name == :agent end def user? name == :user end def conf_dir which_dir( (Puppet.features.microsoft_windows? ? File.join(Dir::COMMON_APPDATA, "PuppetLabs", "puppet", "etc") : "/etc/puppet"), "~/.puppet" ) end def var_dir which_dir( (Puppet.features.microsoft_windows? ? File.join(Dir::COMMON_APPDATA, "PuppetLabs", "puppet", "var") : "/var/lib/puppet"), "~/.puppet/var" ) end def run_dir "$vardir/run" end def logopts if master? { :default => "$vardir/log", :mode => 0750, :owner => "service", :group => "service", :desc => "The Puppet log directory." } else ["$vardir/log", "The Puppet log directory."] end end private def which_dir( global, user ) #FIXME: we should test if we're user "puppet" # there's a comment that suggests that we do that # and we currently don't. expand_path case when name == :master; global when Puppet.features.root?; global else user end end def expand_path( dir ) - require 'etc' ENV["HOME"] ||= Etc.getpwuid(Process.uid).dir File.expand_path(dir) end end end end diff --git a/lib/puppet/util/suidmanager.rb b/lib/puppet/util/suidmanager.rb index f270c40e2..513078d34 100644 --- a/lib/puppet/util/suidmanager.rb +++ b/lib/puppet/util/suidmanager.rb @@ -1,173 +1,173 @@ +require 'facter' require 'puppet/util/warnings' require 'forwardable' require 'etc' module Puppet::Util::SUIDManager include Puppet::Util::Warnings extend Forwardable # Note groups= is handled specially due to a bug in OS X 10.6, 10.7, # and probably upcoming releases... to_delegate_to_process = [ :euid=, :euid, :egid=, :egid, :uid=, :uid, :gid=, :gid, :groups ] to_delegate_to_process.each do |method| def_delegator Process, method module_function method end def osx_maj_ver return @osx_maj_ver unless @osx_maj_ver.nil? - require 'facter' # 'kernel' is available without explicitly loading all facts if Facter.value('kernel') != 'Darwin' @osx_maj_ver = false return @osx_maj_ver end # But 'macosx_productversion_major' requires it. Facter.loadfacts @osx_maj_ver = Facter.value('macosx_productversion_major') end module_function :osx_maj_ver def groups=(grouplist) begin return Process.groups = grouplist rescue Errno::EINVAL => e #We catch Errno::EINVAL as some operating systems (OS X in particular) can # cause troubles when using Process#groups= to change *this* user / process # list of supplementary groups membership. This is done via Ruby's function # "static VALUE proc_setgroups(VALUE obj, VALUE ary)" which is effectively # a wrapper for "int setgroups(size_t size, const gid_t *list)" (part of SVr4 # and 4.3BSD but not in POSIX.1-2001) that fails and sets errno to EINVAL. # # This does not appear to be a problem with Ruby but rather an issue on the # operating system side. Therefore we catch the exception and look whether # we run under OS X or not -- if so, then we acknowledge the problem and # re-throw the exception otherwise. 
if osx_maj_ver and not osx_maj_ver.empty? return true else raise e end end end module_function :groups= def self.root? return Process.uid == 0 unless Puppet.features.microsoft_windows? require 'puppet/util/windows/user' Puppet::Util::Windows::User.admin? end # Methods to handle changing uid/gid of the running process. In general, # these will noop or fail on Windows, and require root to change to anything # but the current uid/gid (which is a noop). # Runs block setting euid and egid if provided then restoring original ids. # If running on Windows or without root, the block will be run with the # current euid/egid. def asuser(new_uid=nil, new_gid=nil) return yield if Puppet.features.microsoft_windows? return yield unless root? return yield unless new_uid or new_gid old_euid, old_egid = self.euid, self.egid begin change_privileges(new_uid, new_gid, false) yield ensure change_privileges(new_uid ? old_euid : nil, old_egid, false) end end module_function :asuser # If `permanently` is set, will permanently change the uid/gid of the # process. If not, it will only set the euid/egid. If only uid is supplied, # the primary group of the supplied gid will be used. If only gid is # supplied, only gid will be changed. This method will fail if used on # Windows. def change_privileges(uid=nil, gid=nil, permanently=false) return unless uid or gid unless gid uid = convert_xid(:uid, uid) gid = Etc.getpwuid(uid).gid end change_group(gid, permanently) change_user(uid, permanently) if uid end module_function :change_privileges # Changes the egid of the process if `permanently` is not set, otherwise # changes gid. This method will fail if used on Windows, or attempting to # change to a different gid without root. def change_group(group, permanently=false) gid = convert_xid(:gid, group) raise Puppet::Error, "No such group #{group}" unless gid if permanently Process::GID.change_privilege(gid) else Process.egid = gid end end module_function :change_group # As change_group, but operates on uids. If changing user permanently, # supplementary groups will be set the to default groups for the new uid. def change_user(user, permanently=false) uid = convert_xid(:uid, user) raise Puppet::Error, "No such user #{user}" unless uid if permanently # If changing uid, we must be root. So initgroups first here. initgroups(uid) Process::UID.change_privilege(uid) else # We must be root to initgroups, so initgroups before dropping euid if # we're root, otherwise elevate euid before initgroups. # change euid (to root) first. if Process.euid == 0 initgroups(uid) Process.euid = uid else Process.euid = uid initgroups(uid) end end end module_function :change_user # Make sure the passed argument is a number. def convert_xid(type, id) map = {:gid => :group, :uid => :user} raise ArgumentError, "Invalid id type #{type}" unless map.include?(type) ret = Puppet::Util.send(type, id) if ret == nil raise Puppet::Error, "Invalid #{map[type]}: #{id}" end ret end module_function :convert_xid # Initialize primary and supplemental groups to those of the target user. We # take the UID and manually look up their details in the system database, # including username and primary group. This method will fail on Windows, or # if used without root to initgroups of another user. 
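A sketch of the asuser block form defined above; the uid/gid values are hypothetical. On Windows, or when the caller is not root, asuser simply yields with the current credentials, and the ensure branch restores the original euid/egid afterwards.

    Puppet::Util::SUIDManager.asuser(1000, 1000) do
      # Runs with euid/egid 1000 when the caller is root.
      File.open("/tmp/asuser-demo", "w") { |f| f.puts "written as uid 1000" }
    end
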
   def initgroups(uid)
     pwent = Etc.getpwuid(uid)
     Process.initgroups(pwent.name, pwent.gid)
   end
   module_function :initgroups
 
   def run_and_capture(command, new_uid=nil, new_gid=nil)
     output = Puppet::Util.execute(command, :failonfail => false, :combine => true, :uid => new_uid, :gid => new_gid)
     [output, $CHILD_STATUS.dup]
   end
   module_function :run_and_capture
 end
diff --git a/lib/puppet/util/windows/user.rb b/lib/puppet/util/windows/user.rb
index 6722a0e2e..0f3232369 100644
--- a/lib/puppet/util/windows/user.rb
+++ b/lib/puppet/util/windows/user.rb
@@ -1,44 +1,43 @@
 require 'puppet/util/windows'
 require 'win32/security'
+require 'facter'
 
 module Puppet::Util::Windows::User
   include Windows::Security
   extend Windows::Security
 
   def admin?
-    require 'facter'
-
     majversion = Facter.value(:kernelmajversion)
     return false unless majversion
 
     # if Vista or later, check for unrestricted process token
     return Win32::Security.elevated_security? unless majversion.to_f < 6.0
 
     # otherwise 2003 or less
     check_token_membership
   end
   module_function :admin?
 
   def check_token_membership
     sid = 0.chr * 80
     size = [80].pack('L')
     member = 0.chr * 4
 
     unless CreateWellKnownSid(WinBuiltinAdministratorsSid, nil, sid, size)
       raise Puppet::Util::Windows::Error.new("Failed to create administrators SID")
     end
 
     unless IsValidSid(sid)
       raise Puppet::Util::Windows::Error.new("Invalid SID")
     end
 
     unless CheckTokenMembership(nil, sid, member)
       raise Puppet::Util::Windows::Error.new("Failed to check membership")
     end
 
     # Is administrators SID enabled in calling thread's access token?
     member.unpack('L')[0] == 1
   end
   module_function :check_token_membership
 end
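Every hunk in this patch applies the same transformation: a require that used to run inside a frequently-called method now runs once at file load, guarded with defined? where load order is a concern (as in lib/puppet/util/autoload.rb above). The before/after shape, shown schematically rather than as part of the patch:

    # Before: the dependency is re-checked against the loaded-feature list on
    # every call to a hot method.
    def module_directories(env = nil)
      require 'puppet/node/environment'   # paid on every invocation
      # ...
    end

    # After: load once at file scope; when load order matters, guard with
    # defined?, which is far cheaper than a redundant require.
    require 'puppet/node/environment' unless defined?(Puppet::Node::Environment)

    def module_directories(env = nil)
      # ...
    end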