diff --git a/bin/puppetmasterd b/bin/puppetmasterd
index a00186b68..33e4f436d 100755
--- a/bin/puppetmasterd
+++ b/bin/puppetmasterd
@@ -1,284 +1,285 @@
#!/usr/bin/env ruby
#
# = Synopsis
#
# The central puppet server. Functions as a certificate authority by default.
#
# = Usage
#
#   puppetmasterd [-D|--daemonize|--no-daemonize] [-d|--debug] [-h|--help]
#     [-l|--logdest <file>|console|syslog] [--nobucket] [--nonodes]
#     [-v|--verbose] [-V|--version]
#
# = Description
#
# This is the puppet central daemon.
#
# = Options
#
# Note that any configuration parameter that's valid in the configuration file
# is also a valid long argument. For example, 'ssldir' is a valid configuration
# parameter, so you can specify '--ssldir <ssldir>' as an argument.
#
# See the configuration file documentation at
# http://reductivelabs.com/projects/puppet/reference/configref.html for
# the full list of acceptable parameters. A commented list of all
# configuration options can also be generated by running puppetmasterd with
# '--genconfig'.
#
# daemonize::
#   Send the process into the background. This is the default.
#
# no-daemonize::
#   Do not send the process into the background.
#
# debug::
#   Enable full debugging.
#
# help::
#   Print this help message.
#
# logdest::
#   Where to send messages. Choose between syslog, the console, and a log file.
#   Defaults to sending messages to syslog, or the console
#   if debugging or verbosity is enabled.
#
# nobucket::
#   Do not function as a file bucket.
#
# nonodes::
#   Do not use individual node designations; each node will receive the result
#   of evaluating the entire configuration.
#
# noreports::
#   Do not start the reports server.
#
# verbose::
#   Enable verbosity.
#
# version::
#   Print the puppet version number and exit.
#
# = Example
#
#   puppetmasterd
#
# = Author
#
# Luke Kanies
#
# = Copyright
#
# Copyright (c) 2005 Reductive Labs, LLC
# Licensed under the GNU Public License

# Do an initial trap, so that cancels don't get a stack trace.
trap(:INT) do
    $stderr.puts "Cancelling startup"
    exit(0)
end

require 'getoptlong'
require 'puppet'
+require 'puppet/network/handler'
require 'puppet/sslcertificates'

options = [
    [ "--debug", "-d", GetoptLong::NO_ARGUMENT ],
    [ "--help", "-h", GetoptLong::NO_ARGUMENT ],
    [ "--logdest", "-l", GetoptLong::REQUIRED_ARGUMENT ],
    [ "--nobucket", GetoptLong::NO_ARGUMENT ],
    [ "--noreports", GetoptLong::NO_ARGUMENT ],
    [ "--nonodes", GetoptLong::NO_ARGUMENT ],
    [ "--verbose", "-v", GetoptLong::NO_ARGUMENT ],
    [ "--version", "-V", GetoptLong::NO_ARGUMENT ]
]

# Add all of the config parameters as valid options.
Puppet.settings.addargs(options)

result = GetoptLong.new(*options)

master = {}
ca = {}
report = {}
bucket = {}

options = {
    :havereport => true,
    :havebucket => true,
    :havemaster => true,
    :setdest => false,
    :verbose => false,
    :debug => false
}

begin
    result.each { |opt,arg|
        case opt
            # First check to see if the argument is a valid configuration parameter;
            # if so, set it. NOTE: there is a catch-all at the bottom for defaults.rb
            when "--debug"
                options[:debug] = true
            when "--help"
                if Puppet.features.usage?
RDoc::usage && exit else puts "No help available unless you have RDoc::usage installed" exit end when "--noreports" options[:havereport] = false when "--nomaster" options[:havemaster] = false when "--nobucket" options[:havebucket] = false when "--nonodes" master[:UseNodes] = false when "--logdest" begin Puppet::Util::Log.newdestination(arg) options[:setdest] = true rescue => detail if Puppet[:debug] puts detail.backtrace end $stderr.puts detail.to_s end when "--version" puts "%s" % Puppet.version exit when "--verbose" options[:verbose] = true else Puppet.settings.handlearg(opt, arg) end } rescue GetoptLong::InvalidOption => detail $stderr.puts "Try '#{$0} --help'" #$stderr.puts detail exit(1) end # Now parse the config Puppet.parse_config # Handle the logging settings. if options[:debug] or options[:verbose] if options[:debug] Puppet::Util::Log.level = :debug else Puppet::Util::Log.level = :info end unless Puppet[:daemonize] Puppet::Util::Log.newdestination(:console) options[:setdest] = true end end unless options[:setdest] Puppet::Util::Log.newdestination(:syslog) end Puppet.genconfig Puppet.genmanifest # A temporary solution, to at least make the master work for now. Puppet::Node::Facts.terminus_class = :yaml require 'etc' handlers = { :Status => {}, :FileServer => {} } if options[:havemaster] handlers[:Master] = master end if options[:havereport] handlers[:Report] = report end if Puppet[:ca] handlers[:CA] = ca end if options[:havebucket] handlers[:FileBucket] = bucket end if Puppet[:parseonly] begin Puppet::Network::Handler.master.new(master) rescue => detail if Puppet[:trace] puts detail.backtrace end $stderr.puts detail exit(32) end # we would have already exited if the file weren't syntactically correct exit(0) end webserver = server = nil begin case Puppet[:servertype] when "webrick" # use the default, um, everything require 'puppet/network/http_server/webrick' webserver = server = Puppet::Network::HTTPServer::WEBrick.new(:Handlers => handlers) when "mongrel": require 'puppet/network/http_server/mongrel' server = Puppet::Network::HTTPServer::Mongrel.new(handlers) addr = Puppet[:bindaddress] if addr == "" addr = "127.0.0.1" end webserver = Mongrel::HttpServer.new(addr, Puppet[:masterport]) webserver.register("/", server) else Puppet.err "Invalid server type %s" % Puppet[:servertype] exit(45) end rescue => detail if Puppet[:trace] puts detail.backtrace end $stderr.puts detail exit(1) end if Process.uid == 0 begin Puppet::Util.chuser rescue => detail if Puppet[:debug] puts detail.backtrace end $stderr.puts "Could not change user to %s: %s" % [Puppet[:user], detail] exit(39) end end # Mongrel doesn't shut down like webrick; we really need to write plugins for it. if Puppet[:servertype] == "webrick" Puppet.newservice(server) end Puppet.settraps if Puppet[:daemonize] server.daemonize end Puppet.notice "Starting Puppet server version %s" % [Puppet.version] case Puppet[:servertype] when "webrick" Puppet.start when "mongrel": webserver.run.join end diff --git a/lib/puppet.rb b/lib/puppet.rb index 18037cdc1..57f84d5f7 100644 --- a/lib/puppet.rb +++ b/lib/puppet.rb @@ -1,432 +1,433 @@ # Try to load rubygems. Hey rubygems, I hate you. 
begin require 'rubygems' rescue LoadError end # see the bottom of the file for further inclusions require 'singleton' require 'facter' require 'puppet/error' require 'puppet/external/event-loop' require 'puppet/util' require 'puppet/util/log' require 'puppet/util/autoload' require 'puppet/util/settings' require 'puppet/util/feature' require 'puppet/util/suidmanager' #------------------------------------------------------------ # the top-level module # # all this really does is dictate how the whole system behaves, through # preferences for things like debugging # # it's also a place to find top-level commands like 'debug' module Puppet PUPPETVERSION = '0.24.1' def Puppet.version return PUPPETVERSION end class << self # So we can monitor signals and such. include SignalObserver include Puppet::Util # To keep a copy of arguments. Set within Config#addargs, because I'm # lazy. attr_accessor :args attr_reader :features attr_writer :name end # the hash that determines how our system behaves @@settings = Puppet::Util::Settings.new # The services running in this process. @services ||= [] # define helper messages for each of the message levels Puppet::Util::Log.eachlevel { |level| define_method(level,proc { |args| if args.is_a?(Array) args = args.join(" ") end Puppet::Util::Log.create( :level => level, :message => args ) }) module_function level } # I keep wanting to use Puppet.error # XXX this isn't actually working right now alias :error :err # The feature collection @features = Puppet::Util::Feature.new('puppet/feature') # Load the base features. require 'puppet/feature/base' # Store a new default value. def self.setdefaults(section, hash) @@settings.setdefaults(section, hash) end # configuration parameter access and stuff def self.[](param) case param when :debug: if Puppet::Util::Log.level == :debug return true else return false end else return @@settings[param] end end # configuration parameter access and stuff def self.[]=(param,value) @@settings[param] = value end def self.clear @@settings.clear end def self.debug=(value) if value Puppet::Util::Log.level=(:debug) else Puppet::Util::Log.level=(:notice) end end def self.settings @@settings end # Load all of the configuration parameters. require 'puppet/defaults' # Prints the contents of a config file with the available config elements, or it # prints a single value of a config element. def self.genconfig if Puppet[:configprint] != "" val = Puppet[:configprint] if val == "all" hash = {} Puppet.settings.each do |name, obj| val = obj.value case val when true, false, "": val = val.inspect end hash[name] = val end hash.sort { |a,b| a[0].to_s <=> b[0].to_s }.each do |name, val| puts "%s = %s" % [name, val] end elsif val =~ /,/ val.split(/\s*,\s*/).sort.each do |v| if Puppet.settings.include?(v) puts "%s = %s" % [v, Puppet[v]] else puts "invalid parameter: %s" % v exit(1) end end else val.split(/\s*,\s*/).sort.each do |v| if Puppet.settings.include?(v) puts Puppet[val] else puts "invalid parameter: %s" % v exit(1) end end end exit(0) end if Puppet[:genconfig] puts Puppet.settings.to_config exit(0) end end def self.genmanifest if Puppet[:genmanifest] puts Puppet.settings.to_manifest exit(0) end end # Run all threads to their ends def self.join defined? 
@threads and @threads.each do |t| t.join end end # Create a new service that we're supposed to run def self.newservice(service) @services ||= [] @services << service end def self.newthread(&block) @threads ||= [] @threads << Thread.new do yield end end def self.newtimer(hash, &block) timer = nil threadlock(:timers) do @timers ||= [] timer = EventLoop::Timer.new(hash) @timers << timer if block_given? observe_signal(timer, :alarm, &block) end end # In case they need it for something else. timer end # Parse the config file for this process. def self.parse_config(oldconfig = nil) # First look for the old configuration file. oldconfig ||= File.join(Puppet[:confdir], Puppet[:name].to_s + ".conf") if FileTest.exists?(oldconfig) and Puppet[:name] != "puppet" Puppet.warning "Individual config files are deprecated; remove %s and use puppet.conf" % oldconfig Puppet.settings.old_parse(oldconfig) return end # Now check for the normal config. if Puppet[:config] and File.exists? Puppet[:config] Puppet.debug "Parsing %s" % Puppet[:config] Puppet.settings.parse(Puppet[:config]) end end # Relaunch the executable. def self.restart command = $0 + " " + self.args.join(" ") Puppet.notice "Restarting with '%s'" % command Puppet.shutdown(false) Puppet::Util::Log.reopen exec(command) end # Trap a couple of the main signals. This should probably be handled # in a way that anyone else can register callbacks for traps, but, eh. def self.settraps [:INT, :TERM].each do |signal| trap(signal) do Puppet.notice "Caught #{signal}; shutting down" Puppet.debug "Signal caught here:" caller.each { |l| Puppet.debug l } Puppet.shutdown end end # Handle restarting. trap(:HUP) do if client = @services.find { |s| s.is_a? Puppet::Network::Client.master } and client.running? client.restart else Puppet.restart end end # Provide a hook for running clients where appropriate trap(:USR1) do done = 0 Puppet.notice "Caught USR1; triggering client run" @services.find_all { |s| s.is_a? Puppet::Network::Client }.each do |client| if client.respond_to? :running? if client.running? Puppet.info "Ignoring running %s" % client.class else done += 1 begin client.runnow rescue => detail Puppet.err "Could not run client: %s" % detail end end else Puppet.info "Ignoring %s; cannot test whether it is running" % client.class end end unless done > 0 Puppet.notice "No clients were run" end end trap(:USR2) do Puppet::Util::Log.reopen end end # Shutdown our server process, meaning stop all services and all threads. # Optionally, exit. def self.shutdown(leave = true) Puppet.notice "Shutting down" # Unmonitor our timers defined? @timers and @timers.each do |timer| EventLoop.current.ignore_timer timer end # This seems to exit the process, although I can't find where it does # so. Leaving it out doesn't seem to hurt anything. #if EventLoop.current.running? # EventLoop.current.quit #end # Stop our services defined? @services and @services.each do |svc| next unless svc.respond_to?(:shutdown) begin timeout(20) do svc.shutdown end rescue TimeoutError Puppet.err "%s could not shut down within 20 seconds" % svc.class end end # And wait for them all to die, giving a decent amount of time defined? @threads and @threads.each do |thr| begin timeout(20) do thr.join end rescue TimeoutError # Just ignore this, since we can't intelligently provide a warning end end if leave exit(0) end end # Start all of our services and optionally our event loop, which blocks, # waiting for someone, somewhere, to generate events of some kind. 
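# A minimal, self-contained sketch of the trap-and-shutdown pattern that
# settraps and shutdown above implement: a service only needs to respond to
# :shutdown to be stopped, and each call is bounded by a timeout so one hung
# service cannot block the rest. The Worker class is a hypothetical stand-in
# for Puppet's real services.
require 'timeout'

class Worker
    def shutdown
        puts "worker stopping"
    end
end

services = [Worker.new]

trap(:TERM) do
    services.each do |svc|
        next unless svc.respond_to?(:shutdown)
        begin
            Timeout.timeout(20) { svc.shutdown }
        rescue Timeout::Error
            warn "#{svc.class} could not shut down within 20 seconds"
        end
    end
    exit(0)
end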
def self.start(block = true) # Starting everything in its own thread, fwiw defined? @services and @services.dup.each do |svc| newthread do begin svc.start rescue => detail if Puppet[:trace] puts detail.backtrace end @services.delete svc Puppet.err "Could not start %s: %s" % [svc.class, detail] end end end # We need to give the services a chance to register their timers before # we try to start monitoring them. sleep 0.5 unless @services.length > 0 Puppet.notice "No remaining services; exiting" exit(1) end if defined? @timers and ! @timers.empty? @timers.each do |timer| EventLoop.current.monitor_timer timer end end if block EventLoop.current.run end end # Create the timer that our different objects (uh, mostly the client) # check. def self.timer unless defined? @timer #Puppet.info "Interval is %s" % Puppet[:runinterval] #@timer = EventLoop::Timer.new(:interval => Puppet[:runinterval]) @timer = EventLoop::Timer.new( :interval => Puppet[:runinterval], :tolerance => 1, :start? => true ) EventLoop.current.monitor_timer @timer end @timer end # XXX this should all be done using puppet objects, not using # normal mkdir def self.recmkdir(dir,mode = 0755) if FileTest.exist?(dir) return false else tmp = dir.sub(/^\//,'') path = [File::SEPARATOR] tmp.split(File::SEPARATOR).each { |dir| path.push dir if ! FileTest.exist?(File.join(path)) begin Dir.mkdir(File.join(path), mode) rescue Errno::EACCES => detail Puppet.err detail.to_s return false rescue => detail Puppet.err "Could not create %s: %s" % [path, detail.to_s] return false end elsif FileTest.directory?(File.join(path)) next else FileTest.exist?(File.join(path)) raise Puppet::Error, "Cannot create %s: basedir %s is a file" % [dir, File.join(path)] end } return true end end # Create a new type. Just proxy to the Type class. def self.newtype(name, options = {}, &block) Puppet::Type.newtype(name, options, &block) end # Retrieve a type by name. Just proxy to the Type class. def self.type(name) Puppet::Type.type(name) end end require 'puppet/type' +require 'puppet/network' require 'puppet/module' require 'puppet/util/storage' require 'puppet/parser/interpreter' if Puppet[:storeconfigs] require 'puppet/rails' end diff --git a/lib/puppet/network.rb b/lib/puppet/network.rb new file mode 100644 index 000000000..8993b8869 --- /dev/null +++ b/lib/puppet/network.rb @@ -0,0 +1,3 @@ +# Just a stub, so we can correctly scope other classes. +module Puppet::Network # :nodoc: +end diff --git a/lib/puppet/network/client/master.rb b/lib/puppet/network/client/master.rb index 390f3d4e2..913c51b3d 100644 --- a/lib/puppet/network/client/master.rb +++ b/lib/puppet/network/client/master.rb @@ -1,582 +1,583 @@ # The client for interacting with the puppetmaster config server. require 'sync' require 'timeout' require 'puppet/network/http_pool' class Puppet::Network::Client::Master < Puppet::Network::Client unless defined? @@sync @@sync = Sync.new end attr_accessor :catalog attr_reader :compile_time class << self # Puppetd should only have one instance running, and we need a way # to retrieve it. attr_accessor :instance include Puppet::Util end def self.facts # Retrieve the facts from the central server. 
if Puppet[:factsync] self.getfacts() end down = Puppet[:downcasefacts] - facts = {} - Facter.each { |name,fact| + facts = Facter.to_hash.inject({}) do |newhash, array| + name, fact = array if down - facts[name] = fact.to_s.downcase + newhash[name] = fact.to_s.downcase else - facts[name] = fact.to_s + newhash[name] = fact.to_s end - } + newhash + end # Add our client version to the list of facts, so people can use it # in their manifests facts["clientversion"] = Puppet.version.to_s # And add our environment as a fact. unless facts.include?("environment") facts["environment"] = Puppet[:environment] end facts end # Return the list of dynamic facts as an array of symbols def self.dynamic_facts Puppet.settings[:dynamicfacts].split(/\s*,\s*/).collect { |fact| fact.downcase } end # Cache the config def cache(text) Puppet.info "Caching catalog at %s" % self.cachefile confdir = ::File.dirname(Puppet[:localconfig]) ::File.open(self.cachefile + ".tmp", "w", 0660) { |f| f.print text } ::File.rename(self.cachefile + ".tmp", self.cachefile) end def cachefile unless defined? @cachefile @cachefile = Puppet[:localconfig] + ".yaml" end @cachefile end def clear @catalog.clear(true) if @catalog Puppet::Type.allclear @catalog = nil end # Initialize and load storage def dostorage begin Puppet::Util::Storage.load @compile_time ||= Puppet::Util::Storage.cache(:configuration)[:compile_time] rescue => detail if Puppet[:trace] puts detail.backtrace end Puppet.err "Corrupt state file %s: %s" % [Puppet[:statefile], detail] begin ::File.unlink(Puppet[:statefile]) retry rescue => detail raise Puppet::Error.new("Cannot remove %s: %s" % [Puppet[:statefile], detail]) end end end # Check whether our catalog is up to date def fresh?(facts) if Puppet[:ignorecache] Puppet.notice "Ignoring cache" return false end unless self.compile_time Puppet.debug "No cached compile time" return false end if facts_changed?(facts) Puppet.info "Facts have changed; recompiling" unless local? return false end newcompile = @driver.freshness # We're willing to give a 2 second drift if newcompile - @compile_time.to_i < 1 return true else Puppet.debug "Server compile time is %s vs %s" % [newcompile, @compile_time.to_i] return false end end # Let the daemon run again, freely in the filesystem. Frolick, little # daemon! def enable lockfile.unlock(:anonymous => true) end # Stop the daemon from making any catalog runs. def disable lockfile.lock(:anonymous => true) end # Retrieve the config from a remote server. If this fails, then # use the cached copy. def getconfig dostorage() facts = nil Puppet::Util.benchmark(:debug, "Retrieved facts") do facts = self.class.facts end raise Puppet::Network::ClientError.new("Could not retrieve any facts") unless facts.length > 0 # Retrieve the plugins. getplugins() if Puppet[:pluginsync] if (self.catalog or FileTest.exist?(self.cachefile)) and self.fresh?(facts) Puppet.info "Configuration is up to date" return if use_cached_config end Puppet.debug("Retrieving catalog") # If we can't retrieve the catalog, just return, which will either # fail, or use the in-memory catalog. unless yaml_objects = get_actual_config(facts) use_cached_config(true) return end begin objects = YAML.load(yaml_objects) rescue => detail msg = "Configuration could not be translated from yaml" msg += "; using cached catalog" if use_cached_config(true) Puppet.warning msg return end self.setclasses(objects.classes) # Clear all existing objects, so we can recreate our stack. clear() if self.catalog # Now convert the objects to a puppet catalog graph. 
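# A minimal sketch of the inject-based fact normalization used by self.facts
# above, run against a plain hash so it does not require Facter. The sample
# fact values are hypothetical; the real code starts from Facter.to_hash and
# reads Puppet[:downcasefacts].
raw  = { "kernel" => "Linux", "operatingsystem" => "Debian" }
down = true

facts = raw.inject({}) do |newhash, array|
    name, fact = array
    newhash[name] = down ? fact.to_s.downcase : fact.to_s
    newhash
end

facts["clientversion"] = "0.24.1"      # added so manifests can test the client version
facts["environment"] ||= "production"  # defaulted when no environment fact exists
puts facts.inspect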
begin @catalog = objects.to_catalog rescue => detail clear() puts detail.backtrace if Puppet[:trace] msg = "Configuration could not be instantiated: %s" % detail msg += "; using cached catalog" if use_cached_config(true) Puppet.warning msg return end if ! @catalog.from_cache self.cache(yaml_objects) end # Keep the state database up to date. @catalog.host_config = true end # A simple proxy method, so it's easy to test. def getplugins self.class.getplugins end # Just so we can specify that we are "the" instance. def initialize(*args) Puppet.settings.use(:main, :ssl, :puppetd) super self.class.instance = self @running = false end # Mark that we should restart. The Puppet module checks whether we're running, # so this only gets called if we're in the middle of a run. def restart # If we're currently running, then just mark for later Puppet.notice "Received signal to restart; waiting until run is complete" @restart = true end # Should we restart? def restart? if defined? @restart @restart else false end end # Retrieve the cached config def retrievecache if FileTest.exists?(self.cachefile) return ::File.read(self.cachefile) else return nil end end # The code that actually runs the catalog. # This just passes any options on to the catalog, # which accepts :tags and :ignoreschedules. def run(options = {}) got_lock = false splay Puppet::Util.sync(:puppetrun).synchronize(Sync::EX) do if !lockfile.lock Puppet.notice "Lock file %s exists; skipping catalog run" % lockfile.lockfile else got_lock = true begin duration = thinmark do self.getconfig end rescue => detail puts detail.backtrace if Puppet[:trace] Puppet.err "Could not retrieve catalog: %s" % detail end if self.catalog @catalog.retrieval_duration = duration Puppet.notice "Starting catalog run" unless @local benchmark(:notice, "Finished catalog run") do @catalog.apply(options) end end # Now close all of our existing http connections, since there's no # reason to leave them lying open. Puppet::Network::HttpPool.clear_http_instances end lockfile.unlock # Did we get HUPped during the run? If so, then restart now that we're # done with the run. if self.restart? Process.kill(:HUP, $$) end end ensure # Just make sure we remove the lock file if we set it. lockfile.unlock if got_lock and lockfile.locked? clear() end def running? lockfile.locked? end # Store the classes in the classfile, but only if we're not local. def setclasses(ary) if @local return end unless ary and ary.length > 0 Puppet.info "No classes to store" return end begin ::File.open(Puppet[:classfile], "w") { |f| f.puts ary.join("\n") } rescue => detail Puppet.err "Could not create class file %s: %s" % [Puppet[:classfile], detail] end end private # Download files from the remote server, returning a list of all # changed files. def self.download(args) hash = { :path => args[:dest], :recurse => true, :source => args[:source], :tag => "#{args[:name]}s", :owner => Process.uid, :group => Process.gid, :purge => true, :force => true, :backup => false } if args[:ignore] hash[:ignore] = args[:ignore].split(/\s+/) end downconfig = Puppet::Node::Catalog.new("downloading") downconfig.add_resource Puppet::Type.type(:file).create(hash) Puppet.info "Retrieving #{args[:name]}s" noop = Puppet[:noop] Puppet[:noop] = false files = [] begin Timeout::timeout(self.timeout) do downconfig.apply do |trans| trans.changed?.find_all do |resource| yield resource if block_given? 
files << resource[:path] end end end rescue Puppet::Error, Timeout::Error => detail if Puppet[:debug] puts detail.backtrace end Puppet.err "Could not retrieve #{args[:name]}s: %s" % detail end # Now clean up after ourselves downconfig.clear return files ensure # I can't imagine why this is necessary, but apparently at last one person has had problems with noop # being nil here. if noop.nil? Puppet[:noop] = false else Puppet[:noop] = noop end end # Retrieve facts from the central server. def self.getfacts # Download the new facts path = Puppet[:factpath].split(":") files = [] download(:dest => Puppet[:factdest], :source => Puppet[:factsource], :ignore => Puppet[:factsignore], :name => "fact") do |resource| next unless path.include?(::File.dirname(resource[:path])) files << resource[:path] end ensure # Clear all existing definitions. Facter.clear # Reload everything. if Facter.respond_to? :loadfacts Facter.loadfacts elsif Facter.respond_to? :load Facter.load else raise Puppet::Error, "You must upgrade your version of Facter to use centralized facts" end # This loads all existing facts and any new ones. We have to remove and # reload because there's no way to unload specific facts. loadfacts() end # Retrieve the plugins from the central server. We only have to load the # changed plugins, because Puppet::Type loads plugins on demand. def self.getplugins download(:dest => Puppet[:plugindest], :source => Puppet[:pluginsource], :ignore => Puppet[:pluginsignore], :name => "plugin") do |resource| next if FileTest.directory?(resource[:path]) path = resource[:path].sub(Puppet[:plugindest], '').sub(/^\/+/, '') unless Puppet::Util::Autoload.loaded?(path) next end begin Puppet.info "Reloading downloaded file %s" % path load resource[:path] rescue => detail Puppet.warning "Could not reload downloaded file %s: %s" % [resource[:path], detail] end end end def self.loaddir(dir, type) return unless FileTest.directory?(dir) Dir.entries(dir).find_all { |e| e =~ /\.rb$/ }.each do |file| fqfile = ::File.join(dir, file) begin Puppet.info "Loading #{type} %s" % ::File.basename(file.sub(".rb",'')) Timeout::timeout(self.timeout) do load fqfile end rescue => detail Puppet.warning "Could not load #{type} %s: %s" % [fqfile, detail] end end end def self.loadfacts Puppet[:factpath].split(":").each do |dir| loaddir(dir, "fact") end end def self.timeout timeout = Puppet[:configtimeout] case timeout when String: if timeout =~ /^\d+$/ timeout = Integer(timeout) else raise ArgumentError, "Configuration timeout must be an integer" end when Integer: # nothing else raise ArgumentError, "Configuration timeout must be an integer" end return timeout end loadfacts() # Have the facts changed since we last compiled? def facts_changed?(facts) oldfacts = (Puppet::Util::Storage.cache(:configuration)[:facts] || {}).dup newfacts = facts.dup self.class.dynamic_facts.each do |fact| [oldfacts, newfacts].each do |facthash| facthash.delete(fact) if facthash.include?(fact) end end if oldfacts == newfacts return false else # unless oldfacts # puts "no old facts" # return true # end # newfacts.keys.each do |k| # unless newfacts[k] == oldfacts[k] # puts "%s: %s vs %s" % [k, newfacts[k], oldfacts[k]] # end # end return true end end # Actually retrieve the catalog, either from the server or from a # local master. def get_actual_config(facts) begin Timeout::timeout(self.class.timeout) do return get_remote_config(facts) end rescue Timeout::Error Puppet.err "Configuration retrieval timed out" return nil end end # Retrieve a config from a remote master. 
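# A small sketch of the comparison that facts_changed? performs above:
# volatile facts (whatever Puppet[:dynamicfacts] names) are stripped from
# both the cached and the current fact sets before comparing, so they never
# trigger a recompile on their own. The fact names and the dynamic list here
# are illustrative only.
dynamic  = %w{memoryfree uptime}
oldfacts = { "kernel" => "Linux", "uptime" => "3 days" }
newfacts = { "kernel" => "Linux", "uptime" => "4 days" }

[oldfacts, newfacts].each do |facthash|
    dynamic.each { |fact| facthash.delete(fact) if facthash.include?(fact) }
end

puts(oldfacts == newfacts ? "facts unchanged" : "facts changed; recompiling")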
def get_remote_config(facts) textobjects = "" textfacts = CGI.escape(YAML.dump(facts)) benchmark(:debug, "Retrieved catalog") do # error handling for this is done in the network client begin textobjects = @driver.getconfig(textfacts, "yaml") begin textobjects = CGI.unescape(textobjects) rescue => detail raise Puppet::Error, "Could not CGI.unescape catalog" end rescue => detail Puppet.err "Could not retrieve catalog: %s" % detail return nil end end return nil if textobjects == "" @compile_time = Time.now Puppet::Util::Storage.cache(:configuration)[:facts] = facts Puppet::Util::Storage.cache(:configuration)[:compile_time] = @compile_time return textobjects end def lockfile unless defined?(@lockfile) @lockfile = Puppet::Util::Pidlock.new(Puppet[:puppetdlockfile]) end @lockfile end # Sleep when splay is enabled; else just return. def splay return unless Puppet[:splay] limit = Integer(Puppet[:splaylimit]) # Pick a splay time and then cache it. unless time = Puppet::Util::Storage.cache(:configuration)[:splay_time] time = rand(limit) Puppet::Util::Storage.cache(:configuration)[:splay_time] = time end Puppet.info "Sleeping for %s seconds (splay is enabled)" % time sleep(time) end private # Use our cached config, optionally specifying whether this is # necessary because of a failure. def use_cached_config(because_of_failure = false) return true if self.catalog if because_of_failure and ! Puppet[:usecacheonfailure] @catalog = nil Puppet.warning "Not using cache on failed catalog" return false end return false unless oldtext = self.retrievecache begin @catalog = YAML.load(oldtext).to_catalog @catalog.from_cache = true @catalog.host_config = true rescue => detail puts detail.backtrace if Puppet[:trace] Puppet.warning "Could not load cached catalog: %s" % detail clear return false end return true end end diff --git a/lib/puppet/parser/resource.rb b/lib/puppet/parser/resource.rb index 46be89ca2..b001e165b 100644 --- a/lib/puppet/parser/resource.rb +++ b/lib/puppet/parser/resource.rb @@ -1,443 +1,450 @@ # A resource that we're managing. This handles making sure that only subclasses # can set parameters. class Puppet::Parser::Resource require 'puppet/parser/resource/param' require 'puppet/parser/resource/reference' require 'puppet/util/tagging' include Puppet::Util include Puppet::Util::MethodHelper include Puppet::Util::Errors include Puppet::Util::Logging include Puppet::Util::Tagging attr_accessor :source, :line, :file, :scope, :rails_id attr_accessor :virtual, :override, :translated attr_reader :exported, :evaluated, :params # Determine whether the provided parameter name is a relationship parameter. def self.relationship_parameter?(name) unless defined?(@relationship_names) @relationship_names = Puppet::Type.relationship_params.collect { |p| p.name } end @relationship_names.include?(name) end # Proxy a few methods to our @ref object. [:builtin?, :type, :title].each do |method| define_method(method) do @ref.send(method) end end # Set up some boolean test methods [:exported, :translated, :override, :virtual, :evaluated].each do |method| newmeth = (method.to_s + "?").intern define_method(newmeth) do self.send(method) end end def [](param) param = symbolize(param) if param == :title return self.title end if @params.has_key?(param) @params[param].value else nil end end def builtin=(bool) @ref.builtin = bool end # Retrieve the associated definition and evaluate it. def evaluate if klass = @ref.definedtype finish() return klass.evaluate_code(self) elsif builtin? 
devfail "Cannot evaluate a builtin type" else self.fail "Cannot find definition %s" % self.type end ensure @evaluated = true end # Mark this resource as both exported and virtual, # or remove the exported mark. def exported=(value) if value @virtual = true @exported = value else @exported = value end end # Do any finishing work on this object, called before evaluation or # before storage/translation. def finish + return if finished? + @finished = true add_defaults() add_metaparams() add_scope_tags() validate() end + # Has this resource already been finished? + def finished? + defined?(@finished) and @finished + end + def initialize(options) # Set all of the options we can. options.each do |option, value| if respond_to?(option.to_s + "=") send(option.to_s + "=", value) options.delete(option) end end unless self.scope raise ArgumentError, "Resources require a scope" end @source ||= scope.source options = symbolize_options(options) # Set up our reference. if type = options[:type] and title = options[:title] options.delete(:type) options.delete(:title) else raise ArgumentError, "Resources require a type and title" end @ref = Reference.new(:type => type, :title => title, :scope => self.scope) @params = {} # Define all of the parameters if params = options[:params] options.delete(:params) params.each do |param| set_parameter(param) end end # Throw an exception if we've got any arguments left to set. unless options.empty? raise ArgumentError, "Resources do not accept %s" % options.keys.collect { |k| k.to_s }.join(", ") end tag(@ref.type) tag(@ref.title) if valid_tag?(@ref.title.to_s) end # Is this resource modeling an isomorphic resource type? def isomorphic? if builtin? return @ref.builtintype.isomorphic? else return true end end # Merge an override resource in. This will throw exceptions if # any overrides aren't allowed. def merge(resource) # Test the resource scope, to make sure the resource is even allowed # to override. unless self.source.object_id == resource.source.object_id || resource.source.child_of?(self.source) raise Puppet::ParseError.new("Only subclasses can override parameters", resource.line, resource.file) end # Some of these might fail, but they'll fail in the way we want. resource.params.each do |name, param| override_parameter(param) end end # Modify this resource in the Rails database. Poor design, yo. def modify_rails(db_resource) args = rails_args args.each do |param, value| db_resource[param] = value unless db_resource[param] == value end # Handle file specially if (self.file and (!db_resource.file or db_resource.file != self.file)) db_resource.file = self.file end updated_params = @params.inject({}) do |hash, ary| hash[ary[0].to_s] = ary[1] hash end db_resource.ar_hash_merge(db_resource.get_params_hash(db_resource.param_values), updated_params, :create => Proc.new { |name, parameter| parameter.to_rails(db_resource) }, :delete => Proc.new { |values| values.each { |value| db_resource.param_values.delete(value) } }, :modify => Proc.new { |db, mem| mem.modify_rails_values(db) }) updated_tags = tags.inject({}) { |hash, tag| hash[tag] = tag hash } db_resource.ar_hash_merge(db_resource.get_tag_hash(), updated_tags, :create => Proc.new { |name, tag| db_resource.add_resource_tag(name) }, :delete => Proc.new { |tag| db_resource.resource_tags.delete(tag) }, :modify => Proc.new { |db, mem| # nothing here }) end # Return the resource name, or the title if no name # was specified. def name unless defined? 
@name @name = self[:name] || self.title end @name end # This *significantly* reduces the number of calls to Puppet.[]. def paramcheck? unless defined? @@paramcheck @@paramcheck = Puppet[:paramcheck] end @@paramcheck end # A temporary occasion, until I get paths in the scopes figured out. def path to_s end # Return the short version of our name. def ref @ref.to_s end def to_hash @params.inject({}) do |hash, ary| param = ary[1] # Skip "undef" values. if param.value != :undef hash[param.name] = param.value end hash end end # Turn our parser resource into a Rails resource. def to_rails(host) args = rails_args db_resource = host.resources.build(args) # Handle file specially db_resource.file = self.file db_resource.save @params.each { |name, param| param.to_rails(db_resource) } tags.each { |tag| db_resource.add_resource_tag(tag) } return db_resource end def to_s self.ref end # Translate our object to a transportable object. def to_trans return nil if virtual? if builtin? to_transobject else to_transbucket end end def to_transbucket bucket = Puppet::TransBucket.new([]) bucket.type = self.type bucket.name = self.title # TransBuckets don't support parameters, which is why they're being deprecated. return bucket end def to_transobject # Now convert to a transobject obj = Puppet::TransObject.new(@ref.title, @ref.type) to_hash.each do |p, v| if v.is_a?(Reference) v = v.to_ref elsif v.is_a?(Array) v = v.collect { |av| if av.is_a?(Reference) av = av.to_ref end av } end # If the value is an array with only one value, then # convert it to a single value. This is largely so that # the database interaction doesn't have to worry about # whether it returns an array or a string. obj[p.to_s] = if v.is_a?(Array) and v.length == 1 v[0] else v end end obj.file = self.file obj.line = self.line obj.tags = self.tags return obj end private # Add default values from our definition. def add_defaults scope.lookupdefaults(self.type).each do |name, param| unless @params.include?(name) self.debug "Adding default for %s" % name @params[name] = param end end end # Add any metaparams defined in our scope. This actually adds any metaparams # from any parent scope, and there's currently no way to turn that off. def add_metaparams Puppet::Type.eachmetaparam do |name| # Skip metaparams that we already have defined, unless they're relationship metaparams. # LAK:NOTE Relationship metaparams get treated specially -- we stack them, instead of # overriding. next if @params[name] and not self.class.relationship_parameter?(name) # Skip metaparams for which we get no value. next unless val = scope.lookupvar(name.to_s, false) and val != :undefined # The default case: just set the value return set_parameter(name, val) unless @params[name] # For relationship params, though, join the values (a la #446). @params[name].value = [@params[name].value, val].flatten end end def add_scope_tags if scope_resource = scope.resource tag(*scope_resource.tags) end end # Accept a parameter from an override. def override_parameter(param) # This can happen if the override is defining a new parameter, rather # than replacing an existing one. (@params[param.name] = param and return) unless current = @params[param.name] # The parameter is already set. Fail if they're not allowed to override it. 
unless param.source.child_of?(current.source) if Puppet[:trace] puts caller end msg = "Parameter '%s' is already set on %s" % [param.name, self.to_s] if current.source.to_s != "" msg += " by %s" % current.source end if current.file or current.line fields = [] fields << current.file if current.file fields << current.line.to_s if current.line msg += " at %s" % fields.join(":") end msg += "; cannot redefine" raise Puppet::ParseError.new(msg, param.line, param.file) end # If we've gotten this far, we're allowed to override. # Merge with previous value, if the parameter was generated with the +> syntax. # It's important that we use the new param instance here, not the old one, # so that the source is registered correctly for later overrides. param.value = [current.value, param.value].flatten if param.add @params[param.name] = param end # Verify that all passed parameters are valid. This throws an error if # there's a problem, so we don't have to worry about the return value. def paramcheck(param) param = param.to_s # Now make sure it's a valid argument to our class. These checks # are organized in order of commonhood -- most types, it's a valid # argument and paramcheck is enabled. if @ref.typeclass.validattr?(param) true elsif %w{name title}.include?(param) # always allow these true elsif paramcheck? self.fail Puppet::ParseError, "Invalid parameter '%s' for type '%s'" % [param, @ref.type] end end def rails_args return [:type, :title, :line, :exported].inject({}) do |hash, param| # 'type' isn't a valid column name, so we have to use another name. to = (param == :type) ? :restype : param if value = self.send(param) hash[to] = value end hash end end # Define a parameter in our resource. def set_parameter(param, value = nil) if value param = Puppet::Parser::Resource::Param.new( :name => param, :value => value, :source => self.source ) elsif ! param.is_a?(Puppet::Parser::Resource::Param) raise ArgumentError, "Must pass a parameter or all necessary values" end # And store it in our parameter hash. @params[param.name] = param end # Make sure the resource's parameters are all valid for the type. def validate @params.each do |name, param| # Make sure it's a valid parameter. paramcheck(name) end end end diff --git a/lib/puppet/rails.rb b/lib/puppet/rails.rb index a1192bf20..a021c773a 100644 --- a/lib/puppet/rails.rb +++ b/lib/puppet/rails.rb @@ -1,132 +1,136 @@ # Load the appropriate libraries, or set a class indicating they aren't available require 'facter' require 'puppet' module Puppet::Rails def self.connect # This global init does not work for testing, because we remove # the state dir on every test. return if ActiveRecord::Base.connected? Puppet.settings.use(:main, :rails, :puppetmasterd) ActiveRecord::Base.logger = Logger.new(Puppet[:railslog]) begin loglevel = Logger.const_get(Puppet[:rails_loglevel].upcase) ActiveRecord::Base.logger.level = loglevel rescue => detail Puppet.warning "'%s' is not a valid Rails log level; using debug" % Puppet[:rails_loglevel] ActiveRecord::Base.logger.level = Logger::DEBUG end ActiveRecord::Base.allow_concurrency = true ActiveRecord::Base.verify_active_connections! begin ActiveRecord::Base.establish_connection(database_arguments()) rescue => detail if Puppet[:trace] puts detail.backtrace end raise Puppet::Error, "Could not connect to database: %s" % detail end end # The arguments for initializing the database connection. 
def self.database_arguments adapter = Puppet[:dbadapter] args = {:adapter => adapter, :log_level => Puppet[:rails_loglevel]} case adapter when "sqlite3": args[:dbfile] = Puppet[:dblocation] when "mysql", "postgresql": args[:host] = Puppet[:dbserver] unless Puppet[:dbserver].empty? args[:username] = Puppet[:dbuser] unless Puppet[:dbuser].empty? args[:password] = Puppet[:dbpassword] unless Puppet[:dbpassword].empty? args[:database] = Puppet[:dbname] socket = Puppet[:dbsocket] args[:socket] = socket unless socket.empty? else raise ArgumentError, "Invalid db adapter %s" % adapter end args end # Set up our database connection. It'd be nice to have a "use" system # that could make callbacks. def self.init unless Puppet.features.rails? raise Puppet::DevError, "No activerecord, cannot init Puppet::Rails" end connect() unless ActiveRecord::Base.connection.tables.include?("resources") require 'puppet/rails/database/schema' Puppet::Rails::Schema.init end if Puppet[:dbmigrate] migrate() end end # Migrate to the latest db schema. def self.migrate dbdir = nil $:.each { |d| tmp = File.join(d, "puppet/rails/database") if FileTest.directory?(tmp) dbdir = tmp break end } unless dbdir raise Puppet::Error, "Could not find Puppet::Rails database dir" end + unless ActiveRecord::Base.connection.tables.include?("resources") + raise Puppet::Error, "Database has problems, can't migrate." + end + Puppet.notice "Migrating" begin ActiveRecord::Migrator.migrate(dbdir) rescue => detail if Puppet[:trace] puts detail.backtrace end raise Puppet::Error, "Could not migrate database: %s" % detail end end # Tear down the database. Mostly only used during testing. def self.teardown unless Puppet.features.rails? raise Puppet::DevError, "No activerecord, cannot init Puppet::Rails" end Puppet.settings.use(:puppetmasterd, :rails) begin ActiveRecord::Base.establish_connection(database_arguments()) rescue => detail if Puppet[:trace] puts detail.backtrace end raise Puppet::Error, "Could not connect to database: %s" % detail end ActiveRecord::Base.connection.tables.each do |t| ActiveRecord::Base.connection.drop_table t end end end if Puppet.features.rails? require 'puppet/rails/host' end diff --git a/lib/puppet/rails/database/001_add_created_at_to_all_tables.rb b/lib/puppet/rails/database/001_add_created_at_to_all_tables.rb new file mode 100644 index 000000000..71ee6aeed --- /dev/null +++ b/lib/puppet/rails/database/001_add_created_at_to_all_tables.rb @@ -0,0 +1,17 @@ +class AddCreatedAtToAllTables < ActiveRecord::Migration + def self.up + ActiveRecord::Base.connection.tables.each do |t| + unless ActiveRecord::Base.connection.columns(t).collect {|c| c.name}.include?("created_at") + add_column t.to_s, :created_at, :datetime + end + end + end + + def self.down + ActiveRecord::Base.connection.tables.each do |t| + unless ActiveRecord::Base.connection.columns(t).collect {|c| c.name}.include?("created_at") + remove_column t.to_s, :created_at + end + end + end +end diff --git a/lib/puppet/rails/external/tagging/README b/lib/puppet/rails/external/tagging/README deleted file mode 100644 index 8d2f90822..000000000 --- a/lib/puppet/rails/external/tagging/README +++ /dev/null @@ -1,4 +0,0 @@ -Acts As Taggable -================= - -Allows for tags to be added to multiple classes. 
\ No newline at end of file diff --git a/lib/puppet/rails/external/tagging/acts_as_taggable.rb b/lib/puppet/rails/external/tagging/acts_as_taggable.rb deleted file mode 100644 index 7fd7f6b12..000000000 --- a/lib/puppet/rails/external/tagging/acts_as_taggable.rb +++ /dev/null @@ -1,62 +0,0 @@ -module ActiveRecord - module Acts #:nodoc: - module Taggable #:nodoc: - def self.included(base) - base.extend(ClassMethods) - end - - module ClassMethods - def acts_as_taggable(options = {}) - write_inheritable_attribute(:acts_as_taggable_options, { - :taggable_type => ActiveRecord::Base.send(:class_name_of_active_record_descendant, self).to_s, - :from => options[:from] - }) - - class_inheritable_reader :acts_as_taggable_options - - has_many :taggings, :as => :taggable, :dependent => :destroy - has_many :tags, :through => :taggings - - include ActiveRecord::Acts::Taggable::InstanceMethods - extend ActiveRecord::Acts::Taggable::SingletonMethods - end - end - - module SingletonMethods - def find_tagged_with(list) - find_by_sql([ - "SELECT #{table_name}.* FROM #{table_name}, tags, taggings " + - "WHERE #{table_name}.#{primary_key} = taggings.taggable_id " + - "AND taggings.taggable_type = ? " + - "AND taggings.tag_id = tags.id AND tags.name IN (?)", - acts_as_taggable_options[:taggable_type], list - ]) - end - def tags(options = {}) - options.merge!(:taggable_type => self.to_s) - Tag.tags(options) - end - end - - module InstanceMethods - def tag_with(list) - Tag.transaction do - taggings.destroy_all - - Tag.parse(list).each do |name| - if acts_as_taggable_options[:from] - send(acts_as_taggable_options[:from]).tags.find_or_create_by_name(name).on(self) - else - Tag.find_or_create_by_name(name).on(self) - end - end - end - end - - def tag_list - tags.collect { |tag| tag.name.include?(" ") ? "'#{tag.name}'" : tag.name }.join(" ") - end - end - end - end -end diff --git a/lib/puppet/rails/external/tagging/init.rb b/lib/puppet/rails/external/tagging/init.rb deleted file mode 100644 index 5069d8040..000000000 --- a/lib/puppet/rails/external/tagging/init.rb +++ /dev/null @@ -1,5 +0,0 @@ -require 'puppet/rails/external/tagging/acts_as_taggable' -ActiveRecord::Base.send(:include, ActiveRecord::Acts::Taggable) - -require 'puppet/rails/external/tagging/tagging' -require 'puppet/rails/external/tagging/tag' diff --git a/lib/puppet/rails/external/tagging/tag.rb b/lib/puppet/rails/external/tagging/tag.rb deleted file mode 100644 index c6bf4ca78..000000000 --- a/lib/puppet/rails/external/tagging/tag.rb +++ /dev/null @@ -1,50 +0,0 @@ -class Tag < ActiveRecord::Base - has_many :taggings - - def self.tags(options = {}) - query = "select tags.id, name, count(*) as count" - query << " from taggings, tags" - query << " where tags.id = tag_id" - query << " group by tag_id" - query << " order by #{options[:order]}" if options[:order] != nil - query << " limit #{options[:limit]}" if options[:limit] != nil - tags = Tag.find_by_sql(query) - end - - def self.parse(list) - tag_names = [] - - # first, pull out the quoted tags - list.gsub!(/\"(.*?)\"\s*/ ) { tag_names << $1; "" } - - # then, replace all commas with a space - list.gsub!(/,/, " ") - - # then, get whatever's left - tag_names.concat list.split(/\s/) - - # strip whitespace from the names - tag_names = tag_names.map { |t| t.strip } - - # delete any blank tag names - tag_names = tag_names.delete_if { |t| t.empty? 
} - - return tag_names - end - - def tagged - @tagged ||= taggings.collect { |tagging| tagging.taggable } - end - - def on(taggable) - taggings.build :taggable => taggable - end - - def ==(comparison_object) - super || name == comparison_object.to_s - end - - def to_s - name - end -end diff --git a/lib/puppet/rails/external/tagging/tagging.rb b/lib/puppet/rails/external/tagging/tagging.rb deleted file mode 100644 index e06e88a14..000000000 --- a/lib/puppet/rails/external/tagging/tagging.rb +++ /dev/null @@ -1,12 +0,0 @@ -class Tagging < ActiveRecord::Base - belongs_to :tag - belongs_to :taggable, :polymorphic => true - - def self.tagged_class(taggable) - ActiveRecord::Base.send(:class_name_of_active_record_descendant, taggable.class).to_s - end - - def self.find_taggable(tagged_class, tagged_id) - tagged_class.constantize.find(tagged_id) - end -end \ No newline at end of file diff --git a/lib/puppet/rails/fact_value.rb b/lib/puppet/rails/fact_value.rb index 0eb70be72..b53591d7e 100644 --- a/lib/puppet/rails/fact_value.rb +++ b/lib/puppet/rails/fact_value.rb @@ -1,6 +1,10 @@ class Puppet::Rails::FactValue < ActiveRecord::Base belongs_to :fact_name belongs_to :host + + def to_label + "#{self.fact_name.name}" + end end # $Id: fact_value.rb 1952 2006-12-19 05:47:57Z luke $ diff --git a/lib/puppet/rails/host.rb b/lib/puppet/rails/host.rb index 72898fd97..626edaa88 100644 --- a/lib/puppet/rails/host.rb +++ b/lib/puppet/rails/host.rb @@ -1,152 +1,151 @@ require 'puppet/rails/resource' require 'puppet/rails/fact_name' require 'puppet/rails/source_file' require 'puppet/util/rails/collection_merger' # Puppet::TIME_DEBUG = true class Puppet::Rails::Host < ActiveRecord::Base include Puppet::Util include Puppet::Util::CollectionMerger has_many :fact_values, :dependent => :destroy has_many :fact_names, :through => :fact_values - belongs_to :puppet_classes - has_many :source_files + belongs_to :source_file has_many :resources, :include => :param_values, :dependent => :destroy # If the host already exists, get rid of its objects def self.clean(host) if obj = self.find_by_name(host) obj.rails_objects.clear return obj else return nil end end # Store our host in the database. def self.store(node, resources) args = {} host = nil transaction do #unless host = find_by_name(name) seconds = Benchmark.realtime { unless host = find_by_name(node.name) host = new(:name => node.name) end } Puppet.notice("Searched for host in %0.2f seconds" % seconds) if defined?(Puppet::TIME_DEBUG) if ip = node.parameters["ipaddress"] host.ip = ip end # Store the facts into the database. host.setfacts node.parameters seconds = Benchmark.realtime { host.setresources(resources) } Puppet.notice("Handled resources in %0.2f seconds" % seconds) if defined?(Puppet::TIME_DEBUG) host.last_compile = Time.now host.save end return host end # Return the value of a fact. def fact(name) if fv = self.fact_values.find(:all, :include => :fact_name, :conditions => "fact_names.name = '#{name}'") return fv else return nil end end # returns a hash of fact_names.name => [ fact_values ] for this host. 
def get_facts_hash fact_values = self.fact_values.find(:all, :include => :fact_name) return fact_values.inject({}) do | hash, value | hash[value.fact_name.name] ||= [] hash[value.fact_name.name] << value hash end end def setfacts(facts) facts = facts.dup ar_hash_merge(get_facts_hash(), facts, :create => Proc.new { |name, values| fact_name = Puppet::Rails::FactName.find_or_create_by_name(name) values = [values] unless values.is_a?(Array) values.each do |value| fact_values.build(:value => value, :fact_name => fact_name) end }, :delete => Proc.new { |values| values.each { |value| self.fact_values.delete(value) } }, :modify => Proc.new { |db, mem| mem = [mem].flatten fact_name = db[0].fact_name db_values = db.collect { |fact_value| fact_value.value } (db_values - (db_values & mem)).each do |value| db.find_all { |fact_value| fact_value.value == value }.each { |fact_value| fact_values.delete(fact_value) } end (mem - (db_values & mem)).each do |value| fact_values.build(:value => value, :fact_name => fact_name) end }) end # Set our resources. def setresources(list) existing = nil seconds = Benchmark.realtime { # Preload the parameters with the resource query, but not the tags, since doing so makes the query take about 10x longer. # I've left the other queries in so that it's straightforward to switch between them for testing, if we so desire. #existing = resources.find(:all, :include => [{:param_values => :param_name, :resource_tags => :puppet_tag}, :source_file]).inject({}) do | hash, resource | #existing = resources.find(:all, :include => [{:resource_tags => :puppet_tag}, :source_file]).inject({}) do | hash, resource | existing = resources.find(:all, :include => [{:param_values => :param_name}, :source_file]).inject({}) do | hash, resource | hash[resource.ref] = resource hash end } Puppet.notice("Searched for resources in %0.2f seconds" % seconds) if defined?(Puppet::TIME_DEBUG) compiled = list.inject({}) do |hash, resource| hash[resource.ref] = resource hash end ar_hash_merge(existing, compiled, :create => Proc.new { |ref, resource| resource.to_rails(self) }, :delete => Proc.new { |resource| self.resources.delete(resource) }, :modify => Proc.new { |db, mem| mem.modify_rails(db) }) end def update_connect_time self.last_connect = Time.now save end end diff --git a/lib/puppet/rails/param_value.rb b/lib/puppet/rails/param_value.rb index 02c29c540..fc00a43d4 100644 --- a/lib/puppet/rails/param_value.rb +++ b/lib/puppet/rails/param_value.rb @@ -1,24 +1,28 @@ class Puppet::Rails::ParamValue < ActiveRecord::Base belongs_to :param_name belongs_to :resource def value val = self[:value] if val =~ /^--- \!/ YAML.load(val) else val end end # I could not find a cleaner way to handle making sure that resource references # were consistently serialized and deserialized. 
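# A minimal sketch of the storage convention ParamValue relies on above:
# structured values (resource references) are written out as YAML, and YAML
# dumps of Ruby objects always begin with "--- !", which is what the reader
# checks for before deserializing. Ref is a hypothetical stand-in for
# Puppet::Parser::Resource::Reference.
require 'yaml'

Ref = Struct.new(:type, :title)

stored = YAML.dump(Ref.new("File", "/etc/motd"))
plain  = "0644"

[stored, plain].each do |val|
    if val =~ /^--- !/
        puts "tagged YAML, would be deserialized: #{val.split("\n").first}"
    else
        puts "plain value, stored as-is: #{val}"
    end
end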
def value=(val) if val.is_a?(Puppet::Parser::Resource::Reference) self[:value] = YAML.dump(val) else self[:value] = val end end + + def to_label + "#{self.param_name.name}" + end end diff --git a/lib/puppet/rails/puppet_class.rb b/lib/puppet/rails/puppet_class.rb deleted file mode 100644 index 35cef8974..000000000 --- a/lib/puppet/rails/puppet_class.rb +++ /dev/null @@ -1,6 +0,0 @@ -class Puppet::Rails::PuppetClass < ActiveRecord::Base - has_many :resources - has_many :source_files - has_many :hosts -end - diff --git a/lib/puppet/rails/resource_tag.rb b/lib/puppet/rails/resource_tag.rb index d06711877..f9694e082 100644 --- a/lib/puppet/rails/resource_tag.rb +++ b/lib/puppet/rails/resource_tag.rb @@ -1,4 +1,8 @@ class Puppet::Rails::ResourceTag < ActiveRecord::Base belongs_to :puppet_tag belongs_to :resource + + def to_label + "#{self.puppet_tag.name}" + end end diff --git a/lib/puppet/rails/source_file.rb b/lib/puppet/rails/source_file.rb index 51d1b1fb5..3ccf87ac6 100644 --- a/lib/puppet/rails/source_file.rb +++ b/lib/puppet/rails/source_file.rb @@ -1,5 +1,8 @@ class Puppet::Rails::SourceFile < ActiveRecord::Base has_one :host - has_one :puppet_class has_one :resource + + def to_label + "#{self.filename}" + end end diff --git a/lib/puppet/transaction.rb b/lib/puppet/transaction.rb index 976bf7c68..14b2037f4 100644 --- a/lib/puppet/transaction.rb +++ b/lib/puppet/transaction.rb @@ -1,737 +1,740 @@ # the class that actually walks our resource/property tree, collects the changes, # and performs them require 'puppet' require 'puppet/propertychange' module Puppet class Transaction attr_accessor :component, :catalog, :ignoreschedules attr_accessor :sorted_resources, :configurator # The report, once generated. attr_reader :report # The list of events generated in this transaction. attr_reader :events - attr_writer :tags - include Puppet::Util # Add some additional times for reporting def addtimes(hash) hash.each do |name, num| @timemetrics[name] = num end end # Check to see if we should actually allow processing, but this really only # matters when a resource is getting deleted. def allow_processing?(resource, changes) # If a resource is going to be deleted but it still has # dependencies, then don't delete it unless it's implicit or the # dependency is itself being deleted. if resource.purging? and resource.deleting? if deps = relationship_graph.dependents(resource) and ! deps.empty? and deps.detect { |d| ! d.deleting? } resource.warning "%s still depend%s on me -- not purging" % [deps.collect { |r| r.ref }.join(","), deps.length > 1 ? "":"s"] return false end end return true end # Are there any failed resources in this transaction? def any_failed? failures = @failures.inject(0) { |failures, array| failures += array[1]; failures } if failures > 0 failures else false end end # Apply all changes for a resource, returning a list of the events # generated. def apply(resource) begin changes = resource.evaluate rescue => detail if Puppet[:trace] puts detail.backtrace end resource.err "Failed to retrieve current state of resource: %s" % detail # Mark that it failed @failures[resource] += 1 # And then return return [] end changes = [changes] unless changes.is_a?(Array) if changes.length > 0 @resourcemetrics[:out_of_sync] += 1 end return [] if changes.empty? or ! allow_processing?(resource, changes) resourceevents = apply_changes(resource, changes) # If there were changes and the resource isn't in noop mode... unless changes.empty? 
or resource.noop # Record when we last synced resource.cache(:synced, Time.now) # Flush, if appropriate if resource.respond_to?(:flush) resource.flush end # And set a trigger for refreshing this resource if it's a # self-refresher if resource.self_refresh? and ! resource.deleting? # Create an edge with this resource as both the source and # target. The triggering method treats these specially for # logging. events = resourceevents.collect { |e| e.event } set_trigger(Puppet::Relationship.new(resource, resource, :callback => :refresh, :event => events)) end end resourceevents end # Apply each change in turn. def apply_changes(resource, changes) changes.collect { |change| @changes << change @count += 1 change.transaction = self events = nil begin # use an array, so that changes can return more than one # event if they want events = [change.forward].flatten.reject { |e| e.nil? } rescue => detail if Puppet[:trace] puts detail.backtrace end change.property.err "change from %s to %s failed: %s" % [change.property.is_to_s(change.is), change.property.should_to_s(change.should), detail] @failures[resource] += 1 next # FIXME this should support using onerror to determine # behaviour; or more likely, the client calling us # should do so end # Mark that our change happened, so it can be reversed # if we ever get to that point unless events.nil? or (events.is_a?(Array) and (events.empty?) or events.include?(:noop)) change.changed = true @resourcemetrics[:applied] += 1 end events }.flatten.reject { |e| e.nil? } end # Find all of the changed resources. def changed? @changes.find_all { |change| change.changed }.collect { |change| unless change.property.resource raise "No resource for %s" % change.inspect end change.property.resource }.uniq end # Do any necessary cleanup. If we don't get rid of the graphs, the # contained resources might never get cleaned up. def cleanup if defined? @generated relationship_graph.remove_resource(*@generated) end end # Copy an important relationships from the parent to the newly-generated # child resource. def copy_relationships(resource, children) depthfirst = resource.depthfirst? children.each do |gen_child| if depthfirst edge = [gen_child, resource] else edge = [resource, gen_child] end relationship_graph.add_resource(gen_child) unless relationship_graph.resource(gen_child.ref) unless relationship_graph.edge?(edge[1], edge[0]) relationship_graph.add_edge(*edge) else resource.debug "Skipping automatic relationship to %s" % gen_child end end end # Are we deleting this resource? def deleting?(changes) changes.detect { |change| change.property.name == :ensure and change.should == :absent } end # See if the resource generates new resources at evaluation time. def eval_generate(resource) if resource.respond_to?(:eval_generate) begin children = resource.eval_generate rescue => detail if Puppet[:trace] puts detail.backtrace end resource.err "Failed to generate additional resources during transaction: %s" % detail return nil end if children children.each { |child| child.finish } @generated += children return children end end end # Evaluate a single resource. def eval_resource(resource, checkskip = true) events = [] if resource.is_a?(Puppet::Type::Component) raise Puppet::DevError, "Got a component to evaluate" end if checkskip and skip?(resource) @resourcemetrics[:skipped] += 1 else @resourcemetrics[:scheduled] += 1 changecount = @changes.length # We need to generate first regardless, because the recursive # actions sometimes change how the top resource is applied. 
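# A plain-Ruby sketch of the edge direction copy_relationships chooses above:
# for a depth-first resource (one whose generated children must be handled
# before the parent, for example a directory tree being removed) the edge
# runs from child to parent, otherwise from parent to child. The resource
# names are hypothetical.
def edge_for(parent, child, depthfirst)
    depthfirst ? [child, parent] : [parent, child]
end

p edge_for("File[/tmp/dir]", "File[/tmp/dir/a]", true)   # child applied before parent
p edge_for("File[/tmp/dir]", "File[/tmp/dir/a]", false)  # parent applied before child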
children = eval_generate(resource) if children and resource.depthfirst? children.each do |child| # The child will never be skipped when the parent isn't events += eval_resource(child, false) end end # Perform the actual changes seconds = thinmark do events += apply(resource) end if children and ! resource.depthfirst? children.each do |child| events += eval_resource(child, false) end end # Create a child/parent relationship. We do this after everything else because # we want explicit relationships to be able to override automatic relationships, # including this one. if children copy_relationships(resource, children) end # A bit of hackery here -- if skipcheck is true, then we're the # top-level resource. If that's the case, then make sure all of # the changes list this resource as a proxy. This is really only # necessary for rollback, since we know the generating resource # during forward changes. if children and checkskip @changes[changecount..-1].each { |change| change.proxy = resource } end # Keep track of how long we spend in each type of resource @timemetrics[resource.class.name] += seconds end # Check to see if there are any events for this resource if triggedevents = trigger(resource) events += triggedevents end # Collect the targets of any subscriptions to those events. We pass # the parent resource in so it will override the source in the events, # since eval_generated children can't have direct relationships. relationship_graph.matching_edges(events, resource).each do |orig_edge| # We have to dup the label here, else we modify the original edge label, # which affects whether a given event will match on the next run, which is, # of course, bad. edge = orig_edge.class.new(orig_edge.source, orig_edge.target) label = orig_edge.label.dup label[:event] = events.collect { |e| e.event } edge.label = label set_trigger(edge) end # And return the events for collection events end # This method does all the actual work of running a transaction. It # collects all of the changes, executes them, and responds to any # necessary events. def evaluate @count = 0 # Start logging. Puppet::Util::Log.newdestination(@report) prepare() begin allevents = @sorted_resources.collect { |resource| if resource.is_a?(Puppet::Type::Component) Puppet.warning "Somehow left a component in the relationship graph" next end ret = nil seconds = thinmark do ret = eval_resource(resource) end if Puppet[:evaltrace] and @catalog.host_config? resource.info "Evaluated in %0.2f seconds" % seconds end ret }.flatten.reject { |e| e.nil? } ensure # And then close the transaction log. Puppet::Util::Log.close(@report) end Puppet.debug "Finishing transaction %s with %s changes" % [self.object_id, @count] @events = allevents allevents end # Determine whether a given resource has failed. def failed?(obj) if @failures[obj] > 0 return @failures[obj] else return false end end # Does this resource have any failed dependencies? def failed_dependencies?(resource) # First make sure there are no failed dependencies. To do this, # we check for failures in any of the vertexes above us. It's not # enough to check the immediate dependencies, which is why we use # a tree from the reversed graph. skip = false deps = relationship_graph.dependencies(resource) deps.each do |dep| if fails = failed?(dep) resource.notice "Dependency %s[%s] has %s failures" % [dep.class.name, dep.name, @failures[dep]] skip = true end end return skip end # Collect any dynamically generated resources. 
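# Illustrative aside, not part of the patch: how a transaction is typically
# driven end to end using the methods above. A minimal sketch, assuming the
# Puppet libraries from this tree are on the load path; with an empty catalog
# the run is effectively a no-op, but it shows the prepare/evaluate/report
# sequence before the resource-generation code below.
require 'puppet'
require 'puppet/node/catalog'
require 'puppet/transaction'

catalog     = Puppet::Node::Catalog.new
transaction = Puppet::Transaction.new(catalog)
transaction.evaluate                  # prepare(), apply each sorted resource, fire triggers
report = transaction.generate_report  # the same report that send_report would ship
puts report.summary
transaction.cleanup                   # drop any generated resources from the graph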
def generate list = @catalog.vertices # Store a list of all generated resources, so that we can clean them up # after the transaction closes. @generated = [] newlist = [] while ! list.empty? list.each do |resource| if resource.respond_to?(:generate) begin made = resource.generate rescue => detail resource.err "Failed to generate additional resources: %s" % detail end next unless made unless made.is_a?(Array) made = [made] end made.uniq! made.each do |res| @catalog.add_resource(res) res.catalog = catalog newlist << res @generated << res res.finish end end end list.clear list = newlist newlist = [] end end # Generate a transaction report. def generate_report @resourcemetrics[:failed] = @failures.find_all do |name, num| num > 0 end.length # Get the total time spent @timemetrics[:total] = @timemetrics.inject(0) do |total, vals| total += vals[1] total end # Add all of the metrics related to resource count and status @report.newmetric(:resources, @resourcemetrics) # Record the relative time spent in each resource. @report.newmetric(:time, @timemetrics) # Then all of the change-related metrics @report.newmetric(:changes, :total => @changes.length ) @report.time = Time.now return @report end # Should we ignore tags? def ignore_tags? ! (@catalog.host_config? or Puppet[:name] == "puppet") end # this should only be called by a Puppet::Type::Component resource now # and it should only receive an array def initialize(resources) if resources.is_a?(Puppet::Node::Catalog) @catalog = resources elsif resources.is_a?(Puppet::PGraph) raise "Transactions should get catalogs now, not PGraph" else raise "Transactions require catalogs" end @resourcemetrics = { :total => @catalog.vertices.length, :out_of_sync => 0, # The number of resources that had changes :applied => 0, # The number of resources fixed :skipped => 0, # The number of resources skipped :restarted => 0, # The number of resources triggered :failed_restarts => 0, # The number of resources that fail a trigger :scheduled => 0 # The number of resources scheduled } # Metrics for distributing times across the different types. @timemetrics = Hash.new(0) # The number of resources that were triggered in this run @triggered = Hash.new { |hash, key| hash[key] = Hash.new(0) } # Targets of being triggered. @targets = Hash.new do |hash, key| hash[key] = [] end # The changes we're performing @changes = [] # The resources that have failed and the number of failures each. This # is used for skipping resources because of failed dependencies. @failures = Hash.new do |h, key| h[key] = 0 end @report = Report.new @count = 0 end # Prefetch any providers that support it. We don't support prefetching # types, just providers. def prefetch prefetchers = {} @catalog.vertices.each do |resource| if provider = resource.provider and provider.class.respond_to?(:prefetch) prefetchers[provider.class] ||= {} prefetchers[provider.class][resource.title] = resource end end # Now call prefetch, passing in the resources so that the provider instances can be replaced. prefetchers.each do |provider, resources| Puppet.debug "Prefetching %s resources for %s" % [provider.name, provider.resource_type.name] begin provider.prefetch(resources) rescue => detail if Puppet[:trace] puts detail.backtrace end Puppet.err "Could not prefetch %s provider '%s': %s" % [provider.resource_type.name, provider.name, detail] end end end # Prepare to evaluate the resources in a transaction. 
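# Illustrative aside, not part of the patch: the grouping performed by the
# prefetch method above, modelled with plain Ruby stand-ins instead of real
# types and providers. Every provider class that supports prefetching gets a
# single title => resource hash, so it can query the system once for all of
# its resources.
module FakeProvider
  def self.prefetch(resources)
    resources.each_key { |title| puts "prefetching #{title}" }
  end
end

FakeResource = Struct.new(:title, :provider)
resources = [FakeResource.new("one", FakeProvider), FakeResource.new("two", FakeProvider)]

prefetchers = Hash.new { |hash, key| hash[key] = {} }
resources.each do |resource|
  next unless resource.provider and resource.provider.respond_to?(:prefetch)
  prefetchers[resource.provider][resource.title] = resource
end
prefetchers.each { |provider, by_title| provider.prefetch(by_title) }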
def prepare prefetch() # Now add any dynamically generated resources generate() # This will throw an error if there are cycles in the graph. @sorted_resources = relationship_graph.topsort end def relationship_graph catalog.relationship_graph end # Send off the transaction report. def send_report begin report = generate_report() rescue => detail Puppet.err "Could not generate report: %s" % detail return end if Puppet[:rrdgraph] == true report.graph() end if Puppet[:summarize] puts report.summary end if Puppet[:report] begin reportclient().report(report) rescue => detail Puppet.err "Reporting failed: %s" % detail end end end def reportclient unless defined? @reportclient @reportclient = Puppet::Network::Client.report.new( :Server => Puppet[:reportserver] ) end @reportclient end # Roll all completed changes back. def rollback @targets.clear @triggered.clear allevents = @changes.reverse.collect { |change| # skip changes that were never actually run unless change.changed Puppet.debug "%s was not changed" % change.to_s next end begin events = change.backward rescue => detail Puppet.err("%s rollback failed: %s" % [change,detail]) if Puppet[:trace] puts detail.backtrace end next # at this point, we would normally do error handling # but i haven't decided what to do for that yet # so just record that a sync failed for a given resource #@@failures[change.property.parent] += 1 # this still could get hairy; what if file contents changed, # but a chmod failed? how would i handle that error? dern end # FIXME This won't work right now. relationship_graph.matching_edges(events).each do |edge| @targets[edge.target] << edge end # Now check to see if there are any events for this child. # Kind of hackish, since going backwards goes a change at a # time, not a child at a time. trigger(change.property.resource) # And return the events for collection events }.flatten.reject { |e| e.nil? } end # Is the resource currently scheduled? def scheduled?(resource) self.ignoreschedules or resource.scheduled? end # Set an edge to be triggered when we evaluate its target. def set_trigger(edge) return unless method = edge.callback return unless edge.target.respond_to?(method) if edge.target.respond_to?(:ref) unless edge.source == edge.target edge.source.info "Scheduling %s of %s" % [edge.callback, edge.target.ref] end end @targets[edge.target] << edge end # Should this resource be skipped? def skip?(resource) skip = false if missing_tags?(resource) resource.debug "Not tagged with %s" % tags.join(", ") elsif ! scheduled?(resource) resource.debug "Not scheduled" elsif failed_dependencies?(resource) resource.warning "Skipping because of failed dependencies" else return false end return true end # The tags we should be checking. def tags unless defined? @tags tags = Puppet[:tags] if tags.nil? or tags == "" @tags = [] else @tags = tags.split(/\s*,\s*/) end end @tags end + + def tags=(tags) + tags = [tags] unless tags.is_a?(Array) + @tags = tags + end # Is this resource tagged appropriately? def missing_tags?(resource) return false if self.ignore_tags? or tags.empty? return true unless resource.tagged?(tags) end # Are there any edges that target this resource? def targeted?(resource) # The default value is a new array so we have to test the length of it. @targets.include?(resource) and @targets[resource].length > 0 end # Trigger any subscriptions to a child. This does an upwardly recursive # search -- it triggers the passed resource, but also the resource's parent # and so on up the tree. 
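# Illustrative aside, not part of the patch: the tags= writer added above
# replaces the old attr_writer so that a bare string is wrapped in an array,
# which is exactly what the new spec example below exercises. Assumes the
# Puppet libraries from this tree are loadable.
require 'puppet'
require 'puppet/node/catalog'
require 'puppet/transaction'

transaction = Puppet::Transaction.new(Puppet::Node::Catalog.new)
transaction.tags = "one::two"
p transaction.tags   # => ["one::two"]
transaction.tags = %w{one two}
p transaction.tags   # => ["one", "two"]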
def trigger(resource) return nil unless targeted?(resource) callbacks = Hash.new { |hash, key| hash[key] = [] } trigged = [] @targets[resource].each do |edge| # Collect all of the subs for each callback callbacks[edge.callback] << edge end callbacks.each do |callback, subs| noop = true subs.each do |edge| if edge.event.nil? or ! edge.event.include?(:noop) noop = false end end if noop resource.notice "Would have triggered %s from %s dependencies" % [callback, subs.length] # And then add an event for it. return [Puppet::Event.new( :event => :noop, :transaction => self, :source => resource )] end if subs.length == 1 and subs[0].source == resource message = "Refreshing self" else message = "Triggering '%s' from %s dependencies" % [callback, subs.length] end resource.notice message # At this point, just log failures, don't try to react # to them in any way. begin resource.send(callback) @resourcemetrics[:restarted] += 1 rescue => detail resource.err "Failed to call %s on %s: %s" % [callback, resource, detail] @resourcemetrics[:failed_restarts] += 1 if Puppet[:trace] puts detail.backtrace end end # And then add an event for it. trigged << Puppet::Event.new( :event => :triggered, :transaction => self, :source => resource ) triggered(resource, callback) end if trigged.empty? return nil else return trigged end end def triggered(resource, method) @triggered[resource][method] += 1 end def triggered?(resource, method) @triggered[resource][method] end end end require 'puppet/transaction/report' diff --git a/spec/unit/other/transaction.rb b/spec/unit/other/transaction.rb index e277a24c0..0db470d26 100755 --- a/spec/unit/other/transaction.rb +++ b/spec/unit/other/transaction.rb @@ -1,28 +1,33 @@ #!/usr/bin/env ruby require File.dirname(__FILE__) + '/../../spec_helper' require 'puppet/transaction' describe Puppet::Transaction, " when determining tags" do before do @config = Puppet::Node::Catalog.new @transaction = Puppet::Transaction.new(@config) end it "should default to the tags specified in the :tags setting" do Puppet.expects(:[]).with(:tags).returns("one") @transaction.tags.should == %w{one} end it "should split tags based on ','" do Puppet.expects(:[]).with(:tags).returns("one,two") @transaction.tags.should == %w{one two} end it "should use any tags set after creation" do Puppet.expects(:[]).with(:tags).never @transaction.tags = %w{one two} @transaction.tags.should == %w{one two} end + + it "should always convert assigned tags to an array" do + @transaction.tags = "one::two" + @transaction.tags.should == %w{one::two} + end end diff --git a/spec/unit/parser/resource.rb b/spec/unit/parser/resource.rb index 035590341..9ce7b391b 100755 --- a/spec/unit/parser/resource.rb +++ b/spec/unit/parser/resource.rb @@ -1,164 +1,170 @@ #!/usr/bin/env ruby require File.dirname(__FILE__) + '/../../spec_helper' # LAK: FIXME This is just new tests for resources; I have # not moved all tests over yet. 
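# Illustrative aside, not part of the patch: the shape of the trigger
# bookkeeping in the transaction code above, with plain Ruby stand-ins. Edges
# scheduled by set_trigger are grouped per callback, and each callback is
# then sent once to the target resource.
Edge = Struct.new(:source, :target, :callback, :event)

class FakeService
  def refresh
    puts "service refreshed"
  end
end

target = FakeService.new
edges  = [Edge.new(:file_a, target, :refresh, [:file_changed]),
          Edge.new(:file_b, target, :refresh, [:file_changed])]

callbacks = Hash.new { |hash, key| hash[key] = [] }
edges.each { |edge| callbacks[edge.callback] << edge }
callbacks.each do |callback, subs|
  puts "Triggering '#{callback}' from #{subs.length} dependencies"
  target.send(callback)   # the real code also records events and restart metrics
end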
describe Puppet::Parser::Resource do before do @parser = Puppet::Parser::Parser.new :Code => "" @source = @parser.newclass "" @node = Puppet::Node.new("yaynode") @compiler = Puppet::Parser::Compiler.new(@node, @parser) @scope = @compiler.topscope end it "should be isomorphic if it is builtin and models an isomorphic type" do Puppet::Type.type(:file).expects(:isomorphic?).returns(true) @resource = Puppet::Parser::Resource.new(:type => "file", :title => "whatever", :scope => @scope, :source => @source).isomorphic?.should be_true end it "should not be isomorphic if it is builtin and models a non-isomorphic type" do Puppet::Type.type(:file).expects(:isomorphic?).returns(false) @resource = Puppet::Parser::Resource.new(:type => "file", :title => "whatever", :scope => @scope, :source => @source).isomorphic?.should be_false end it "should be isomorphic if it is not builtin" do @parser.newdefine "whatever" @resource = Puppet::Parser::Resource.new(:type => "whatever", :title => "whatever", :scope => @scope, :source => @source).isomorphic?.should be_true end describe "when evaluating" do before do @type = Puppet::Parser::Resource @definition = @parser.newdefine "mydefine" @class = @parser.newclass "myclass" @nodedef = @parser.newnode("mynode")[0] end it "should evaluate the associated AST definition" do res = @type.new(:type => "mydefine", :title => "whatever", :scope => @scope, :source => @source) @definition.expects(:evaluate_code).with(res) res.evaluate end it "should evaluate the associated AST class" do res = @type.new(:type => "class", :title => "myclass", :scope => @scope, :source => @source) @class.expects(:evaluate_code).with(res) res.evaluate end it "should evaluate the associated AST node" do res = @type.new(:type => "node", :title => "mynode", :scope => @scope, :source => @source) @nodedef.expects(:evaluate_code).with(res) res.evaluate end end describe "when finishing" do before do @definition = @parser.newdefine "mydefine" @class = @parser.newclass "myclass" @nodedef = @parser.newnode("mynode")[0] @resource = Puppet::Parser::Resource.new(:type => "mydefine", :title => "whatever", :scope => @scope, :source => @source) end + it "should do nothing if it has already been finished" do + @resource.finish + @resource.expects(:add_metaparams).never + @resource.finish + end + it "should copy metaparams from its scope" do @scope.setvar("noop", "true") @resource.class.publicize_methods(:add_metaparams) { @resource.add_metaparams } @resource["noop"].should == "true" end it "should not copy metaparams that it already has" do @resource.class.publicize_methods(:set_parameter) { @resource.set_parameter("noop", "false") } @scope.setvar("noop", "true") @resource.class.publicize_methods(:add_metaparams) { @resource.add_metaparams } @resource["noop"].should == "false" end it "should stack relationship metaparams from its container if it already has them" do @resource.class.publicize_methods(:set_parameter) { @resource.set_parameter("require", "resource") } @scope.setvar("require", "container") @resource.class.publicize_methods(:add_metaparams) { @resource.add_metaparams } @resource["require"].sort.should == %w{container resource} end it "should flatten the array resulting from stacking relationship metaparams" do @resource.class.publicize_methods(:set_parameter) { @resource.set_parameter("require", ["resource1", "resource2"]) } @scope.setvar("require", %w{container1 container2}) @resource.class.publicize_methods(:add_metaparams) { @resource.add_metaparams } @resource["require"].sort.should == %w{container1 
container2 resource1 resource2} end it "should add any tags from the scope resource" do scope_resource = stub 'scope_resource', :tags => %w{one two} @scope.stubs(:resource).returns(scope_resource) @resource.class.publicize_methods(:add_scope_tags) { @resource.add_scope_tags } @resource.tags.should be_include("one") @resource.tags.should be_include("two") end end describe "when being tagged" do before do @scope_resource = stub 'scope_resource', :tags => %w{srone srtwo} @scope = stub 'scope', :resource => @scope_resource @resource = Puppet::Parser::Resource.new(:type => "file", :title => "yay", :scope => @scope, :source => mock('source')) end it "should get tagged with the resource type" do @resource.tags.should be_include("file") end it "should get tagged with the title" do @resource.tags.should be_include("yay") end it "should get tagged with each name in the title if the title is a qualified class name" do resource = Puppet::Parser::Resource.new(:type => "file", :title => "one::two", :scope => @scope, :source => mock('source')) resource.tags.should be_include("one") resource.tags.should be_include("two") end it "should get tagged with each name in the type if the type is a qualified class name" do resource = Puppet::Parser::Resource.new(:type => "one::two", :title => "whatever", :scope => @scope, :source => mock('source')) resource.tags.should be_include("one") resource.tags.should be_include("two") end it "should not get tagged with non-alphanumeric titles" do resource = Puppet::Parser::Resource.new(:type => "file", :title => "this is a test", :scope => @scope, :source => mock('source')) resource.tags.should_not be_include("this is a test") end it "should fail on tags containing '*' characters" do lambda { @resource.tag("bad*tag") }.should raise_error(Puppet::ParseError) end it "should fail on tags starting with '-' characters" do lambda { @resource.tag("-badtag") }.should raise_error(Puppet::ParseError) end it "should fail on tags containing ' ' characters" do lambda { @resource.tag("bad tag") }.should raise_error(Puppet::ParseError) end it "should allow alpha tags" do lambda { @resource.tag("good_tag") }.should_not raise_error(Puppet::ParseError) end end end diff --git a/test/network/client/master.rb b/test/network/client/master.rb index 67c47fa6d..41796575f 100755 --- a/test/network/client/master.rb +++ b/test/network/client/master.rb @@ -1,585 +1,577 @@ #!/usr/bin/env ruby require File.dirname(__FILE__) + '/../../lib/puppettest' require 'puppettest' require 'mocha' class TestMasterClient < Test::Unit::TestCase include PuppetTest::ServerTest def setup super @master = Puppet::Network::Client.master end def mkmaster(options = {}) options[:UseNodes] = false options[:Local] = true if code = options[:Code] Puppet[:code] = code else Puppet[:manifest] = options[:Manifest] || mktestmanifest end # create our master # this is the default server setup master = Puppet::Network::Handler.master.new(options) return master end def mkclient(master = nil) master ||= mkmaster() client = Puppet::Network::Client.master.new( :Master => master ) return client end def test_disable FileUtils.mkdir_p(Puppet[:statedir]) manifest = mktestmanifest master = mkmaster(:Manifest => manifest) client = mkclient(master) assert_nothing_raised("Could not disable client") { client.disable } client.expects(:getconfig).never client.run client = mkclient(master) client.expects(:getconfig) assert_nothing_raised("Could not enable client") { client.enable } client.run end # Make sure we're getting the client version in our list 
of facts def test_clientversionfact facts = nil assert_nothing_raised { facts = Puppet::Network::Client.master.facts } assert_equal(Puppet.version.to_s, facts["clientversion"]) end # Make sure non-string facts don't make things go kablooie def test_nonstring_facts FileUtils.mkdir_p(Puppet[:statedir]) # Add a nonstring fact Facter.add("nonstring") do setcode { 1 } end assert_equal(1, Facter.nonstring, "Fact was a string from facter") client = mkclient() assert(! FileTest.exists?(@createdfile)) assert_nothing_raised { client.run } end # This method downloads files, and yields each file object if a block is given. def test_download source = tempfile() dest = tempfile() sfile = File.join(source, "file") dfile = File.join(dest, "file") Dir.mkdir(source) File.open(sfile, "w") {|f| f.puts "yay"} files = [] assert_nothing_raised do files = Puppet::Network::Client.master.download(:dest => dest, :source => source, :name => "testing") end assert(FileTest.directory?(dest), "dest dir was not created") assert(FileTest.file?(dfile), "dest file was not created") assert_equal(File.read(sfile), File.read(dfile), "Dest file had incorrect contents") assert_equal([dest, dfile].sort, files.sort, "Changed files were not returned correctly") end def test_getplugins Puppet[:filetimeout] = -1 Puppet[:pluginsource] = tempfile() Dir.mkdir(Puppet[:pluginsource]) Dir.mkdir(File.join(Puppet[:pluginsource], "testing")) $loaded = [] loader = Puppet::Util::Autoload.new(self, "testing") myplugin = File.join(Puppet[:pluginsource], "testing", "myplugin.rb") File.open(myplugin, "w") do |f| f.puts %{$loaded << :myplugin} end assert_nothing_raised("Could not get plugins") { Puppet::Network::Client.master.getplugins } destfile = File.join(Puppet[:plugindest], "testing", "myplugin.rb") assert(File.exists?(destfile), "Did not get plugin") assert(loader.load(:myplugin), "Did not load downloaded plugin") assert($loaded.include?(:myplugin), "Downloaded code was not evaluated") # Now modify the file and make sure the type is replaced File.open(myplugin, "w") do |f| f.puts %{$loaded << :changed} end assert_nothing_raised("Could not get plugin changes") { Puppet::Network::Client.master.getplugins } assert($loaded.include?(:changed), "Changed code was not evaluated") # Now try it again, to make sure we don't have any objects lying around assert_nothing_raised { Puppet::Network::Client.master.getplugins } end def test_getfacts Puppet[:filetimeout] = -1 Puppet[:factsource] = tempfile() Dir.mkdir(Puppet[:factsource]) hostname = Facter.value(:hostname) myfact = File.join(Puppet[:factsource], "myfact.rb") File.open(myfact, "w") do |f| f.puts %{Facter.add("myfact") do setcode { "yayness" } end } end assert_nothing_raised { Puppet::Network::Client.master.getfacts } destfile = File.join(Puppet[:factdest], "myfact.rb") assert(File.exists?(destfile), "Did not get fact") assert_equal(hostname, Facter.value(:hostname), "Lost value to hostname") assert_equal("yayness", Facter.value(:myfact), "Did not get correct fact value") # Now modify the file and make sure the type is replaced File.open(myfact, "w") do |f| f.puts %{Facter.add("myfact") do setcode { "funtest" } end } end assert_nothing_raised { Puppet::Network::Client.master.getfacts } assert_equal("funtest", Facter.value(:myfact), "Did not reload fact") assert_equal(hostname, Facter.value(:hostname), "Lost value to hostname") # Now run it again and make sure the fact still loads assert_nothing_raised { Puppet::Network::Client.master.getfacts } assert_equal("funtest", Facter.value(:myfact), "Did 
not reload fact") assert_equal(hostname, Facter.value(:hostname), "Lost value to hostname") end # Make sure that setting environment by fact takes precedence to configuration def test_setenvironmentwithfact name = "environment" value = "test_environment" - Puppet[:filetimeout] = -1 - Puppet[:factsource] = tempfile() - Dir.mkdir(Puppet[:factsource]) - file = File.join(Puppet[:factsource], "#{name}.rb") - File.open(file, "w") do |f| - f.puts %{Facter.add("#{name}") do setcode { "#{value}" } end } - end - - Puppet::Network::Client.master.getfacts + Facter.stubs(:to_hash).returns(name => value) assert_equal(value, Puppet::Network::Client.master.facts[name]) end # Make sure we load all facts on startup. def test_loadfacts dirs = [tempfile(), tempfile()] count = 0 names = [] dirs.each do |dir| Dir.mkdir(dir) name = "fact%s" % count names << name file = File.join(dir, "%s.rb" % name) # Write out a plugin file File.open(file, "w") do |f| f.puts %{Facter.add("#{name}") do setcode { "#{name}" } end } end count += 1 end Puppet[:factpath] = dirs.join(":") names.each do |name| assert_nil(Facter.value(name), "Somehow retrieved invalid fact") end assert_nothing_raised { Puppet::Network::Client.master.loadfacts } names.each do |name| assert_equal(name, Facter.value(name), "Did not retrieve facts") end end if Process.uid == 0 # Testing #283. Make sure plugins et al are downloaded as the running user. def test_download_ownership dir = tstdir() dest = tstdir() file = File.join(dir, "file") File.open(file, "w") { |f| f.puts "funtest" } user = nonrootuser() group = nonrootgroup() chowner = Puppet::Type.type(:file).create :path => dir, :owner => user.name, :group => group.name, :recurse => true assert_apply(chowner) chowner.remove assert_equal(user.uid, File.stat(file).uid) assert_equal(group.gid, File.stat(file).gid) assert_nothing_raised { Puppet::Network::Client.master.download(:dest => dest, :source => dir, :name => "testing" ) {} } destfile = File.join(dest, "file") assert(FileTest.exists?(destfile), "Did not create destfile") assert_equal(Process.uid, File.stat(destfile).uid) end end # Test retrieving all of the facts. def test_facts facts = nil assert_nothing_raised do facts = Puppet::Network::Client.master.facts end Facter.to_hash.each do |fact, value| assert_equal(facts[fact.downcase], value.to_s, "%s is not equal" % fact.inspect) end # Make sure the puppet version got added assert_equal(Puppet::PUPPETVERSION, facts["clientversion"], "client version did not get added") # And make sure the ruby version is in there assert_equal(RUBY_VERSION, facts["rubyversion"], "ruby version did not get added") end # #424 def test_caching_of_compile_time file = tempfile() manifest = tempfile() File.open(manifest, "w") { |f| f.puts "file { '#{file}': content => yay }" } Puppet::Node::Facts.indirection.stubs(:save) driver = mkmaster(:Manifest => manifest) driver.local = false master = mkclient(driver) # We have to make everything thinks it's remote, because there's no local caching info master.local = false assert(! 
master.fresh?(master.class.facts), "Considered fresh with no compile at all") assert_nothing_raised { master.run } assert(master.fresh?(master.class.facts), "not considered fresh after compile") # Now make sure the config time is cached assert(master.compile_time, "No stored config time") assert_equal(master.compile_time, Puppet::Util::Storage.cache(:configuration)[:compile_time], "times did not match") time = master.compile_time master.clear File.unlink(file) Puppet::Util::Storage.store # Now make a new master Puppet::Util::Storage.clear master = mkclient(driver) master.run assert_equal(time, master.compile_time, "time was not retrieved from cache") assert(FileTest.exists?(file), "file was not created on second run") end # #540 - make sure downloads aren't affected by noop def test_download_in_noop source = tempfile File.open(source, "w") { |f| f.puts "something" } dest = tempfile Puppet[:noop] = true assert_nothing_raised("Could not download in noop") do @master.download(:dest => dest, :source => source, :tag => "yay") end assert(FileTest.exists?(dest), "did not download in noop mode") assert(Puppet[:noop], "noop got disabled in run") end # #491 - make sure a missing config doesn't kill us def test_missing_localconfig master = mkclient master.local = false driver = master.send(:instance_variable_get, "@driver") driver.local = false Puppet::Node::Facts.indirection.stubs(:save) # Retrieve the configuration master.getconfig # Now the config is up to date, so get rid of the @objects var and # the cached config master.clear File.unlink(master.cachefile) assert_nothing_raised("Missing cache file threw error") do master.getconfig end assert(! @logs.detect { |l| l.message =~ /Could not load/}, "Tried to load cache when it is non-existent") end # #519 - cache the facts so that we notice if they change. def test_factchanges_cause_recompile $value = "one" Facter.add(:testfact) do setcode { $value } end assert_equal("one", Facter.value(:testfact), "fact was not set correctly") master = mkclient master.local = false driver = master.send(:instance_variable_get, "@driver") driver.local = false Puppet::Node::Facts.indirection.stubs(:save) assert_nothing_raised("Could not compile config") do master.getconfig end $value = "two" Facter.clear Facter.loadfacts Facter.add(:testfact) do setcode { $value } end facts = master.class.facts assert_equal("two", Facter.value(:testfact), "fact did not change") assert(master.send(:facts_changed?, facts), "master does not think facts changed") assert(! master.fresh?(facts), "master is considered fresh after facts changed") assert_nothing_raised("Could not recompile when facts changed") do master.getconfig end end def test_locking master = mkclient class << master def getconfig raise ArgumentError, "Just testing" end end master.run assert(! 
master.send(:lockfile).locked?, "Master is still locked after failure") end # Make sure we get a value for timeout def test_config_timeout master = Puppet::Network::Client.client(:master) time = Integer(Puppet[:configtimeout]) assert_equal(time, master.timeout, "Did not get default value for timeout") assert_equal(time, master.timeout, "Did not get default value for timeout on second run") # Reset it Puppet[:configtimeout] = "50" assert_equal(50, master.timeout, "Did not get changed default value for timeout") assert_equal(50, master.timeout, "Did not get changed default value for timeout on second run") # Now try an integer Puppet[:configtimeout] = 100 assert_equal(100, master.timeout, "Did not get changed integer default value for timeout") assert_equal(100, master.timeout, "Did not get changed integer default value for timeout on second run") end # #569 -- Make sure we can ignore dynamic facts. def test_dynamic_facts client = mkclient assert_equal(%w{memorysize memoryfree swapsize swapfree}, client.class.dynamic_facts, "Did not get correct defaults for dynamic facts") # Cache some values for comparison cached = {"one" => "yep", "two" => "nope"} Puppet::Util::Storage.cache(:configuration)[:facts] = cached assert(! client.send(:facts_changed?, cached), "Facts incorrectly considered to be changed") # Now add some values to the passed result and make sure we get a positive newfacts = cached.dup newfacts["changed"] = "something" assert(client.send(:facts_changed?, newfacts), "Did not catch changed fact") # Now add a dynamic fact and make sure it's ignored newfacts = cached.dup newfacts["memorysize"] = "something" assert(! client.send(:facts_changed?, newfacts), "Dynamic facts resulted in a false positive") # And try it with both cached["memorysize"] = "something else" assert(! client.send(:facts_changed?, newfacts), "Dynamic facts resulted in a false positive") # And finally, with only in the cache newfacts.delete("memorysize") assert(! client.send(:facts_changed?, newfacts), "Dynamic facts resulted in a false positive") end def test_splay client = mkclient # Make sure we default to no splay client.expects(:sleep).never assert_nothing_raised("Failed to call splay") do client.send(:splay) end # Now set it to true and make sure we get the right value client = mkclient client.expects(:sleep) Puppet[:splay] = true assert_nothing_raised("Failed to call sleep when splay is true") do client.send(:splay) end time = Puppet::Util::Storage.cache(:configuration)[:splay_time] assert(time, "Splay time was not cached") # Now try it again client = mkclient client.expects(:sleep).with(time) assert_nothing_raised("Failed to call sleep when splay is true with a cached value") do client.send(:splay) end end def test_environment_is_added_to_facts facts = Puppet::Network::Client::Master.facts assert_equal(facts["environment"], Puppet[:environment], "Did not add environment to client facts") # Now set it to a real value Puppet[:environments] = "something,else" Puppet[:environment] = "something" facts = Puppet::Network::Client::Master.facts assert_equal(facts["environment"], Puppet[:environment], "Did not add environment to client facts") end # This is partially to fix #532, but also to save on memory. 
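# Illustrative aside, not part of the patch: the idea test_dynamic_facts above
# exercises -- volatile facts (memorysize, memoryfree, swapsize, swapfree) are
# ignored when deciding whether cached facts have changed. Plain Ruby
# stand-in; the real check is the master client's facts_changed? method.
DYNAMIC_FACTS = %w{memorysize memoryfree swapsize swapfree}

def example_facts_changed?(cached, current)
  strip = lambda { |facts| facts.reject { |name, _| DYNAMIC_FACTS.include?(name) } }
  strip.call(cached) != strip.call(current)
end

cached  = {"one" => "yep", "memorysize" => "1 GB"}
current = {"one" => "yep", "memorysize" => "2 GB"}
puts example_facts_changed?(cached, current)                      # false: only a dynamic fact moved
puts example_facts_changed?(cached, current.merge("two" => "x"))  # true: a real fact was added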
def test_remove_objects_after_every_run client = mkclient ftype = Puppet::Type.type(:file) file = ftype.create :title => "/what/ever", :ensure => :present config = Puppet::Node::Catalog.new config.add_resource(file) config.expects :apply client.catalog = config client.expects(:getconfig) client.run assert_nil(ftype[@createdfile], "file object was not removed from memory") end # #685 def test_http_failures_do_not_kill_puppetd client = mkclient client.meta_def(:getconfig) { raise "A failure" } assert_nothing_raised("Failure in getconfig threw an error") do client.run end end def test_invalid_catalogs_do_not_get_cached master = mkmaster :Code => "notify { one: require => File[yaytest] }" master.local = false # so it gets cached client = mkclient(master) client.stubs(:facts).returns({}) client.local = false Puppet::Node::Facts.indirection.stubs(:terminus_class).returns(:memory) # Make sure the config is not cached. client.expects(:cache).never client.getconfig # Doesn't throw an exception, but definitely fails. client.run end end diff --git a/test/rails/railsparameter.rb b/test/rails/railsparameter.rb index d83115b1a..7c99ac38d 100755 --- a/test/rails/railsparameter.rb +++ b/test/rails/railsparameter.rb @@ -1,72 +1,74 @@ #!/usr/bin/env ruby require File.dirname(__FILE__) + '/../lib/puppettest' require 'puppet' require 'puppet/rails' require 'puppettest' require 'puppettest/railstesting' # Don't do any tests w/out this class if defined? ActiveRecord::Base class TestRailsParameter < Test::Unit::TestCase include PuppetTest::RailsTesting def params {"myname" => "myval", "multiple" => %w{one two three}} end # Create a resource param from a rails parameter def test_to_resourceparam railsinit # Now create a source parser = mkparser source = parser.newclass "myclass" host = Puppet::Rails::Host.new(:name => "myhost") + + host.save resource = host.resources.create( :title => "/tmp/to_resource", :restype => "file", :exported => true) # Use array and non-array values, to make sure we get things back in # the same form. params.each do |name, value| param = Puppet::Rails::ParamName.find_or_create_by_name(name) if value.is_a? Array values = value else values = [value] end valueobjects = values.collect do |v| resource.param_values.create(:value => v, :param_name => param) end assert(param, "Did not create rails parameter") # The id doesn't get assigned until we save end resource.save # And try to convert our parameter params.each do |name, value| param = Puppet::Rails::ParamName.find_by_name(name) pp = nil assert_nothing_raised do pp = param.to_resourceparam(resource, source) end assert_instance_of(Puppet::Parser::Resource::Param, pp) assert_equal(name.to_sym, pp.name, "parameter name was not equal") assert_equal(value, pp.value, "value was not equal for %s" % value.inspect) end end end else $stderr.puts "Install Rails for Rails and Caching tests" end diff --git a/test/rails/railsresource.rb b/test/rails/railsresource.rb index 58058472d..3df5001be 100755 --- a/test/rails/railsresource.rb +++ b/test/rails/railsresource.rb @@ -1,250 +1,251 @@ #!/usr/bin/env ruby require File.dirname(__FILE__) + '/../lib/puppettest' require 'puppet' require 'puppet/rails' require 'puppettest' require 'puppettest/railstesting' require 'puppettest/resourcetesting' require 'puppettest/parsertesting' # Don't do any tests w/out this class if Puppet.features.rails? 
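# Illustrative aside, not part of the patch: the host.save calls added in the
# Rails tests here persist the parent Host row before child resources are
# created through the association. A minimal sketch, assuming the test
# database that railsinit sets up is available:
require 'puppet'
require 'puppet/rails'

host = Puppet::Rails::Host.new(:name => "myhost")
host.save   # the parent must exist before host.resources.create will stick
resource = host.resources.create(
    :title => "/tmp/to_resource", :restype => "file", :exported => true)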
class TestRailsResource < Test::Unit::TestCase include PuppetTest::RailsTesting include PuppetTest::ResourceTesting include PuppetTest::ParserTesting def setup super railsinit end def teardown railsteardown super end def mktest_resource # We need a host for resources host = Puppet::Rails::Host.new(:name => "myhost") + host.save # Now build a resource resource = host.resources.create( :title => "/tmp/to_resource", :restype => "file", :exported => true) # Now add some params params.each do |param, value| pn = Puppet::Rails::ParamName.find_or_create_by_name(param) pv = resource.param_values.create(:value => value, :param_name => pn) end host.save return resource end def params {"owner" => "root", "mode" => "644"} end # Create a resource param from a rails parameter def test_to_resource resource = mktest_resource # We need a scope scope = mkscope # Find the new resource and include all it's parameters. resource = Puppet::Rails::Resource.find_by_id(resource.id) # Now, try to convert our resource to a real resource res = nil assert_nothing_raised do res = resource.to_resource(scope) end assert_instance_of(Puppet::Parser::Resource, res) assert_equal("root", res[:owner]) assert_equal("644", res[:mode]) assert_equal("/tmp/to_resource", res.title) assert_equal(scope.source, res.source) end def test_parameters resource = mktest_resource setparams = nil assert_nothing_raised do setparams = resource.parameters.inject({}) { |h, a| h[a[0]] = a[1][0] h } end assert_equal(params, setparams, "Did not get the right answer from #parameters") end # Make sure we can retrieve individual parameters by name. def test_parameter resource = mktest_resource params.each do |p,v| assert_equal(v, resource.parameter(p), "%s is not correct" % p) end end end else $stderr.puts "Install Rails for Rails and Caching tests" end # A separate class for testing rails integration class TestExportedResources < PuppetTest::TestCase include PuppetTest include PuppetTest::ParserTesting include PuppetTest::ResourceTesting include PuppetTest::RailsTesting Parser = Puppet::Parser AST = Parser::AST Reference = Puppet::Parser::Resource::Reference def setup super Puppet[:trace] = false @scope = mkscope end confine "Missing rails support" => Puppet.features.rails? # Compare a parser resource to a rails resource. 
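# Illustrative aside, not part of the patch: the to_label helpers added to the
# Rails models earlier in this patch simply return a display string for a
# record. A minimal sketch, assuming the test database from railsinit:
require 'puppet'
require 'puppet/rails'

sf = Puppet::Rails::SourceFile.new(:filename => "/etc/puppet/manifests/site.pp")
puts sf.to_label   # => "/etc/puppet/manifests/site.pp"
tag = Puppet::Rails::ResourceTag.new(:puppet_tag => Puppet::Rails::PuppetTag.new(:name => "webserver"))
puts tag.to_label  # => "webserver"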
def compare_resources(host, res, updating, options = {}) newobj = nil # If the resource is in the db, then use modify_rails, else use to_rails if newobj = Puppet::Rails::Resource.find_by_restype_and_title(res.type, res.title) assert_nothing_raised("Call to modify_rails failed") do res.modify_rails(newobj) end else assert_nothing_raised("Call to to_rails failed") do newobj = res.to_rails(host) end end assert_instance_of(Puppet::Rails::Resource, newobj) newobj.save if updating tail = "on update" else tail = "" end # Make sure we find our object and only our object count = 0 obj = nil Puppet::Rails::Resource.find(:all).each do |obj| assert_equal(newobj.id, obj.id, "A new resource was created instead of modifying an existing resource") count += 1 [:title, :restype, :line, :exported].each do |param| if param == :restype method = :type else method = param end assert_equal(res.send(method), obj[param], "attribute %s was not set correctly in rails %s" % [param, tail]) end end assert_equal(1, count, "Got too many resources %s" % tail) # Now make sure we can find it again assert_nothing_raised do obj = Puppet::Rails::Resource.find_by_restype_and_title( res.type, res.title, :include => :param_names ) end assert_instance_of(Puppet::Rails::Resource, obj) # Make sure we get the parameters back params = options[:params] || [obj.param_names.collect { |p| p.name }, res.to_hash.keys].flatten.collect { |n| n.to_s }.uniq params.each do |name| param = obj.param_names.find_by_name(name) if res[name] assert(param, "resource did not keep %s %s" % [name, tail]) else assert(! param, "resource did not delete %s %s" % [name, tail]) end if param values = param.param_values.collect { |pv| pv.value } should = res[param.name] should = [should] unless should.is_a?(Array) assert_equal(should, values, "%s was different %s" % [param.name, tail]) end end return obj end def test_to_rails railsteardown railsinit ref1 = Reference.new :type => "exec", :title => "one" ref2 = Reference.new :type => "exec", :title => "two" res = mkresource :type => "file", :title => "/tmp/testing", :source => @source, :scope => @scope, :params => {:owner => "root", :source => ["/tmp/A", "/tmp/B"], :mode => "755", :require => [ref1, ref2], :subscribe => ref1} res.line = 50 # We also need a Rails Host to store under host = Puppet::Rails::Host.new(:name => Facter.hostname) railsres = compare_resources(host, res, false, :params => %w{owner source mode}) # Now make sure our parameters did not change assert_instance_of(Array, res[:require], "Parameter array changed") res[:require].each do |ref| assert_instance_of(Reference, ref, "Resource reference changed") end assert_instance_of(Reference, res[:subscribe], "Resource reference changed") # And make sure that the rails resource actually has resource references params = railsres.parameters [params["subscribe"], params["require"]].flatten.each do |ref| assert_instance_of(Reference, ref, "Resource reference is no longer a reference") end # Now make some changes to our resource. We're removing the mode, # changing the source, and adding 'check'. 
res = mkresource :type => "file", :title => "/tmp/testing", :source => @source, :scope => @scope, :params => {:owner => "bin", :source => ["/tmp/A", "/tmp/C"], :check => "checksum", :require => [ref1, ref2], :subscribe => ref2} res.line = 75 res.exported = true railsres = compare_resources(host, res, true, :params => %w{owner source mode check}) # Again make sure our parameters did not change assert_instance_of(Array, res[:require], "Parameter array changed") res[:require].each do |ref| assert_instance_of(Reference, ref, "Resource reference changed") end # Again with the serialization checks params = railsres.parameters [params["subscribe"], params["require"]].flatten.each do |ref| assert_instance_of(Reference, ref, "Resource reference is no longer a reference") end end end diff --git a/test/ral/type/file.rb b/test/ral/type/file.rb index cbbe818ae..5cf989aa3 100755 --- a/test/ral/type/file.rb +++ b/test/ral/type/file.rb @@ -1,1782 +1,1776 @@ #!/usr/bin/env ruby require File.dirname(__FILE__) + '/../../lib/puppettest' require 'puppettest' require 'puppettest/support/utils' require 'fileutils' class TestFile < Test::Unit::TestCase include PuppetTest::Support::Utils include PuppetTest::FileTesting def mkfile(hash) file = nil assert_nothing_raised { file = Puppet.type(:file).create(hash) } return file end def mktestfile tmpfile = tempfile() File.open(tmpfile, "w") { |f| f.puts rand(100) } @@tmpfiles.push tmpfile mkfile(:name => tmpfile) end def setup super @file = Puppet::Type.type(:file) $method = @method_name Puppet[:filetimeout] = -1 end def teardown Puppet::Util::Storage.clear system("rm -rf %s" % Puppet[:statefile]) super end def initstorage Puppet::Util::Storage.init Puppet::Util::Storage.load end def clearstorage Puppet::Util::Storage.store Puppet::Util::Storage.clear end def test_owner file = mktestfile() users = {} count = 0 # collect five users Etc.passwd { |passwd| if count > 5 break else count += 1 end users[passwd.uid] = passwd.name } fake = {} # find a fake user while true a = rand(1000) begin Etc.getpwuid(a) rescue fake[a] = "fakeuser" break end end uid, name = users.shift us = {} us[uid] = name users.each { |uid, name| assert_apply(file) assert_nothing_raised() { file[:owner] = name } assert_nothing_raised() { file.retrieve } assert_apply(file) } end def test_group file = mktestfile() [%x{groups}.chomp.split(/ /), Process.groups].flatten.each { |group| assert_nothing_raised() { file[:group] = group } assert(file.property(:group)) assert(file.property(:group).should) } end def test_groups_fails_when_invalid assert_raise(Puppet::Error, "did not fail when the group was empty") do Puppet::Type.type(:file).create :path => "/some/file", :group => "" end end if Puppet::Util::SUIDManager.uid == 0 def test_createasuser dir = tmpdir() user = nonrootuser() path = File.join(tmpdir, "createusertesting") @@tmpfiles << path file = nil assert_nothing_raised { file = Puppet.type(:file).create( :path => path, :owner => user.name, :ensure => "file", :mode => "755" ) } comp = mk_catalog("createusertest", file) assert_events([:file_created], comp) end def test_nofollowlinks basedir = tempfile() Dir.mkdir(basedir) file = File.join(basedir, "file") link = File.join(basedir, "link") File.open(file, "w", 0644) { |f| f.puts "yayness"; f.flush } File.symlink(file, link) # First test 'user' user = nonrootuser() inituser = File.lstat(link).uid File.lchown(inituser, nil, link) obj = nil assert_nothing_raised { obj = Puppet.type(:file).create( :title => link, :owner => user.name ) } obj.retrieve # Make sure 
it defaults to managing the link assert_events([:file_changed], obj) assert_equal(user.uid, File.lstat(link).uid) assert_equal(inituser, File.stat(file).uid) File.chown(inituser, nil, file) File.lchown(inituser, nil, link) # Try following obj[:links] = :follow assert_events([:file_changed], obj) assert_equal(user.uid, File.stat(file).uid) assert_equal(inituser, File.lstat(link).uid) # And then explicitly managing File.chown(inituser, nil, file) File.lchown(inituser, nil, link) obj[:links] = :manage assert_events([:file_changed], obj) assert_equal(user.uid, File.lstat(link).uid) assert_equal(inituser, File.stat(file).uid) obj.delete(:owner) obj[:links] = :follow # And then test 'group' group = nonrootgroup initgroup = File.stat(file).gid obj[:group] = group.name - assert_events([:file_changed], obj) - assert_equal(initgroup, File.stat(file).gid) - assert_equal(group.gid, File.lstat(link).gid) - File.chown(nil, initgroup, file) - File.lchown(nil, initgroup, link) - obj[:links] = :follow assert_events([:file_changed], obj) assert_equal(group.gid, File.stat(file).gid) File.chown(nil, initgroup, file) File.lchown(nil, initgroup, link) obj[:links] = :manage assert_events([:file_changed], obj) assert_equal(group.gid, File.lstat(link).gid) assert_equal(initgroup, File.stat(file).gid) end def test_ownerasroot file = mktestfile() users = {} count = 0 # collect five users Etc.passwd { |passwd| if count > 5 break else count += 1 end next if passwd.uid < 0 users[passwd.uid] = passwd.name } fake = {} # find a fake user while true a = rand(1000) begin Etc.getpwuid(a) rescue fake[a] = "fakeuser" break end end users.each { |uid, name| assert_nothing_raised() { file[:owner] = name } changes = [] assert_nothing_raised() { changes << file.evaluate } assert(changes.length > 0) assert_apply(file) currentvalue = file.retrieve assert(file.insync?(currentvalue)) assert_nothing_raised() { file[:owner] = uid } assert_apply(file) currentvalue = file.retrieve # make sure changing to number doesn't cause a sync assert(file.insync?(currentvalue)) } # We no longer raise an error here, because we check at run time #fake.each { |uid, name| # assert_raise(Puppet::Error) { # file[:owner] = name # } # assert_raise(Puppet::Error) { # file[:owner] = uid # } #} end def test_groupasroot file = mktestfile() [%x{groups}.chomp.split(/ /), Process.groups].flatten.each { |group| next unless Puppet::Util.gid(group) # grr. 
assert_nothing_raised() { file[:group] = group } assert(file.property(:group)) assert(file.property(:group).should) assert_apply(file) currentvalue = file.retrieve assert(file.insync?(currentvalue)) assert_nothing_raised() { file.delete(:group) } } end if Facter.value(:operatingsystem) == "Darwin" def test_sillyowner file = tempfile() File.open(file, "w") { |f| f.puts "" } File.chown(-2, nil, file) assert(File.stat(file).uid > 120000, "eh?") user = nonrootuser obj = Puppet::Type.newfile( :path => file, :owner => user.name ) assert_apply(obj) assert_equal(user.uid, File.stat(file).uid) end end else $stderr.puts "Run as root for complete owner and group testing" end def test_create %w{a b c d}.collect { |name| tempfile() + name.to_s }.each { |path| file =nil assert_nothing_raised() { file = Puppet.type(:file).create( :name => path, :ensure => "file" ) } assert_events([:file_created], file) assert_events([], file) assert(FileTest.file?(path), "File does not exist") assert(file.insync?(file.retrieve)) @@tmpfiles.push path } end def test_create_dir basedir = tempfile() Dir.mkdir(basedir) %w{a b c d}.collect { |name| "#{basedir}/%s" % name }.each { |path| file = nil assert_nothing_raised() { file = Puppet.type(:file).create( :name => path, :ensure => "directory" ) } assert(! FileTest.directory?(path), "Directory %s already exists" % [path]) assert_events([:directory_created], file) assert_events([], file) assert(file.insync?(file.retrieve)) assert(FileTest.directory?(path)) @@tmpfiles.push path } end def test_modes file = mktestfile # Set it to something else initially File.chmod(0775, file.title) [0644,0755,0777,0641].each { |mode| assert_nothing_raised() { file[:mode] = mode } assert_events([:file_changed], file) assert_events([], file) assert(file.insync?(file.retrieve)) assert_nothing_raised() { file.delete(:mode) } } end def test_checksums types = %w{md5 md5lite timestamp time} exists = "/tmp/sumtest-exists" nonexists = "/tmp/sumtest-nonexists" @@tmpfiles << exists @@tmpfiles << nonexists # try it both with files that exist and ones that don't files = [exists, nonexists] initstorage File.open(exists,File::CREAT|File::TRUNC|File::WRONLY) { |of| of.puts "initial text" } types.each { |type| files.each { |path| if Puppet[:debug] Puppet.warning "Testing %s on %s" % [type,path] end file = nil events = nil # okay, we now know that we have a file... assert_nothing_raised() { file = Puppet.type(:file).create( :name => path, :ensure => "file", :checksum => type ) } trans = nil currentvalues = file.retrieve if file.title !~ /nonexists/ sum = file.property(:checksum) assert(sum.insync?(currentvalues[sum]), "file is not in sync") end events = assert_apply(file) assert(events) assert(! events.include?(:file_changed), "File incorrectly changed") assert_events([], file) # We have to sleep because the time resolution of the time-based # mechanisms is greater than one second sleep 1 if type =~ /time/ assert_nothing_raised() { File.open(path,File::CREAT|File::TRUNC|File::WRONLY) { |of| of.puts "some more text, yo" } } Puppet.type(:file).clear # now recreate the file assert_nothing_raised() { file = Puppet.type(:file).create( :name => path, :checksum => type ) } trans = nil assert_events([:file_changed], file) # Run it a few times to make sure we aren't getting # spurious changes. 
sum = nil assert_nothing_raised do sum = file.property(:checksum).retrieve end assert(file.property(:checksum).insync?(sum), "checksum is not in sync") sleep 1.1 if type =~ /time/ assert_nothing_raised() { File.unlink(path) File.open(path,File::CREAT|File::TRUNC|File::WRONLY) { |of| # We have to put a certain amount of text in here or # the md5-lite test fails 2.times { of.puts rand(100) } of.flush } } assert_events([:file_changed], file) # verify that we're actually getting notified when a file changes assert_nothing_raised() { Puppet.type(:file).clear } if path =~ /nonexists/ File.unlink(path) end } } end def cyclefile(path) # i had problems with using :name instead of :path [:name,:path].each { |param| file = nil changes = nil comp = nil trans = nil initstorage assert_nothing_raised { file = Puppet.type(:file).create( param => path, :recurse => true, :checksum => "md5" ) } comp = Puppet.type(:component).create( :name => "component" ) comp.push file assert_nothing_raised { trans = comp.evaluate } assert_nothing_raised { trans.evaluate } clearstorage Puppet::Type.allclear } end def test_localrecurse # Create a test directory path = tempfile() dir = @file.create :path => path, :mode => 0755, :recurse => true config = mk_catalog(dir) Dir.mkdir(path) # Make sure we return nothing when there are no children ret = nil assert_nothing_raised() { ret = dir.localrecurse(true) } assert_equal([], ret, "empty dir returned children") # Now make a file and make sure we get it test = File.join(path, "file") File.open(test, "w") { |f| f.puts "yay" } assert_nothing_raised() { ret = dir.localrecurse(true) } fileobj = @file[test] assert(fileobj, "child object was not created") assert_equal([fileobj], ret, "child object was not returned") # And that it inherited our recurse setting assert_equal(true, fileobj[:recurse], "file did not inherit recurse") # Make sure it's not returned again assert_nothing_raised() { ret = dir.localrecurse(true) } assert_equal([], ret, "child object was returned twice") # Now just for completion, make sure we will return many files files = [] 10.times do |i| f = File.join(path, i.to_s) files << f File.open(f, "w") do |o| o.puts "" end end assert_nothing_raised() { ret = dir.localrecurse(true) } assert_equal(files.sort, ret.collect { |f| f.title }.sort, "child object was returned twice") # Clean everything up and start over files << test files.each do |f| File.unlink(f) end # Now make sure we correctly ignore things dir[:ignore] = "*.out" bad = File.join(path, "test.out") good = File.join(path, "yayness") [good, bad].each do |f| File.open(f, "w") { |o| o.puts "" } end assert_nothing_raised() { ret = dir.localrecurse(true) } assert_equal([good], ret.collect { |f| f.title }, "ignore failed") # Now make sure purging works dir[:purge] = true dir[:ignore] = "svn" assert_nothing_raised() { ret = dir.localrecurse(true) } assert_equal([bad], ret.collect { |f| f.title }, "purge failed") badobj = @file[bad] assert(badobj, "did not create bad object") end def test_recurse basedir = tempfile() FileUtils.mkdir_p(basedir) # Create our file dir = nil assert_nothing_raised { dir = Puppet.type(:file).create( :path => basedir, :check => %w{owner mode group} ) } return_nil = false # and monkey-patch it [:localrecurse, :linkrecurse].each do |m| dir.meta_def(m) do |recurse| if return_nil # for testing nil return, of course return nil else return [recurse] end end end # We have to special-case this, because it returns a list of # found files. 
dir.meta_def(:sourcerecurse) do |recurse| if return_nil # for testing nil return, of course return nil else return [recurse], [] end end # First try it with recurse set to false dir[:recurse] = false assert_nothing_raised do assert_nil(dir.recurse) end # Now try it with the different valid positive values [true, "true", "inf", 50].each do |value| assert_nothing_raised { dir[:recurse] = value} # Now make sure the methods are called appropriately ret = nil assert_nothing_raised do ret = dir.recurse end # We should only call the localrecurse method, so make sure # that's the case if value == 50 # Make sure our counter got decremented assert_equal([49], ret, "did not call localrecurse") else assert_equal([true], ret, "did not call localrecurse") end end # Make sure it doesn't recurse when we've set recurse to false [false, "false"].each do |value| assert_nothing_raised { dir[:recurse] = value } ret = nil assert_nothing_raised() { ret = dir.recurse } assert_nil(ret) end dir[:recurse] = true # Now add a target, so we do the linking thing dir[:target] = tempfile() ret = nil assert_nothing_raised { ret = dir.recurse } assert_equal([true, true], ret, "did not call linkrecurse") # And add a source, and make sure we call that dir[:source] = tempfile() assert_nothing_raised { ret = dir.recurse } assert_equal([true, true, true], ret, "did not call linkrecurse") # Lastly, make sure we correctly handle returning nil return_nil = true assert_nothing_raised { ret = dir.recurse } end def test_recurse? file = Puppet::Type.type(:file).create :path => tempfile # Make sure we default to false assert(! file.recurse?, "Recurse defaulted to true") [true, "true", 10, "inf"].each do |value| file[:recurse] = value assert(file.recurse?, "%s did not cause recursion" % value) end [false, "false", 0].each do |value| file[:recurse] = value assert(! file.recurse?, "%s caused recursion" % value) end end def test_recursion basedir = tempfile() subdir = File.join(basedir, "subdir") tmpfile = File.join(basedir,"testing") FileUtils.mkdir_p(subdir) dir = nil [true, "true", "inf", 50].each do |value| assert_nothing_raised { dir = Puppet.type(:file).create( :path => basedir, :recurse => value, :check => %w{owner mode group} ) } config = mk_catalog dir children = nil assert_nothing_raised { children = dir.eval_generate } assert_equal([subdir], children.collect {|c| c.title }, "Incorrect generated children") # Remove our subdir resource, subdir_resource = config.resource(:file, subdir) config.remove_resource(subdir_resource) # Create the test file File.open(tmpfile, "w") { |f| f.puts "yayness" } assert_nothing_raised { children = dir.eval_generate } # And make sure we get both resources back. 
assert_equal([subdir, tmpfile].sort, children.collect {|c| c.title }.sort, "Incorrect generated children when recurse == %s" % value.inspect) File.unlink(tmpfile) Puppet.type(:file).clear end end def test_filetype_retrieval file = nil # Verify it retrieves files of type directory assert_nothing_raised { file = Puppet.type(:file).create( :name => tmpdir(), :check => :type ) } assert_nothing_raised { file.evaluate } assert_equal("directory", file.property(:type).retrieve) # And then check files assert_nothing_raised { file = Puppet.type(:file).create( :name => tempfile(), :ensure => "file" ) } assert_apply(file) file[:check] = "type" assert_apply(file) assert_equal("file", file.property(:type).retrieve) file[:type] = "directory" currentvalues = {} assert_nothing_raised { currentvalues = file.retrieve } # The 'retrieve' method sets @should to @is, so they're never # out of sync. It's a read-only class. assert(file.insync?(currentvalues)) end def test_remove basedir = tempfile() subdir = File.join(basedir, "this") FileUtils.mkdir_p(subdir) dir = nil assert_nothing_raised { dir = Puppet.type(:file).create( :path => basedir, :recurse => true, :check => %w{owner mode group} ) } mk_catalog dir assert_nothing_raised { dir.eval_generate } obj = nil assert_nothing_raised { obj = Puppet.type(:file)[subdir] } assert(obj, "Could not retrieve subdir object") assert_nothing_raised { obj.remove(true) } assert_nothing_raised { obj = Puppet.type(:file)[subdir] } assert_nil(obj, "Retrieved removed object") end def test_path dir = tempfile() path = File.join(dir, "subdir") assert_nothing_raised("Could not make file") { FileUtils.mkdir_p(File.dirname(path)) File.open(path, "w") { |f| f.puts "yayness" } } file = nil dirobj = nil assert_nothing_raised("Could not make file object") { dirobj = Puppet.type(:file).create( :path => dir, :recurse => true, :check => %w{mode owner group} ) } mk_catalog dirobj assert_nothing_raised { dirobj.eval_generate } assert_nothing_raised { file = dirobj.class[path] } assert(file, "Could not retrieve file object") assert_equal("/%s" % file.ref, file.path) end def test_autorequire basedir = tempfile() subfile = File.join(basedir, "subfile") baseobj = Puppet.type(:file).create( :name => basedir, :ensure => "directory" ) subobj = Puppet.type(:file).create( :name => subfile, :ensure => "file" ) edge = nil assert_nothing_raised do edge = subobj.autorequire.shift end assert_equal(baseobj, edge.source, "file did not require its parent dir") assert_equal(subobj, edge.target, "file did not require its parent dir") end def test_content file = tempfile() str = "This is some content" obj = nil assert_nothing_raised { obj = Puppet.type(:file).create( :name => file, :content => str ) } assert(!obj.insync?(obj.retrieve), "Object is incorrectly in sync") assert_events([:file_created], obj) currentvalues = obj.retrieve assert(obj.insync?(currentvalues), "Object is not in sync") text = File.read(file) assert_equal(str, text, "Content did not copy correctly") newstr = "Another string, yo" obj[:content] = newstr assert(!obj.insync?(obj.retrieve), "Object is incorrectly in sync") assert_events([:file_changed], obj) text = File.read(file) assert_equal(newstr, text, "Content did not copy correctly") currentvalues = obj.retrieve assert(obj.insync?(currentvalues), "Object is not in sync") end # Unfortunately, I know this fails def disabled_test_recursivemkdir path = tempfile() subpath = File.join(path, "this", "is", "a", "dir") file = nil assert_nothing_raised { file = Puppet.type(:file).create( :name => 
subpath, :ensure => "directory", :recurse => true ) } comp = mk_catalog("yay", file) comp.finalize assert_apply(comp) #assert_events([:directory_created], comp) assert(FileTest.directory?(subpath), "Did not create directory") end # Make sure that content updates the checksum on the same run def test_checksumchange_for_content dest = tempfile() File.open(dest, "w") { |f| f.puts "yayness" } file = nil assert_nothing_raised { file = Puppet.type(:file).create( :name => dest, :checksum => "md5", :content => "This is some content" ) } file.retrieve assert_events([:file_changed], file) file.retrieve assert_events([], file) end # Make sure that ensure (creating the file) updates the checksum on the same run def test_checksumchange_for_ensure dest = tempfile() file = nil assert_nothing_raised { file = Puppet.type(:file).create( :name => dest, :checksum => "md5", :ensure => "file" ) } file.retrieve assert_events([:file_created], file) file.retrieve assert_events([], file) end # Make sure that content gets used before ensure def test_contentbeatsensure dest = tempfile() file = nil assert_nothing_raised { file = Puppet.type(:file).create( :name => dest, :ensure => "file", :content => "this is some content, yo" ) } currentvalues = file.retrieve assert_events([:file_created], file) file.retrieve assert_events([], file) assert_events([], file) end # Make sure that deletion (ensure => absent) beats source def test_deletion_beats_source dest = tempfile() source = tempfile() File.open(source, "w") { |f| f.puts "yay" } file = nil assert_nothing_raised { file = Puppet.type(:file).create( :name => dest, :ensure => :absent, :source => source ) } file.retrieve assert_events([], file) assert(! FileTest.exists?(dest), "file was copied during deletion") # Now create the dest, and make sure it gets deleted File.open(dest, "w") { |f| f.puts "boo" } assert_events([:file_removed], file) assert(! FileTest.exists?(dest), "file was not deleted during deletion") end def test_nameandpath path = tempfile() file = nil assert_nothing_raised { file = Puppet.type(:file).create( :title => "fileness", :path => path, :content => "this is some content" ) } assert_apply(file) assert(FileTest.exists?(path)) end # Make sure that a missing group isn't fatal at object instantiation time. def test_missinggroup file = nil assert_nothing_raised { file = Puppet.type(:file).create( :path => tempfile(), :group => "fakegroup" ) } assert(file.property(:group), "Group property failed") end def test_modecreation path = tempfile() file = Puppet.type(:file).create( :path => path, :ensure => "file", :mode => "0777" ) assert_equal(0777, file.should(:mode), "Mode did not get set correctly") assert_apply(file) assert_equal(0777, File.stat(path).mode & 007777, "file mode is incorrect") File.unlink(path) file[:ensure] = "directory" assert_apply(file) assert_equal(0777, File.stat(path).mode & 007777, "directory mode is incorrect") end # If both 'ensure' and 'content' are used, make sure that all of the other # properties are handled correctly.
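# A note on the mode assertions used here and in test_modecreation above: File.stat
# reports the file-type bits along with the permission bits, so the tests mask with
# 007777 before comparing. Illustrative values (not taken from an actual test run):
#   0100644 & 007777  #=> 0644   (regular file, mode 644)
#   0040777 & 007777  #=> 0777   (directory, mode 777)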
def test_contentwithmode path = tempfile() file = nil assert_nothing_raised { file = Puppet.type(:file).create( :path => path, :ensure => "file", :content => "some text\n", :mode => 0755 ) } assert_apply(file) assert_equal("%o" % 0755, "%o" % (File.stat(path).mode & 007777)) end def test_backupmodes File.umask(0022) file = tempfile() newfile = tempfile() File.open(file, "w", 0411) { |f| f.puts "yayness" } obj = nil assert_nothing_raised { obj = Puppet::Type.type(:file).create( :path => file, :content => "rahness\n", :backup => ".puppet-bak" ) } assert_apply(obj) backupfile = file + obj[:backup] @@tmpfiles << backupfile assert(FileTest.exists?(backupfile), "Backup file %s does not exist" % backupfile) assert_equal(0411, filemode(backupfile), "File mode is wrong for backupfile") bucket = "bucket" bpath = tempfile() Dir.mkdir(bpath) Puppet::Type.type(:filebucket).create( :title => bucket, :path => bpath ) obj[:backup] = bucket obj[:content] = "New content" assert_apply(obj) md5 = "18cc17fa3047fcc691fdf49c0a7f539a" dir, file, pathfile = Puppet::Network::Handler.filebucket.paths(bpath, md5) assert_equal(0440, filemode(file)) end def test_largefilechanges source = tempfile() dest = tempfile() # Now make a large file File.open(source, "w") { |f| 500.times { |i| f.puts "line %s" % i } } obj = Puppet::Type.type(:file).create( :title => dest, :source => source ) assert_events([:file_created], obj) File.open(source, File::APPEND|File::WRONLY) { |f| f.puts "another line" } assert_events([:file_changed], obj) # Now modify the dest file File.open(dest, File::APPEND|File::WRONLY) { |f| f.puts "one more line" } assert_events([:file_changed, :file_changed], obj) end def test_replacefilewithlink path = tempfile() link = tempfile() File.open(path, "w") { |f| f.puts "yay" } File.open(link, "w") { |f| f.puts "a file" } file = nil assert_nothing_raised { file = Puppet.type(:file).create( :ensure => path, :path => link ) } assert_events([:link_created], file) assert(FileTest.symlink?(link), "Link was not created") assert_equal(path, File.readlink(link), "Link was created incorrectly") end def test_file_with_spaces dir = tempfile() Dir.mkdir(dir) source = File.join(dir, "file spaces") dest = File.join(dir, "another space") File.open(source, "w") { |f| f.puts :yay } obj = Puppet::Type.type(:file).create( :path => dest, :source => source ) assert(obj, "Did not create file") assert_apply(obj) assert(FileTest.exists?(dest), "File did not get created") end def test_present_matches_anything path = tempfile() file = Puppet::Type.newfile(:path => path, :ensure => :present) currentvalues = file.retrieve assert(! file.insync?(currentvalues), "File incorrectly in sync") # Now make a file File.open(path, "w") { |f| f.puts "yay" } currentvalues = file.retrieve assert(file.insync?(currentvalues), "File not in sync") # Now make a directory File.unlink(path) Dir.mkdir(path) currentvalues = file.retrieve assert(file.insync?(currentvalues), "Directory not considered 'present'") Dir.rmdir(path) # Now make a link file[:links] = :manage otherfile = tempfile() File.symlink(otherfile, path) currentvalues = file.retrieve assert(file.insync?(currentvalues), "Symlink not considered 'present'") File.unlink(path) # Now set some content, and make sure it works file[:content] = "yayness" assert_apply(file) assert_equal("yayness", File.read(path), "Content did not get set correctly") end # Make sure unmanaged files are purged. 
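# Rough shape of what the purge test below exercises (a reader's summary, not extra
# setup): the recursive destdir resource copies everything from sourcedir, the
# standalone localfile resource stays managed, and once :purge => true is set,
# anything in destdir that is neither copied from the source nor separately declared
# (the 'to_be_purged' file) should be removed on the next apply.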
def test_purge sourcedir = tempfile() destdir = tempfile() Dir.mkdir(sourcedir) Dir.mkdir(destdir) sourcefile = File.join(sourcedir, "sourcefile") dsourcefile = File.join(destdir, "sourcefile") localfile = File.join(destdir, "localfile") purgee = File.join(destdir, "to_be_purged") File.open(sourcefile, "w") { |f| f.puts "funtest" } # this file should get removed File.open(purgee, "w") { |f| f.puts "footest" } lfobj = Puppet::Type.newfile( :title => "localfile", :path => localfile, :content => "rahtest", :ensure => :file, :backup => false ) destobj = Puppet::Type.newfile(:title => "destdir", :path => destdir, :source => sourcedir, :backup => false, :recurse => true) config = mk_catalog(lfobj, destobj) config.apply assert(FileTest.exists?(dsourcefile), "File did not get copied") assert(FileTest.exists?(localfile), "Local file did not get created") assert(FileTest.exists?(purgee), "Purge target got prematurely purged") assert_nothing_raised { destobj[:purge] = true } config.apply assert(FileTest.exists?(localfile), "Local file got purged") assert(FileTest.exists?(dsourcefile), "Source file got purged") assert(! FileTest.exists?(purgee), "File did not get purged") end # Testing #274. Make sure target can be used without 'ensure'. def test_target_without_ensure source = tempfile() dest = tempfile() File.open(source, "w") { |f| f.puts "funtest" } obj = nil assert_nothing_raised { obj = Puppet::Type.newfile(:path => dest, :target => source) } assert_apply(obj) end def test_autorequire_owner_and_group file = tempfile() comp = nil user = nil group =nil home = nil ogroup = nil assert_nothing_raised { user = Puppet.type(:user).create( :name => "pptestu", :home => file, :gid => "pptestg" ) home = Puppet.type(:file).create( :path => file, :owner => "pptestu", :group => "pptestg", :ensure => "directory" ) group = Puppet.type(:group).create( :name => "pptestg" ) comp = mk_catalog(user, group, home) } # Now make sure we get a relationship for each of these rels = nil assert_nothing_raised { rels = home.autorequire } assert(rels.detect { |e| e.source == user }, "owner was not autorequired") assert(rels.detect { |e| e.source == group }, "group was not autorequired") end # Testing #309 -- //my/file => /my/file def test_slash_deduplication ["/my/////file/for//testing", "//my/file/for/testing///", "/my/file/for/testing"].each do |path| file = nil assert_nothing_raised do file = Puppet::Type.newfile(:path => path) end assert_equal("/my/file/for/testing", file.title) assert_equal(file, Puppet::Type.type(:file)["/my/file/for/testing"]) Puppet::Type.type(:file).clear end end # Testing #304 def test_links_to_directories link = tempfile() file = tempfile() dir = tempfile() Dir.mkdir(dir) bucket = Puppet::Type.newfilebucket :name => "main" File.symlink(dir, link) File.open(file, "w") { |f| f.puts "" } assert_equal(dir, File.readlink(link)) obj = Puppet::Type.newfile :path => link, :ensure => :link, :target => file, :recurse => false, :backup => "main" assert_apply(obj) assert_equal(file, File.readlink(link)) end # Testing #303 def test_nobackups_with_links link = tempfile() new = tempfile() File.open(link, "w") { |f| f.puts "old" } File.open(new, "w") { |f| f.puts "new" } obj = Puppet::Type.newfile :path => link, :ensure => :link, :target => new, :recurse => true, :backup => false assert_nothing_raised do obj.handlebackup end bfile = [link, "puppet-bak"].join(".") assert(! FileTest.exists?(bfile), "Backed up when told not to") assert_apply(obj) assert(! 
FileTest.exists?(bfile), "Backed up when told not to") end # Make sure we consistently handle backups for all cases. def test_ensure_with_backups # We've got three file types, so make sure we can replace any type # with the other type and that backups are done correctly. types = [:file, :directory, :link] dir = tempfile() path = File.join(dir, "test") linkdest = tempfile() creators = { :file => proc { File.open(path, "w") { |f| f.puts "initial" } }, :directory => proc { Dir.mkdir(path) }, :link => proc { File.symlink(linkdest, path) } } bucket = Puppet::Type.newfilebucket :name => "main", :path => tempfile() obj = Puppet::Type.newfile :path => path, :force => true, :links => :manage Puppet[:trace] = true ["main", false].each do |backup| obj[:backup] = backup obj.finish types.each do |should| types.each do |is| # It makes no sense to replace a directory with a directory # next if should == :directory and is == :directory Dir.mkdir(dir) # Make the thing creators[is].call obj[:ensure] = should if should == :link obj[:target] = linkdest else if obj.property(:target) obj.delete(:target) end end # First try just removing the initial data assert_nothing_raised do obj.remove_existing(should) end unless is == should # Make sure the original is gone assert(! FileTest.exists?(obj[:path]), "remove_existing did not work: " + "did not remove %s with %s" % [is, should]) end FileUtils.rmtree(obj[:path]) # Now make it again creators[is].call property = obj.property(:ensure) currentvalue = property.retrieve unless property.insync?(currentvalue) assert_nothing_raised do property.sync end end FileUtils.rmtree(dir) end end end end if Process.uid == 0 # Testing #364. def test_writing_in_directories_with_no_write_access # Make a directory that our user does not have access to dir = tempfile() Dir.mkdir(dir) # Get a fake user user = nonrootuser # and group group = nonrootgroup # First try putting a file in there path = File.join(dir, "file") file = Puppet::Type.newfile :path => path, :owner => user.name, :group => group.name, :content => "testing" # Make sure we can create it assert_apply(file) assert(FileTest.exists?(path), "File did not get created") # And that it's owned correctly assert_equal(user.uid, File.stat(path).uid, "File has the wrong owner") assert_equal(group.gid, File.stat(path).gid, "File has the wrong group") assert_equal("testing", File.read(path), "file has the wrong content") # Now make a dir subpath = File.join(dir, "subdir") subdir = Puppet::Type.newfile :path => subpath, :owner => user.name, :group => group.name, :ensure => :directory # Make sure we can create it assert_apply(subdir) assert(FileTest.directory?(subpath), "File did not get created") # And that it's owned correctly assert_equal(user.uid, File.stat(subpath).uid, "File has the wrong owner") assert_equal(group.gid, File.stat(subpath).gid, "File has the wrong group") assert_equal("testing", File.read(path), "file has the wrong content") end end # #366 def test_replace_aliases file = Puppet::Type.newfile :path => tempfile() file[:replace] = :yes assert_equal(:true, file[:replace], ":replace did not alias :true to :yes") file[:replace] = :no assert_equal(:false, file[:replace], ":replace did not alias :false to :no") end # #365 -- make sure generated files also use filebuckets. 
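# With :backup set to a bucket name (here "rtest") rather than a ".suffix" string,
# replaced content is shipped to the filebucket keyed by its MD5 sum instead of
# being copied alongside the file (the test checks that no "<path>rtest" copy
# appears), and it is retrieved again with dipper.getfile(<md5>). (Editorial summary
# of the test that follows, added for context.)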
def test_recursive_filebuckets source = tempfile() dest = tempfile() s1 = File.join(source, "1") sdir = File.join(source, "dir") s2 = File.join(sdir, "2") Dir.mkdir(source) Dir.mkdir(sdir) [s1, s2].each { |file| File.open(file, "w") { |f| f.puts "yay: %s" % File.basename(file) } } sums = {} [s1, s2].each do |f| sums[File.basename(f)] = Digest::MD5.hexdigest(File.read(f)) end dfiles = [File.join(dest, "1"), File.join(dest, "dir", "2")] bpath = tempfile bucket = Puppet::Type.type(:filebucket).create :name => "rtest", :path => bpath dipper = bucket.bucket dipper = Puppet::Network::Handler.filebucket.new( :Path => bpath ) assert(dipper, "did not receive bucket client") file = Puppet::Type.newfile :path => dest, :source => source, :recurse => true, :backup => "rtest" assert_apply(file) dfiles.each do |f| assert(FileTest.exists?(f), "destfile %s was not created" % f) end # Now modify the source files to make sure things get backed up correctly [s1, s2].each { |sf| File.open(sf, "w") { |f| f.puts "boo: %s" % File.basename(sf) } } assert_apply(file) dfiles.each do |f| assert_equal("boo: %s\n" % File.basename(f), File.read(f), "file was not copied correctly") end # Make sure we didn't just copy the files over to backup locations dfiles.each do |f| assert(! FileTest.exists?(f + "rtest"), "file %s was copied for backup instead of bucketed" % File.basename(f)) end # Now make sure we can get the source sums from the bucket sums.each do |f, sum| result = nil assert_nothing_raised do result = dipper.getfile(sum) end assert(result, "file %s was not backed to filebucket" % f) assert_equal("yay: %s\n" % f, result, "file backup was not correct") end end def test_backup path = tempfile() file = Puppet::Type.newfile :path => path, :content => "yay" [false, :false, "false"].each do |val| assert_nothing_raised do file[:backup] = val end assert_equal(false, file[:backup], "%s did not translate" % val.inspect) end [true, :true, "true", ".puppet-bak"].each do |val| assert_nothing_raised do file[:backup] = val end assert_equal(".puppet-bak", file[:backup], "%s did not translate" % val.inspect) end # Now try a non-bucket string assert_nothing_raised do file[:backup] = ".bak" end assert_equal(".bak", file[:backup], ".bak did not translate") # Now try a non-existent bucket assert_nothing_raised do file[:backup] = "main" end assert_equal("main", file[:backup], "bucket name was not retained") assert_equal("main", file.bucket, "file's bucket was not set") # And then an existing bucket obj = Puppet::Type.type(:filebucket).create :name => "testing" bucket = obj.bucket assert_nothing_raised do file[:backup] = "testing" end assert_equal("testing", file[:backup], "backup value was reset") assert_equal(obj.bucket, file.bucket, "file's bucket was not set") end def test_pathbuilder dir = tempfile() Dir.mkdir(dir) file = File.join(dir, "file") File.open(file, "w") { |f| f.puts "" } obj = Puppet::Type.newfile :path => dir, :recurse => true, :mode => 0755 mk_catalog obj assert_equal("/%s" % obj.ref, obj.path) list = obj.eval_generate fileobj = obj.class[file] assert(fileobj, "did not generate file object") assert_equal("/%s" % fileobj.ref, fileobj.path, "did not generate correct subfile path") end # Testing #403 def test_removal_with_content_set path = tempfile() File.open(path, "w") { |f| f.puts "yay" } file = Puppet::Type.newfile(:name => path, :ensure => :absent, :content => "foo") assert_apply(file) assert(! 
FileTest.exists?(path), "File was not removed") end # Testing #434 def test_stripping_extra_slashes_during_lookup file = Puppet::Type.newfile(:path => "/one/two") %w{/one/two/ /one/two /one//two //one//two//}.each do |path| assert(Puppet::Type.type(:file)[path], "could not look up file via path %s" % path) end end # Testing #438 def test_creating_properties_conflict file = tempfile() first = tempfile() second = tempfile() params = [:content, :source, :target] params.each do |param| assert_nothing_raised("%s conflicted with ensure" % [param]) do Puppet::Type.newfile(:path => file, param => first, :ensure => :file) end Puppet::Type.type(:file).clear params.each do |other| next if other == param assert_raise(Puppet::Error, "%s and %s did not conflict" % [param, other]) do Puppet::Type.newfile(:path => file, other => first, param => second) end end end end # Testing #508 if Process.uid == 0 def test_files_replace_with_right_attrs source = tempfile() File.open(source, "w") { |f| f.puts "some text" } File.chmod(0755, source) user = nonrootuser group = nonrootgroup path = tempfile() good = {:uid => user.uid, :gid => group.gid, :mode => 0640} run = Proc.new do |obj, msg| assert_apply(obj) stat = File.stat(obj[:path]) good.each do |should, sval| if should == :mode current = filemode(obj[:path]) else current = stat.send(should) end assert_equal(sval, current, "Attr %s was not correct %s" % [should, msg]) end end file = Puppet::Type.newfile(:path => path, :owner => user.name, :group => group.name, :mode => 0640, :backup => false) {:source => source, :content => "some content"}.each do |attr, value| file[attr] = value # First create the file run.call(file, "upon creation with %s" % attr) # Now change something so that we replace the file case attr when :source: File.open(source, "w") { |f| f.puts "some different text" } when :content: file[:content] = "something completely different" else raise "invalid attr %s" % attr end # Run it again run.call(file, "after modification with %s" % attr) # Now remove the file and the attr file.delete(attr) File.unlink(path) end end end # #505 def test_numeric_recurse dir = tempfile() subdir = File.join(dir, "subdir") other = File.join(subdir, "deeper") file = File.join(other, "file") [dir, subdir, other].each { |d| Dir.mkdir(d) } File.open(file, "w") { |f| f.puts "yay" } File.chmod(0644, file) obj = Puppet::Type.newfile(:path => dir, :mode => 0750, :recurse => "2") config = mk_catalog(obj) children = nil assert_nothing_raised("Failure when recursing") do children = obj.eval_generate end assert(obj.class[subdir], "did not create subdir object") children.each do |c| assert_nothing_raised("Failure when recursing on %s" % c) do c.catalog = config others = c.eval_generate end end oobj = obj.class[other] assert(oobj, "did not create other object") assert_nothing_raised do assert_nil(oobj.eval_generate, "recursed too far") end end # Make sure we default to the "puppet" filebucket, rather than a string def test_backup_defaults_to_bucket path = tempfile file = Puppet::Type.newfile(:path => path, :content => 'some content') file.finish assert_instance_of(Puppet::Network::Client::Dipper, file.bucket, "did not default to a filebucket for backups") end # #515 - make sure 'ensure' other than "link" is deleted during recursion def test_ensure_deleted_during_recursion dir = tempfile() Dir.mkdir(dir) file = File.join(dir, "file") File.open(file, "w") { |f| f.puts "asdfasdf" } obj = Puppet::Type.newfile(:path => dir, :ensure => :directory, :recurse => true) config = mk_catalog(obj) 
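# The child generated below for 'file' must not inherit the parent's
# 'ensure => directory' (issue #515) -- otherwise the plain file on disk would be
# converted into a directory during recursion.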
children = nil assert_nothing_raised do children = obj.eval_generate end fobj = obj.class[file] assert(fobj, "did not create file object") assert(fobj.should(:ensure) != :directory, "ensure was passed to child") end # #567 def test_missing_files_are_in_sync file = tempfile obj = Puppet::Type.newfile(:path => file, :mode => 0755) changes = obj.evaluate assert(changes.empty?, "Missing file with no ensure resulted in changes") end def test_root_dir_is_named_correctly obj = Puppet::Type.newfile(:path => '/', :mode => 0755) assert_equal("/", obj.title, "/ directory was changed to empty string") end # #1010 and #1037 -- write should fail if the written checksum does not # match the file we thought we were writing. def test_write_validates_checksum file = tempfile inst = Puppet::Type.newfile(:path => file, :content => "something") tmpfile = file + ".puppettmp" wh = mock 'writehandle', :print => nil rh = mock 'readhandle' rh.expects(:read).with(512).times(2).returns("other").then.returns(nil) File.expects(:open).with { |*args| args[0] == tmpfile and args[1] != "r" }.yields(wh) File.expects(:open).with { |*args| args[0] == tmpfile and args[1] == "r" }.yields(rh) File.stubs(:rename) FileTest.stubs(:exist?).returns(true) FileTest.stubs(:file?).returns(true) inst.expects(:fail) inst.write("something", :whatever) end end diff --git a/test/ral/type/tidy.rb b/test/ral/type/tidy.rb index 60fad6516..17f98df22 100755 --- a/test/ral/type/tidy.rb +++ b/test/ral/type/tidy.rb @@ -1,291 +1,292 @@ #!/usr/bin/env ruby require File.dirname(__FILE__) + '/../../lib/puppettest' require 'puppettest' require 'puppettest/support/utils' +require 'puppet/type/tidy' class TestTidy < Test::Unit::TestCase include PuppetTest::Support::Utils include PuppetTest::FileTesting def mktmpfile # because luke's home directory is on nfs, it can't be used for testing # as root tmpfile = tempfile() File.open(tmpfile, "w") { |f| f.puts rand(100) } @@tmpfiles.push tmpfile return tmpfile end def mktmpdir dir = File.join(tmpdir(), "puppetlinkdir") unless FileTest.exists?(dir) Dir.mkdir(dir) end @@tmpfiles.push dir return dir end def test_tidydirs dir = mktmpdir file = File.join(dir, "file") File.open(file, "w") { |f| f.puts "some stuff" } tidy = Puppet.type(:tidy).create( :name => dir, :size => "1b", :rmdirs => true, :recurse => true ) assert_events([:file_tidied, :file_tidied], tidy) assert(!FileTest.exists?(file), "Tidied %s still exists" % file) assert(!FileTest.exists?(dir), "Tidied %s still exists" % dir) end def disabled_test_recursion source = mktmpdir() FileUtils.cd(source) { mkranddirsandfiles() } link = nil assert_nothing_raised { link = newlink(:target => source, :recurse => true) } comp = mk_catalog("linktest",link) cycle(comp) path = link.name list = file_list(path) FileUtils.cd(path) { list.each { |file| unless FileTest.directory?(file) assert(FileTest.symlink?(file)) target = File.readlink(file) assert_equal(target,File.join(source,file.sub(/^\.\//,''))) end } } end # Test the different age iterations. 
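# A few worked conversions matching the table built below (illustrative only):
#   tidy[:age] = "2"    #=> 2 * 86400  = 172800 seconds (bare numbers default to days)
#   tidy[:age] = "5h"   #=> 5 * 3600   = 18000 seconds
#   tidy[:age] = "1W"   #=> 1 * 604800 = 604800 seconds (unit letters are case-insensitive)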
def test_age_conversions tidy = Puppet::Type.newtidy :path => tempfile(), :age => "1m" convertors = { :second => 1, :minute => 60 } convertors[:hour] = convertors[:minute] * 60 convertors[:day] = convertors[:hour] * 24 convertors[:week] = convertors[:day] * 7 # First make sure we default to days assert_nothing_raised do tidy[:age] = "2" end assert_equal(2 * convertors[:day], tidy.should(:age), "Converted 2 wrong") convertors.each do |name, number| init = name.to_s[0..0] # The first letter [0, 1, 5].each do |multi| [init, init.upcase].each do |letter| age = multi.to_s + letter.to_s assert_nothing_raised do tidy[:age] = age end assert_equal(multi * convertors[name], tidy.should(:age), "Converted %s wrong" % age) end end end end def test_size_conversions convertors = { :b => 0, :kb => 1, :mb => 2, :gb => 3 } tidy = Puppet::Type.newtidy :path => tempfile(), :age => "1m" # First make sure we default to kb assert_nothing_raised do tidy[:size] = "2" end assert_equal(2048, tidy.should(:size), "Converted 2 wrong") convertors.each do |name, number| init = name.to_s[0..0] # The first letter [0, 1, 5].each do |multi| [init, init.upcase].each do |letter| size = multi.to_s + letter.to_s assert_nothing_raised do tidy[:size] = size end total = multi number.times do total *= 1024 end assert_equal(total, tidy.should(:size), "Converted %s wrong" % size) end end end end def test_agetest tidy = Puppet::Type.newtidy :path => tempfile(), :age => "1m" age = tidy.property(:age) # Set it to something that should be fine assert(age.insync?(Time.now.to_i - 5), "Tried to tidy a low age") # Now to something that should fail assert(! age.insync?(Time.now.to_i - 120), "Incorrectly skipped tidy") end def test_sizetest tidy = Puppet::Type.newtidy :path => tempfile(), :size => "1k" size = tidy.property(:size) # Set it to something that should be fine assert(size.insync?(50), "Tried to tidy a low size") # Now to something that should fail assert(! size.insync?(2048), "Incorrectly skipped tidy") end # Make sure we can remove different types of files def test_tidytypes path = tempfile() tidy = Puppet::Type.newtidy :path => path, :size => "1b", :age => "1s" # Start with a file File.open(path, "w") { |f| f.puts "this is a test" } assert_events([:file_tidied], tidy) assert(! FileTest.exists?(path), "File was not removed") # Then a link dest = tempfile File.open(dest, "w") { |f| f.puts "this is a test" } File.symlink(dest, path) assert_events([:file_tidied], tidy) assert(! FileTest.exists?(path), "Link was not removed") assert(FileTest.exists?(dest), "Destination was removed") # And a directory Dir.mkdir(path) tidy[:rmdirs] = true assert_events([:file_tidied], tidy) assert(! FileTest.exists?(path), "File was not removed") end # Make sure we can specify either attribute and get appropriate behaviour. # I found that the original implementation of this did not work unless both # size and age were specified. def test_one_attribute path = tempfile() File.open(path, "w") { |f| 10.times { f.puts "yayness " } } tidy = Puppet::Type.type(:tidy).create :path => path, :size => "1b" assert_apply(tidy) assert(! FileTest.exists?(path), "file did not get tidied") tidy.class.clear # Now try one with just an age attribute. time = Time.now - 10 stat = stub 'stat', :mtime => time, :atime => time, :ftype => "file" File.stubs(:lstat) File.stubs(:lstat).with(path).returns(stat) File.open(path, "w") { |f| 10.times { f.puts "yayness " } } tidy = Puppet::Type.type(:tidy).create :path => path, :age => "5s" assert_apply(tidy) assert(! 
FileTest.exists?(path), "file did not get tidied") end # Testing #355. def test_remove_dead_links dir = tempfile() link = File.join(dir, "link") target = tempfile() Dir.mkdir(dir) File.symlink(target, link) tidy = Puppet::Type.newtidy :path => dir, :size => "1b", :recurse => true assert_apply(tidy) assert(! FileTest.symlink?(link), "link was not tidied") end def test_glob_matches_single dir = mktmpdir files = { :remove => File.join(dir, "01-foo"), :keep => File.join(dir, "default") } files.each do |tag, file| File.open(file, "w") { |f| f.puts "some stuff" } end tidy = Puppet.type(:tidy).create( :name => dir, :size => "1b", :rmdirs => true, :recurse => true, :matches => "01-*" ) assert_apply(tidy) assert(FileTest.exists?(files[:keep]), "%s was tidied" % files[:keep]) assert(!FileTest.exists?(files[:remove]), "Tidied %s still exists" % files[:remove]) end def test_glob_matches_multiple dir = mktmpdir files = { :remove1 => File.join(dir, "01-foo"), :remove2 => File.join(dir, "02-bar"), :keep1 => File.join(dir, "default") } files.each do |tag, file| File.open(file, "w") { |f| f.puts "some stuff" } end tidy = Puppet.type(:tidy).create( :name => dir, :size => "1b", :rmdirs => true, :recurse => true, :matches => ["01-*", "02-*"] ) assert_apply(tidy) assert(FileTest.exists?(files[:keep1]), "%s was tidied" % files[:keep1]) assert(!FileTest.exists?(files[:remove1]), "Tidied %s still exists" % files[:remove1]) assert(!FileTest.exists?(files[:remove2]), "Tidied %s still exists" % files[:remove2]) end end
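# Reader's note on the two glob tests above (not part of the original suite): with
# :matches set, only files whose names match one of the given globs are candidates
# for tidying -- "01-foo" and "02-bar" are removed while "default" survives, even
# though all three exceed the 1-byte size threshold.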