diff --git a/lib/puppet/network/client/master.rb b/lib/puppet/network/client/master.rb
index 390f3d4e2..913c51b3d 100644
--- a/lib/puppet/network/client/master.rb
+++ b/lib/puppet/network/client/master.rb
@@ -1,582 +1,583 @@
 # The client for interacting with the puppetmaster config server.
 require 'sync'
 require 'timeout'
 require 'puppet/network/http_pool'

 class Puppet::Network::Client::Master < Puppet::Network::Client
     unless defined? @@sync
         @@sync = Sync.new
     end

     attr_accessor :catalog
     attr_reader :compile_time

     class << self
         # Puppetd should only have one instance running, and we need a way
         # to retrieve it.
         attr_accessor :instance
         include Puppet::Util
     end

     def self.facts
         # Retrieve the facts from the central server.
         if Puppet[:factsync]
             self.getfacts()
         end

         down = Puppet[:downcasefacts]

-        facts = {}
-        Facter.each { |name,fact|
+        facts = Facter.to_hash.inject({}) do |newhash, array|
+            name, fact = array
             if down
-                facts[name] = fact.to_s.downcase
+                newhash[name] = fact.to_s.downcase
             else
-                facts[name] = fact.to_s
+                newhash[name] = fact.to_s
             end
-        }
+            newhash
+        end

         # Add our client version to the list of facts, so people can use it
         # in their manifests
         facts["clientversion"] = Puppet.version.to_s

         # And add our environment as a fact.
         unless facts.include?("environment")
             facts["environment"] = Puppet[:environment]
         end

         facts
     end

     # Return the list of dynamic facts as an array of symbols
     def self.dynamic_facts
         Puppet.settings[:dynamicfacts].split(/\s*,\s*/).collect { |fact| fact.downcase }
     end

     # Cache the config
     def cache(text)
         Puppet.info "Caching catalog at %s" % self.cachefile
         confdir = ::File.dirname(Puppet[:localconfig])
         ::File.open(self.cachefile + ".tmp", "w", 0660) { |f|
             f.print text
         }
         ::File.rename(self.cachefile + ".tmp", self.cachefile)
     end

     def cachefile
         unless defined? @cachefile
             @cachefile = Puppet[:localconfig] + ".yaml"
         end
         @cachefile
     end

     def clear
         @catalog.clear(true) if @catalog
         Puppet::Type.allclear
         @catalog = nil
     end

     # Initialize and load storage
     def dostorage
         begin
             Puppet::Util::Storage.load
             @compile_time ||= Puppet::Util::Storage.cache(:configuration)[:compile_time]
         rescue => detail
             if Puppet[:trace]
                 puts detail.backtrace
             end
             Puppet.err "Corrupt state file %s: %s" % [Puppet[:statefile], detail]
             begin
                 ::File.unlink(Puppet[:statefile])
                 retry
             rescue => detail
                 raise Puppet::Error.new("Cannot remove %s: %s" % [Puppet[:statefile], detail])
             end
         end
     end

     # Check whether our catalog is up to date
     def fresh?(facts)
         if Puppet[:ignorecache]
             Puppet.notice "Ignoring cache"
             return false
         end

         unless self.compile_time
             Puppet.debug "No cached compile time"
             return false
         end

         if facts_changed?(facts)
             Puppet.info "Facts have changed; recompiling" unless local?
             return false
         end

         newcompile = @driver.freshness
         # We're willing to give a 2 second drift
         if newcompile - @compile_time.to_i < 1
             return true
         else
             Puppet.debug "Server compile time is %s vs %s" % [newcompile, @compile_time.to_i]
             return false
         end
     end

     # Let the daemon run again, freely in the filesystem. Frolick, little
     # daemon!
     def enable
         lockfile.unlock(:anonymous => true)
     end

     # Stop the daemon from making any catalog runs.
     def disable
         lockfile.lock(:anonymous => true)
     end

     # Retrieve the config from a remote server. If this fails, then
     # use the cached copy.
     def getconfig
         dostorage()

         facts = nil
         Puppet::Util.benchmark(:debug, "Retrieved facts") do
             facts = self.class.facts
         end

         raise Puppet::Network::ClientError.new("Could not retrieve any facts") unless facts.length > 0

         # Retrieve the plugins.
getplugins() if Puppet[:pluginsync] if (self.catalog or FileTest.exist?(self.cachefile)) and self.fresh?(facts) Puppet.info "Configuration is up to date" return if use_cached_config end Puppet.debug("Retrieving catalog") # If we can't retrieve the catalog, just return, which will either # fail, or use the in-memory catalog. unless yaml_objects = get_actual_config(facts) use_cached_config(true) return end begin objects = YAML.load(yaml_objects) rescue => detail msg = "Configuration could not be translated from yaml" msg += "; using cached catalog" if use_cached_config(true) Puppet.warning msg return end self.setclasses(objects.classes) # Clear all existing objects, so we can recreate our stack. clear() if self.catalog # Now convert the objects to a puppet catalog graph. begin @catalog = objects.to_catalog rescue => detail clear() puts detail.backtrace if Puppet[:trace] msg = "Configuration could not be instantiated: %s" % detail msg += "; using cached catalog" if use_cached_config(true) Puppet.warning msg return end if ! @catalog.from_cache self.cache(yaml_objects) end # Keep the state database up to date. @catalog.host_config = true end # A simple proxy method, so it's easy to test. def getplugins self.class.getplugins end # Just so we can specify that we are "the" instance. def initialize(*args) Puppet.settings.use(:main, :ssl, :puppetd) super self.class.instance = self @running = false end # Mark that we should restart. The Puppet module checks whether we're running, # so this only gets called if we're in the middle of a run. def restart # If we're currently running, then just mark for later Puppet.notice "Received signal to restart; waiting until run is complete" @restart = true end # Should we restart? def restart? if defined? @restart @restart else false end end # Retrieve the cached config def retrievecache if FileTest.exists?(self.cachefile) return ::File.read(self.cachefile) else return nil end end # The code that actually runs the catalog. # This just passes any options on to the catalog, # which accepts :tags and :ignoreschedules. def run(options = {}) got_lock = false splay Puppet::Util.sync(:puppetrun).synchronize(Sync::EX) do if !lockfile.lock Puppet.notice "Lock file %s exists; skipping catalog run" % lockfile.lockfile else got_lock = true begin duration = thinmark do self.getconfig end rescue => detail puts detail.backtrace if Puppet[:trace] Puppet.err "Could not retrieve catalog: %s" % detail end if self.catalog @catalog.retrieval_duration = duration Puppet.notice "Starting catalog run" unless @local benchmark(:notice, "Finished catalog run") do @catalog.apply(options) end end # Now close all of our existing http connections, since there's no # reason to leave them lying open. Puppet::Network::HttpPool.clear_http_instances end lockfile.unlock # Did we get HUPped during the run? If so, then restart now that we're # done with the run. if self.restart? Process.kill(:HUP, $$) end end ensure # Just make sure we remove the lock file if we set it. lockfile.unlock if got_lock and lockfile.locked? clear() end def running? lockfile.locked? end # Store the classes in the classfile, but only if we're not local. 
def setclasses(ary) if @local return end unless ary and ary.length > 0 Puppet.info "No classes to store" return end begin ::File.open(Puppet[:classfile], "w") { |f| f.puts ary.join("\n") } rescue => detail Puppet.err "Could not create class file %s: %s" % [Puppet[:classfile], detail] end end private # Download files from the remote server, returning a list of all # changed files. def self.download(args) hash = { :path => args[:dest], :recurse => true, :source => args[:source], :tag => "#{args[:name]}s", :owner => Process.uid, :group => Process.gid, :purge => true, :force => true, :backup => false } if args[:ignore] hash[:ignore] = args[:ignore].split(/\s+/) end downconfig = Puppet::Node::Catalog.new("downloading") downconfig.add_resource Puppet::Type.type(:file).create(hash) Puppet.info "Retrieving #{args[:name]}s" noop = Puppet[:noop] Puppet[:noop] = false files = [] begin Timeout::timeout(self.timeout) do downconfig.apply do |trans| trans.changed?.find_all do |resource| yield resource if block_given? files << resource[:path] end end end rescue Puppet::Error, Timeout::Error => detail if Puppet[:debug] puts detail.backtrace end Puppet.err "Could not retrieve #{args[:name]}s: %s" % detail end # Now clean up after ourselves downconfig.clear return files ensure # I can't imagine why this is necessary, but apparently at last one person has had problems with noop # being nil here. if noop.nil? Puppet[:noop] = false else Puppet[:noop] = noop end end # Retrieve facts from the central server. def self.getfacts # Download the new facts path = Puppet[:factpath].split(":") files = [] download(:dest => Puppet[:factdest], :source => Puppet[:factsource], :ignore => Puppet[:factsignore], :name => "fact") do |resource| next unless path.include?(::File.dirname(resource[:path])) files << resource[:path] end ensure # Clear all existing definitions. Facter.clear # Reload everything. if Facter.respond_to? :loadfacts Facter.loadfacts elsif Facter.respond_to? :load Facter.load else raise Puppet::Error, "You must upgrade your version of Facter to use centralized facts" end # This loads all existing facts and any new ones. We have to remove and # reload because there's no way to unload specific facts. loadfacts() end # Retrieve the plugins from the central server. We only have to load the # changed plugins, because Puppet::Type loads plugins on demand. 
def self.getplugins download(:dest => Puppet[:plugindest], :source => Puppet[:pluginsource], :ignore => Puppet[:pluginsignore], :name => "plugin") do |resource| next if FileTest.directory?(resource[:path]) path = resource[:path].sub(Puppet[:plugindest], '').sub(/^\/+/, '') unless Puppet::Util::Autoload.loaded?(path) next end begin Puppet.info "Reloading downloaded file %s" % path load resource[:path] rescue => detail Puppet.warning "Could not reload downloaded file %s: %s" % [resource[:path], detail] end end end def self.loaddir(dir, type) return unless FileTest.directory?(dir) Dir.entries(dir).find_all { |e| e =~ /\.rb$/ }.each do |file| fqfile = ::File.join(dir, file) begin Puppet.info "Loading #{type} %s" % ::File.basename(file.sub(".rb",'')) Timeout::timeout(self.timeout) do load fqfile end rescue => detail Puppet.warning "Could not load #{type} %s: %s" % [fqfile, detail] end end end def self.loadfacts Puppet[:factpath].split(":").each do |dir| loaddir(dir, "fact") end end def self.timeout timeout = Puppet[:configtimeout] case timeout when String: if timeout =~ /^\d+$/ timeout = Integer(timeout) else raise ArgumentError, "Configuration timeout must be an integer" end when Integer: # nothing else raise ArgumentError, "Configuration timeout must be an integer" end return timeout end loadfacts() # Have the facts changed since we last compiled? def facts_changed?(facts) oldfacts = (Puppet::Util::Storage.cache(:configuration)[:facts] || {}).dup newfacts = facts.dup self.class.dynamic_facts.each do |fact| [oldfacts, newfacts].each do |facthash| facthash.delete(fact) if facthash.include?(fact) end end if oldfacts == newfacts return false else # unless oldfacts # puts "no old facts" # return true # end # newfacts.keys.each do |k| # unless newfacts[k] == oldfacts[k] # puts "%s: %s vs %s" % [k, newfacts[k], oldfacts[k]] # end # end return true end end # Actually retrieve the catalog, either from the server or from a # local master. def get_actual_config(facts) begin Timeout::timeout(self.class.timeout) do return get_remote_config(facts) end rescue Timeout::Error Puppet.err "Configuration retrieval timed out" return nil end end # Retrieve a config from a remote master. def get_remote_config(facts) textobjects = "" textfacts = CGI.escape(YAML.dump(facts)) benchmark(:debug, "Retrieved catalog") do # error handling for this is done in the network client begin textobjects = @driver.getconfig(textfacts, "yaml") begin textobjects = CGI.unescape(textobjects) rescue => detail raise Puppet::Error, "Could not CGI.unescape catalog" end rescue => detail Puppet.err "Could not retrieve catalog: %s" % detail return nil end end return nil if textobjects == "" @compile_time = Time.now Puppet::Util::Storage.cache(:configuration)[:facts] = facts Puppet::Util::Storage.cache(:configuration)[:compile_time] = @compile_time return textobjects end def lockfile unless defined?(@lockfile) @lockfile = Puppet::Util::Pidlock.new(Puppet[:puppetdlockfile]) end @lockfile end # Sleep when splay is enabled; else just return. def splay return unless Puppet[:splay] limit = Integer(Puppet[:splaylimit]) # Pick a splay time and then cache it. unless time = Puppet::Util::Storage.cache(:configuration)[:splay_time] time = rand(limit) Puppet::Util::Storage.cache(:configuration)[:splay_time] = time end Puppet.info "Sleeping for %s seconds (splay is enabled)" % time sleep(time) end private # Use our cached config, optionally specifying whether this is # necessary because of a failure. 
def use_cached_config(because_of_failure = false) return true if self.catalog if because_of_failure and ! Puppet[:usecacheonfailure] @catalog = nil Puppet.warning "Not using cache on failed catalog" return false end return false unless oldtext = self.retrievecache begin @catalog = YAML.load(oldtext).to_catalog @catalog.from_cache = true @catalog.host_config = true rescue => detail puts detail.backtrace if Puppet[:trace] Puppet.warning "Could not load cached catalog: %s" % detail clear return false end return true end end diff --git a/test/network/client/master.rb b/test/network/client/master.rb index 67c47fa6d..41796575f 100755 --- a/test/network/client/master.rb +++ b/test/network/client/master.rb @@ -1,585 +1,577 @@ #!/usr/bin/env ruby require File.dirname(__FILE__) + '/../../lib/puppettest' require 'puppettest' require 'mocha' class TestMasterClient < Test::Unit::TestCase include PuppetTest::ServerTest def setup super @master = Puppet::Network::Client.master end def mkmaster(options = {}) options[:UseNodes] = false options[:Local] = true if code = options[:Code] Puppet[:code] = code else Puppet[:manifest] = options[:Manifest] || mktestmanifest end # create our master # this is the default server setup master = Puppet::Network::Handler.master.new(options) return master end def mkclient(master = nil) master ||= mkmaster() client = Puppet::Network::Client.master.new( :Master => master ) return client end def test_disable FileUtils.mkdir_p(Puppet[:statedir]) manifest = mktestmanifest master = mkmaster(:Manifest => manifest) client = mkclient(master) assert_nothing_raised("Could not disable client") { client.disable } client.expects(:getconfig).never client.run client = mkclient(master) client.expects(:getconfig) assert_nothing_raised("Could not enable client") { client.enable } client.run end # Make sure we're getting the client version in our list of facts def test_clientversionfact facts = nil assert_nothing_raised { facts = Puppet::Network::Client.master.facts } assert_equal(Puppet.version.to_s, facts["clientversion"]) end # Make sure non-string facts don't make things go kablooie def test_nonstring_facts FileUtils.mkdir_p(Puppet[:statedir]) # Add a nonstring fact Facter.add("nonstring") do setcode { 1 } end assert_equal(1, Facter.nonstring, "Fact was a string from facter") client = mkclient() assert(! FileTest.exists?(@createdfile)) assert_nothing_raised { client.run } end # This method downloads files, and yields each file object if a block is given. 
     def test_download
         source = tempfile()
         dest = tempfile()
         sfile = File.join(source, "file")
         dfile = File.join(dest, "file")
         Dir.mkdir(source)
         File.open(sfile, "w") {|f| f.puts "yay"}

         files = []
         assert_nothing_raised do
             files = Puppet::Network::Client.master.download(:dest => dest, :source => source, :name => "testing")
         end

         assert(FileTest.directory?(dest), "dest dir was not created")
         assert(FileTest.file?(dfile), "dest file was not created")
         assert_equal(File.read(sfile), File.read(dfile), "Dest file had incorrect contents")
         assert_equal([dest, dfile].sort, files.sort, "Changed files were not returned correctly")
     end

     def test_getplugins
         Puppet[:filetimeout] = -1
         Puppet[:pluginsource] = tempfile()
         Dir.mkdir(Puppet[:pluginsource])
         Dir.mkdir(File.join(Puppet[:pluginsource], "testing"))

         $loaded = []
         loader = Puppet::Util::Autoload.new(self, "testing")

         myplugin = File.join(Puppet[:pluginsource], "testing", "myplugin.rb")
         File.open(myplugin, "w") do |f|
             f.puts %{$loaded << :myplugin}
         end

         assert_nothing_raised("Could not get plugins") {
             Puppet::Network::Client.master.getplugins
         }

         destfile = File.join(Puppet[:plugindest], "testing", "myplugin.rb")

         assert(File.exists?(destfile), "Did not get plugin")

         assert(loader.load(:myplugin), "Did not load downloaded plugin")

         assert($loaded.include?(:myplugin), "Downloaded code was not evaluated")

         # Now modify the file and make sure the type is replaced
         File.open(myplugin, "w") do |f|
             f.puts %{$loaded << :changed}
         end

         assert_nothing_raised("Could not get plugin changes") {
             Puppet::Network::Client.master.getplugins
         }

         assert($loaded.include?(:changed), "Changed code was not evaluated")

         # Now try it again, to make sure we don't have any objects lying around
         assert_nothing_raised {
             Puppet::Network::Client.master.getplugins
         }
     end

     def test_getfacts
         Puppet[:filetimeout] = -1
         Puppet[:factsource] = tempfile()
         Dir.mkdir(Puppet[:factsource])
         hostname = Facter.value(:hostname)

         myfact = File.join(Puppet[:factsource], "myfact.rb")
         File.open(myfact, "w") do |f|
             f.puts %{Facter.add("myfact") do setcode { "yayness" } end }
         end

         assert_nothing_raised {
             Puppet::Network::Client.master.getfacts
         }

         destfile = File.join(Puppet[:factdest], "myfact.rb")

         assert(File.exists?(destfile), "Did not get fact")

         assert_equal(hostname, Facter.value(:hostname), "Lost value to hostname")
         assert_equal("yayness", Facter.value(:myfact), "Did not get correct fact value")

         # Now modify the file and make sure the type is replaced
         File.open(myfact, "w") do |f|
             f.puts %{Facter.add("myfact") do setcode { "funtest" } end }
         end

         assert_nothing_raised {
             Puppet::Network::Client.master.getfacts
         }

         assert_equal("funtest", Facter.value(:myfact), "Did not reload fact")
         assert_equal(hostname, Facter.value(:hostname), "Lost value to hostname")

         # Now run it again and make sure the fact still loads
         assert_nothing_raised {
             Puppet::Network::Client.master.getfacts
         }

         assert_equal("funtest", Facter.value(:myfact), "Did not reload fact")
         assert_equal(hostname, Facter.value(:hostname), "Lost value to hostname")
     end

     # Make sure that setting environment by fact takes precedence to configuration
     def test_setenvironmentwithfact
         name = "environment"
         value = "test_environment"

-        Puppet[:filetimeout] = -1
-        Puppet[:factsource] = tempfile()
-        Dir.mkdir(Puppet[:factsource])
-        file = File.join(Puppet[:factsource], "#{name}.rb")
-        File.open(file, "w") do |f|
-            f.puts %{Facter.add("#{name}") do setcode { "#{value}" } end }
-        end
-
-        Puppet::Network::Client.master.getfacts
+        Facter.stubs(:to_hash).returns(name => value)

         assert_equal(value,
Puppet::Network::Client.master.facts[name]) end # Make sure we load all facts on startup. def test_loadfacts dirs = [tempfile(), tempfile()] count = 0 names = [] dirs.each do |dir| Dir.mkdir(dir) name = "fact%s" % count names << name file = File.join(dir, "%s.rb" % name) # Write out a plugin file File.open(file, "w") do |f| f.puts %{Facter.add("#{name}") do setcode { "#{name}" } end } end count += 1 end Puppet[:factpath] = dirs.join(":") names.each do |name| assert_nil(Facter.value(name), "Somehow retrieved invalid fact") end assert_nothing_raised { Puppet::Network::Client.master.loadfacts } names.each do |name| assert_equal(name, Facter.value(name), "Did not retrieve facts") end end if Process.uid == 0 # Testing #283. Make sure plugins et al are downloaded as the running user. def test_download_ownership dir = tstdir() dest = tstdir() file = File.join(dir, "file") File.open(file, "w") { |f| f.puts "funtest" } user = nonrootuser() group = nonrootgroup() chowner = Puppet::Type.type(:file).create :path => dir, :owner => user.name, :group => group.name, :recurse => true assert_apply(chowner) chowner.remove assert_equal(user.uid, File.stat(file).uid) assert_equal(group.gid, File.stat(file).gid) assert_nothing_raised { Puppet::Network::Client.master.download(:dest => dest, :source => dir, :name => "testing" ) {} } destfile = File.join(dest, "file") assert(FileTest.exists?(destfile), "Did not create destfile") assert_equal(Process.uid, File.stat(destfile).uid) end end # Test retrieving all of the facts. def test_facts facts = nil assert_nothing_raised do facts = Puppet::Network::Client.master.facts end Facter.to_hash.each do |fact, value| assert_equal(facts[fact.downcase], value.to_s, "%s is not equal" % fact.inspect) end # Make sure the puppet version got added assert_equal(Puppet::PUPPETVERSION, facts["clientversion"], "client version did not get added") # And make sure the ruby version is in there assert_equal(RUBY_VERSION, facts["rubyversion"], "ruby version did not get added") end # #424 def test_caching_of_compile_time file = tempfile() manifest = tempfile() File.open(manifest, "w") { |f| f.puts "file { '#{file}': content => yay }" } Puppet::Node::Facts.indirection.stubs(:save) driver = mkmaster(:Manifest => manifest) driver.local = false master = mkclient(driver) # We have to make everything thinks it's remote, because there's no local caching info master.local = false assert(! 
master.fresh?(master.class.facts), "Considered fresh with no compile at all") assert_nothing_raised { master.run } assert(master.fresh?(master.class.facts), "not considered fresh after compile") # Now make sure the config time is cached assert(master.compile_time, "No stored config time") assert_equal(master.compile_time, Puppet::Util::Storage.cache(:configuration)[:compile_time], "times did not match") time = master.compile_time master.clear File.unlink(file) Puppet::Util::Storage.store # Now make a new master Puppet::Util::Storage.clear master = mkclient(driver) master.run assert_equal(time, master.compile_time, "time was not retrieved from cache") assert(FileTest.exists?(file), "file was not created on second run") end # #540 - make sure downloads aren't affected by noop def test_download_in_noop source = tempfile File.open(source, "w") { |f| f.puts "something" } dest = tempfile Puppet[:noop] = true assert_nothing_raised("Could not download in noop") do @master.download(:dest => dest, :source => source, :tag => "yay") end assert(FileTest.exists?(dest), "did not download in noop mode") assert(Puppet[:noop], "noop got disabled in run") end # #491 - make sure a missing config doesn't kill us def test_missing_localconfig master = mkclient master.local = false driver = master.send(:instance_variable_get, "@driver") driver.local = false Puppet::Node::Facts.indirection.stubs(:save) # Retrieve the configuration master.getconfig # Now the config is up to date, so get rid of the @objects var and # the cached config master.clear File.unlink(master.cachefile) assert_nothing_raised("Missing cache file threw error") do master.getconfig end assert(! @logs.detect { |l| l.message =~ /Could not load/}, "Tried to load cache when it is non-existent") end # #519 - cache the facts so that we notice if they change. def test_factchanges_cause_recompile $value = "one" Facter.add(:testfact) do setcode { $value } end assert_equal("one", Facter.value(:testfact), "fact was not set correctly") master = mkclient master.local = false driver = master.send(:instance_variable_get, "@driver") driver.local = false Puppet::Node::Facts.indirection.stubs(:save) assert_nothing_raised("Could not compile config") do master.getconfig end $value = "two" Facter.clear Facter.loadfacts Facter.add(:testfact) do setcode { $value } end facts = master.class.facts assert_equal("two", Facter.value(:testfact), "fact did not change") assert(master.send(:facts_changed?, facts), "master does not think facts changed") assert(! master.fresh?(facts), "master is considered fresh after facts changed") assert_nothing_raised("Could not recompile when facts changed") do master.getconfig end end def test_locking master = mkclient class << master def getconfig raise ArgumentError, "Just testing" end end master.run assert(! 
master.send(:lockfile).locked?, "Master is still locked after failure") end # Make sure we get a value for timeout def test_config_timeout master = Puppet::Network::Client.client(:master) time = Integer(Puppet[:configtimeout]) assert_equal(time, master.timeout, "Did not get default value for timeout") assert_equal(time, master.timeout, "Did not get default value for timeout on second run") # Reset it Puppet[:configtimeout] = "50" assert_equal(50, master.timeout, "Did not get changed default value for timeout") assert_equal(50, master.timeout, "Did not get changed default value for timeout on second run") # Now try an integer Puppet[:configtimeout] = 100 assert_equal(100, master.timeout, "Did not get changed integer default value for timeout") assert_equal(100, master.timeout, "Did not get changed integer default value for timeout on second run") end # #569 -- Make sure we can ignore dynamic facts. def test_dynamic_facts client = mkclient assert_equal(%w{memorysize memoryfree swapsize swapfree}, client.class.dynamic_facts, "Did not get correct defaults for dynamic facts") # Cache some values for comparison cached = {"one" => "yep", "two" => "nope"} Puppet::Util::Storage.cache(:configuration)[:facts] = cached assert(! client.send(:facts_changed?, cached), "Facts incorrectly considered to be changed") # Now add some values to the passed result and make sure we get a positive newfacts = cached.dup newfacts["changed"] = "something" assert(client.send(:facts_changed?, newfacts), "Did not catch changed fact") # Now add a dynamic fact and make sure it's ignored newfacts = cached.dup newfacts["memorysize"] = "something" assert(! client.send(:facts_changed?, newfacts), "Dynamic facts resulted in a false positive") # And try it with both cached["memorysize"] = "something else" assert(! client.send(:facts_changed?, newfacts), "Dynamic facts resulted in a false positive") # And finally, with only in the cache newfacts.delete("memorysize") assert(! client.send(:facts_changed?, newfacts), "Dynamic facts resulted in a false positive") end def test_splay client = mkclient # Make sure we default to no splay client.expects(:sleep).never assert_nothing_raised("Failed to call splay") do client.send(:splay) end # Now set it to true and make sure we get the right value client = mkclient client.expects(:sleep) Puppet[:splay] = true assert_nothing_raised("Failed to call sleep when splay is true") do client.send(:splay) end time = Puppet::Util::Storage.cache(:configuration)[:splay_time] assert(time, "Splay time was not cached") # Now try it again client = mkclient client.expects(:sleep).with(time) assert_nothing_raised("Failed to call sleep when splay is true with a cached value") do client.send(:splay) end end def test_environment_is_added_to_facts facts = Puppet::Network::Client::Master.facts assert_equal(facts["environment"], Puppet[:environment], "Did not add environment to client facts") # Now set it to a real value Puppet[:environments] = "something,else" Puppet[:environment] = "something" facts = Puppet::Network::Client::Master.facts assert_equal(facts["environment"], Puppet[:environment], "Did not add environment to client facts") end # This is partially to fix #532, but also to save on memory. 
     def test_remove_objects_after_every_run
         client = mkclient
         ftype = Puppet::Type.type(:file)
         file = ftype.create :title => "/what/ever", :ensure => :present
         config = Puppet::Node::Catalog.new
         config.add_resource(file)
         config.expects :apply

         client.catalog = config
         client.expects(:getconfig)
         client.run

         assert_nil(ftype[@createdfile], "file object was not removed from memory")
     end

     # #685
     def test_http_failures_do_not_kill_puppetd
         client = mkclient
         client.meta_def(:getconfig) { raise "A failure" }

         assert_nothing_raised("Failure in getconfig threw an error") do
             client.run
         end
     end

     def test_invalid_catalogs_do_not_get_cached
         master = mkmaster :Code => "notify { one: require => File[yaytest] }"
         master.local = false # so it gets cached
         client = mkclient(master)
         client.stubs(:facts).returns({})
         client.local = false

         Puppet::Node::Facts.indirection.stubs(:terminus_class).returns(:memory)

         # Make sure the config is not cached.
         client.expects(:cache).never

         client.getconfig

         # Doesn't throw an exception, but definitely fails.
         client.run
     end
 end
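For reference, a minimal standalone sketch of the fact-collection pattern this patch adopts: the result hash is built with inject over Facter.to_hash instead of mutating a local inside Facter.each, which is also what lets the updated test_setenvironmentwithfact simply stub Facter.to_hash. The collect_facts helper and the inlined fact hash below are illustrative assumptions only, not part of the patch.

#!/usr/bin/env ruby
# Illustrative sketch only -- not part of the patch above.
# Shows the inject-based fact collection that self.facts now uses,
# with a plain hash standing in for Facter.to_hash so it runs anywhere.

def collect_facts(fact_hash, downcase = false)
    # Fold each name/value pair into a fresh hash, stringifying values,
    # mirroring the new Facter.to_hash.inject({}) { ... } form.
    fact_hash.inject({}) do |newhash, (name, fact)|
        newhash[name] = downcase ? fact.to_s.downcase : fact.to_s
        newhash
    end
end

if __FILE__ == $0
    stubbed = { "hostname" => "Node1", "environment" => "test_environment" }
    p collect_facts(stubbed)        # values as strings
    p collect_facts(stubbed, true)  # values downcased, as with Puppet[:downcasefacts]
end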