diff --git a/lib/puppet/client/master.rb b/lib/puppet/client/master.rb index 97d224065..fb7922937 100644 --- a/lib/puppet/client/master.rb +++ b/lib/puppet/client/master.rb @@ -1,627 +1,642 @@ # The client for interacting with the puppetmaster config server. require 'sync' class Puppet::Client::MasterClient < Puppet::Client unless defined? @@sync @@sync = Sync.new end @handler = Puppet::Server::Master Puppet.setdefaults("puppetd", :puppetdlockfile => [ "$statedir/puppetdlock", "A lock file to temporarily stop puppetd from doing anything."], :usecacheonfailure => [true, "Whether to use the cached configuration when the remote configuration will not compile. This option is useful for testing new configurations, where you want to fix the broken configuration rather than reverting to a known-good one." ] ) Puppet.setdefaults(:puppetd, :reportserver => ["$server", "The server to which to send transaction reports." ], :report => [false, "Whether to send reports after every transaction." ] ) # Plugin information. Puppet.setdefaults("puppet", :pluginpath => ["$vardir/plugins", "Where Puppet should look for plugins. Multiple directories should be colon-separated, like normal PATH variables."], :plugindest => ["$vardir/plugins", "Where Puppet should store plugins that it pulls down from the central server."], :pluginsource => ["puppet://$server/plugins", "From where to retrieve plugins. The standard Puppet ``file`` type is used for retrieval, so anything that is a valid file source can be used here."], :pluginsync => [false, "Whether plugins should be synced with the central server."], :pluginsignore => [".svn CVS", "What files to ignore when pulling down plugins."] ) # Central fact information. Puppet.setdefaults("puppet", :factpath => ["$vardir/facts", "Where Puppet should look for facts. Multiple directories should be colon-separated, like normal PATH variables."], :factdest => ["$vardir/facts", "Where Puppet should store facts that it pulls down from the central server."], :factsource => ["puppet://$server/facts", "From where to retrieve facts. The standard Puppet ``file`` type is used for retrieval, so anything that is a valid file source can be used here."], :factsync => [false, "Whether facts should be synced with the central server."], :factsignore => [".svn CVS", "What files to ignore when pulling down facts."] ) @drivername = :Master attr_accessor :objects class << self # Puppetd should only have one instance running, and we need a way # to retrieve it. attr_accessor :instance include Puppet::Util end def self.facts # Retrieve the facts from the central server. if Puppet[:factsync] self.getfacts() end facts = {} Facter.each { |name,fact| facts[name] = fact.to_s.downcase } # Add our client version to the list of facts, so people can use it # in their manifests facts["clientversion"] = Puppet.version.to_s facts end # This method actually applies the configuration. def apply(tags = nil, ignoreschedules = false) dostorage() unless defined? 
@objects raise Puppet::Error, "Cannot apply; objects not defined" end transaction = @objects.evaluate if tags transaction.tags = tags end if ignoreschedules transaction.ignoreschedules = true end transaction.addtimes :config_retrieval => @configtime begin transaction.evaluate rescue Puppet::Error => detail Puppet.err "Could not apply complete configuration: %s" % detail rescue => detail Puppet.err "Found a bug: %s" % detail if Puppet[:trace] puts detail.backtrace end ensure Puppet::Storage.store end - + if Puppet[:report] - begin - report = transaction.report() - if Puppet[:rrdgraph] == true - report.graph() - end - reportclient().report(report) - rescue => detail - Puppet.err "Reporting failed: %s" % detail - end + report(transaction) end return transaction + ensure + if defined? transaction and transaction + transaction.cleanup + end end # Cache the config def cache(text) Puppet.config.use(:puppet, :sslcertificates, :puppetd) Puppet.info "Caching configuration at %s" % self.cachefile confdir = File.dirname(Puppet[:localconfig]) #unless FileTest.exists?(confdir) # Puppet.recmkdir(confdir, 0770) #end File.open(self.cachefile + ".tmp", "w", 0660) { |f| f.print text } File.rename(self.cachefile + ".tmp", self.cachefile) end def cachefile unless defined? @cachefile @cachefile = Puppet[:localconfig] + ".yaml" end @cachefile end def clear #@objects = nil @objects.remove(true) Puppet::Type.allclear end # Disable running the configuration. This can be used from the command # line, but is also used to make sure only one client is running at a time. def disable(running = false) threadlock(:puppetd) do text = nil if running text = Process.pid else text = "" Puppet.notice "Disabling puppetd" end Puppet.config.use(:puppet) begin File.open(Puppet[:puppetdlockfile], "w") { |f| f.puts text } rescue => detail raise Puppet::Error, "Could not lock puppetd: %s" % detail end end end # Initialize and load storage def dostorage begin Puppet::Storage.init Puppet::Storage.load rescue => detail Puppet.err "Corrupt state file %s: %s" % [Puppet[:statefile], detail] begin File.unlink(Puppet[:statefile]) retry rescue => detail raise Puppet::Error.new("Cannot remove %s: %s" % [Puppet[:statefile], detail]) end end end # Enable running again. This can be used from the command line, but # is also used to make sure only one client is running at a time. def enable(running = false) threadlock(:puppetd) do unless running Puppet.debug "Enabling puppetd" end if FileTest.exists? Puppet[:puppetdlockfile] File.unlink(Puppet[:puppetdlockfile]) end end end # Check whether our configuration is up to date def fresh? unless defined? @configstamp return false end # We're willing to give a 2 second drift if @driver.freshness - @configstamp < 1 return true else return false end end # Retrieve the config from a remote server. If this fails, then # use the cached copy. def getconfig if self.fresh? Puppet.info "Config is up to date" return end Puppet.debug("getting config") dostorage() # Retrieve the plugins. if Puppet[:pluginsync] self.class.getplugins() end facts = self.class.facts unless facts.length > 0 raise Puppet::ClientError.new( "Could not retrieve any facts" ) end objects = nil if @local # If we're local, we don't have to do any of the conversion # stuff. 
objects = @driver.getconfig(facts, "yaml") @configstamp = Time.now.to_i if objects == "" raise Puppet::Error, "Could not retrieve configuration" end else textobjects = "" textfacts = CGI.escape(YAML.dump(facts)) benchmark(:debug, "Retrieved configuration") do # error handling for this is done in the network client begin textobjects = @driver.getconfig(textfacts, "yaml") rescue => detail Puppet.err "Could not retrieve configuration: %s" % detail unless Puppet[:usecacheonfailure] @objects = nil Puppet.warning "Not using cache on failed configuration" return end end end fromcache = false if textobjects == "" textobjects = self.retrievecache if textobjects == "" raise Puppet::Error.new( "Cannot connect to server and there is no cached configuration" ) end Puppet.warning "Could not get config; using cached copy" fromcache = true else @configstamp = Time.now.to_i end begin textobjects = CGI.unescape(textobjects) rescue => detail raise Puppet::Error, "Could not CGI.unescape configuration" end if @cache and ! fromcache self.cache(textobjects) end begin objects = YAML.load(textobjects) rescue => detail raise Puppet::Error, "Could not understand configuration: %s" % detail.to_s end end unless objects.is_a?(Puppet::TransBucket) raise NetworkClientError, "Invalid returned objects of type %s" % objects.class end self.setclasses(objects.classes) # Clear all existing objects, so we can recreate our stack. if defined? @objects Puppet::Type.allclear # Make sure all of the objects are really gone. @objects.remove(true) end @objects = nil # First create the default scheduling objects Puppet.type(:schedule).mkdefaultschedules # Now convert the objects to real Puppet objects @objects = objects.to_type if @objects.nil? raise Puppet::Error, "Configuration could not be processed" end # and perform any necessary final actions before we evaluate. @objects.finalize return @objects end # Just so we can specify that we are "the" instance. def initialize(*args) super @configtime = Time.now self.class.instance = self @running = false end # Make sure only one client runs at a time, and make sure only one thread # runs at a time. However, this does not lock local clients -- you could have # as many separate puppet scripts running as you want. def lock if @local yield else #@@sync.synchronize(Sync::EX) do disable(true) begin yield ensure enable(true) end #end end end def locked? return(FileTest.exists? Puppet[:puppetdlockfile]) end def lockpid if FileTest.exists? Puppet[:puppetdlockfile] text = File.read(Puppet[:puppetdlockfile]).chomp if text =~ /\d+/ return text.to_i else return 0 end else return 0 end end # Mark that we should restart. The Puppet module checks whether we're running, # so this only gets called if we're in the middle of a run. def restart # If we're currently running, then just mark for later Puppet.notice "Received signal to restart; waiting until run is complete" @restart = true end # Should we restart? def restart? if defined? @restart @restart else false end end # Retrieve the cached config def retrievecache if FileTest.exists?(self.cachefile) return File.read(self.cachefile) else return "" end end # The code that actually runs the configuration. def run(tags = nil, ignoreschedules = false) # Check if the lock is stale, so we can clear it if locked? 
pid = lockpid if pid != 0 begin Process.kill(0, pid) rescue Errno::ESRCH # No process with the given PID exists; stale lockfile File.unlink(Puppet[:puppetdlockfile]) Puppet.notice("Stale lockfile %s left by process %i; removing" % [Puppet[:puppetdlockfile], pid]) lockpid = false else Puppet.notice "Locked by process %s" % pid end end end if locked? Puppet.notice "Lock file %s exists; skipping configuration run" % Puppet[:puppetdlockfile] else lock do @running = true @configtime = thinmark do self.getconfig end if defined? @objects and @objects unless @local Puppet.notice "Starting configuration run" end benchmark(:notice, "Finished configuration run") do self.apply(tags, ignoreschedules) end end @running = false end # Did we get HUPped during the run? If so, then restart now that we're # done with the run. if self.restart? Process.kill(:HUP, $$) end end end def running? @running end # Store the classes in the classfile, but only if we're not local. def setclasses(ary) if @local return end unless ary and ary.length > 0 Puppet.info "No classes to store" return end begin File.open(Puppet[:classfile], "w") { |f| f.puts ary.join("\n") } rescue => detail Puppet.err "Could not create class file %s: %s" % [Puppet[:classfile], detail] end end private + # Download files from the remote server, returning a list of all + # changed files. def self.download(args) objects = Puppet::Type.type(:component).create( :name => "#{args[:name]}_collector" ) hash = { :path => args[:dest], :recurse => true, :source => args[:source], :tag => "#{args[:name]}s", :owner => Process.uid, :group => Process.gid } if args[:ignore] hash[:ignore] = args[:ignore].split(/\s+/) end objects.push Puppet::Type.type(:file).create(hash) Puppet.info "Retrieving #{args[:name]}s" begin trans = objects.evaluate trans.ignoretags = true trans.evaluate rescue Puppet::Error => detail if Puppet[:debug] puts detail.backtrace end Puppet.err "Could not retrieve #{args[:name]}s: %s" % detail end # Now source all of the changed objects, but only source those # that are top-level. + files = [] trans.changed?.find_all do |object| - yield object + yield object if block_given? + files << object[:path] end + trans.cleanup # Now clean up after ourselves objects.remove + files end # Retrieve facts from the central server. def self.getfacts # Clear all existing definitions. Facter.clear # Download the new facts path = Puppet[:factpath].split(":") files = [] download(:dest => Puppet[:factdest], :source => Puppet[:factsource], :ignore => Puppet[:factsignore], :name => "fact") do |object| next unless path.include?(File.dirname(object[:path])) files << object[:path] end ensure # Reload everything. if Facter.respond_to? :loadfacts Facter.loadfacts elsif Facter.respond_to? :load Facter.load else raise Puppet::Error, "You must upgrade your version of Facter to use centralized facts" end # This loads all existing facts and any new ones. We have to remove and # reload because there's no way to unload specific facts. loadfacts() end # Retrieve the plugins from the central server. We only have to load the # changed plugins, because Puppet::Type loads plugins on demand. 
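    # A rough usage sketch with assumed settings (both default to false),
    # showing how a client could pull these down outside of a normal run:
    Puppet[:pluginsync] = true
    Puppet[:factsync] = true
    Puppet::Client::MasterClient.getplugins    # sync $vardir/plugins and reload any changed plugins
    facts = Puppet::Client::MasterClient.facts # syncs facts first, then collects the Facter values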
def self.getplugins path = Puppet[:pluginpath].split(":") download(:dest => Puppet[:plugindest], :source => Puppet[:pluginsource], :ignore => Puppet[:pluginsignore], :name => "plugin") do |object| next unless path.include?(File.dirname(object[:path])) begin Puppet.info "Reloading plugin %s" % File.basename(File.basename(object[:path])).sub(".rb",'') load object[:path] rescue => detail Puppet.warning "Could not reload plugin %s: %s" % [object[:path], detail] end end end def self.loaddir(dir, type) return unless FileTest.directory?(dir) Dir.entries(dir).find_all { |e| e =~ /\.rb$/ }.each do |file| fqfile = File.join(dir, file) begin Puppet.info "Loading #{type} %s" % File.basename(file.sub(".rb",'')) load fqfile rescue => detail Puppet.warning "Could not load #{type} %s: %s" % [fqfile, detail] end end end def self.loadfacts Puppet[:factpath].split(":").each do |dir| loaddir(dir, "fact") end end + + # Send off the transaction report. + def report(transaction) + begin + report = transaction.report() + if Puppet[:rrdgraph] == true + report.graph() + end + reportclient().report(report) + rescue => detail + Puppet.err "Reporting failed: %s" % detail + end + end def reportclient unless defined? @reportclient @reportclient = Puppet::Client::Reporter.new( :Server => Puppet[:reportserver] ) end @reportclient end loadfacts() end # $Id$ diff --git a/lib/puppet/pgraph.rb b/lib/puppet/pgraph.rb index bf156ed2d..d988ff36a 100644 --- a/lib/puppet/pgraph.rb +++ b/lib/puppet/pgraph.rb @@ -1,115 +1,122 @@ #!/usr/bin/env ruby # # Created by Luke A. Kanies on 2006-11-24. # Copyright (c) 2006. All rights reserved. require 'puppet/gratr/digraph' require 'puppet/gratr/import' require 'puppet/gratr/dot' require 'puppet/relationship' # This class subclasses a graph class in order to handle relationships # among resources. class Puppet::PGraph < GRATR::Digraph # This is the type used for splicing. attr_accessor :container_type + def clear + @vertex_dict.clear + if defined? @edge_number + @edge_number.clear + end + end + # The dependencies for a given resource. def dependencies(resource) tree_from_vertex(resource, :dfs).keys end # Override this method to use our class instead. def edge_class() Puppet::Relationship end # Determine all of the leaf nodes below a given vertex. def leaves(vertex, type = :dfs) tree = tree_from_vertex(vertex, type) leaves = tree.keys.find_all { |c| adjacent(c, :direction => :out).empty? } return leaves end # Collect all of the edges that the passed events match. Returns # an array of edges. def matching_edges(events) events.collect do |event| source = event.source unless vertex?(source) Puppet.warning "Got an event from invalid vertex %s" % source.ref next end # Get all of the edges that this vertex should forward events # to, which is the same thing as saying all edges directly below # This vertex in the graph. adjacent(source, :direction => :out, :type => :edges).find_all do |edge| edge.match?(event.event) end.each { |edge| target = edge.target if target.respond_to?(:ref) source.info "Scheduling %s of %s" % [edge.callback, target.ref] end } end.flatten end # Take container information from another graph and use it # to replace any container vertices with their respective leaves. # This creates direct relationships where there were previously # indirect relationships through the containers. 
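    # A toy sketch of the effect, using plain objects instead of real
    # resources (the Struct stands in for Puppet::Type::Component and the
    # string vertices are placeholders):
    Container = Struct.new(:name)
    web = Container.new("web")

    # Containment graph: the component contains two leaf resources.
    containment = Puppet::PGraph.new
    containment.add_edge!(web, "file-a")
    containment.add_edge!(web, "file-b")

    # Relationship graph: something depends on the component as a whole.
    relationships = Puppet::PGraph.new
    relationships.add_edge!("package-x", web)

    # Splicing swaps the container for direct edges to its leaves.
    relationships.splice!(containment, Container)
    relationships.adjacent("package-x", :direction => :out).sort # => ["file-a", "file-b"]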
def splice!(other, type) vertices.each do |vertex| # Go through each vertex and replace the edges with edges # to the leaves instead next unless vertex.is_a?(type) leaves = other.leaves(vertex) if leaves.empty? remove_vertex!(vertex) next end # First create new edges for each of the :in edges adjacent(vertex, :direction => :in, :type => :edges).each do |edge| leaves.each do |leaf| add_edge!(edge.source, leaf, edge.label) if cyclic? raise ArgumentError, "%s => %s results in a loop" % [up, leaf] end end end # Then for each of the out edges adjacent(vertex, :direction => :out, :type => :edges).each do |edge| leaves.each do |leaf| add_edge!(leaf, edge.target, edge.label) if cyclic? raise ArgumentError, "%s => %s results in a loop" % [leaf, down] end end end # And finally, remove the vertex entirely. remove_vertex!(vertex) end end # For some reason, unconnected vertices do not show up in # this graph. def to_jpg(name) gv = vertices() Dir.chdir("/Users/luke/Desktop/pics") do induced_subgraph(gv).write_to_graphic_file('jpg', name) end end end # $Id$ diff --git a/lib/puppet/server/pelement.rb b/lib/puppet/server/pelement.rb index 3001cd9a1..0c9537957 100755 --- a/lib/puppet/server/pelement.rb +++ b/lib/puppet/server/pelement.rb @@ -1,188 +1,191 @@ require 'puppet' require 'puppet/server' module Puppet # Serve Puppet elements. Useful for querying, copying, and, um, other stuff. class Server::PElement < Server::Handler attr_accessor :local @interface = XMLRPC::Service::Interface.new("pelementserver") { |iface| iface.add_method("string apply(string, string)") iface.add_method("string describe(string, string, array, array)") iface.add_method("string list(string, array, string)") } # Apply a TransBucket as a transaction. def apply(bucket, format = "yaml", client = nil, clientip = nil) unless @local begin case format when "yaml": bucket = YAML::load(Base64.decode64(bucket)) else raise Puppet::Error, "Unsupported format '%s'" % format end rescue => detail raise Puppet::Error, "Could not load YAML TransBucket: %s" % detail end end component = bucket.to_type # Create a client, but specify the remote machine as the server # because the class requires it, even though it's unused client = Puppet::Client::MasterClient.new(:Server => client||"localhost") # Set the objects client.objects = component # And then apply the configuration. This way we're reusing all # the code in there. It should probably just be separated out, though. transaction = client.apply + + # And then clean up + component.remove # It'd be nice to return some kind of report, but... at this point # we have no such facility. return "success" end # Describe a given object. This returns the 'is' values for every state # available on the object type. def describe(type, name, retrieve = nil, ignore = [], format = "yaml", client = nil, clientip = nil) Puppet.info "Describing %s[%s]" % [type, name] @local = true unless client typeklass = nil unless typeklass = Puppet.type(type) raise Puppet::Error, "Puppet type %s is unsupported" % type end obj = nil retrieve ||= :all ignore ||= [] if obj = typeklass[name] obj[:check] = retrieve else begin obj = typeklass.create(:name => name, :check => retrieve) rescue Puppet::Error => detail raise Puppet::Error, "%s[%s] could not be created: %s" % [type, name, detail] end end unless obj raise XMLRPC::FaultException.new( 1, "Could not create %s[%s]" % [type, name] ) end trans = obj.to_trans # Now get rid of any attributes they specifically don't want ignore.each do |st| if trans.include? 
st trans.delete(st) end end # And get rid of any attributes that are nil trans.each do |attr, value| if value.nil? trans.delete(attr) end end unless @local case format when "yaml": trans = Base64.encode64(YAML::dump(trans)) else raise XMLRPC::FaultException.new( 1, "Unavailable config format %s" % format ) end end return trans end # Create a new fileserving module. def initialize(hash = {}) if hash[:Local] @local = hash[:Local] else @local = false end end # List all of the elements of a given type. def list(type, ignore = [], base = nil, format = "yaml", client = nil, clientip = nil) @local = true unless client typeklass = nil unless typeklass = Puppet.type(type) raise Puppet::Error, "Puppet type %s is unsupported" % type end # They can pass in false ignore ||= [] ignore = [ignore] unless ignore.is_a? Array bucket = TransBucket.new bucket.type = typeklass.name typeklass.list.each do |obj| next if ignore.include? obj.name object = TransObject.new(obj.name, typeklass.name) bucket << object end unless @local case format when "yaml": begin bucket = Base64.encode64(YAML::dump(bucket)) rescue => detail Puppet.err detail raise XMLRPC::FaultException.new( 1, detail.to_s ) end else raise XMLRPC::FaultException.new( 1, "Unavailable config format %s" % format ) end end return bucket end private def authcheck(file, mount, client, clientip) unless mount.allowed?(client, clientip) mount.warning "%s cannot access %s" % [client, file] raise Puppet::Server::AuthorizationError, "Cannot access %s" % mount end end # Deal with ignore parameters. def handleignore(children, path, ignore) ignore.each { |ignore| Dir.glob(File.join(path,ignore), File::FNM_DOTMATCH) { |match| children.delete(File.basename(match)) } } return children end def to_s "pelementserver" end end end # $Id$ diff --git a/lib/puppet/transaction.rb b/lib/puppet/transaction.rb index d6d1669a1..99b03b435 100644 --- a/lib/puppet/transaction.rb +++ b/lib/puppet/transaction.rb @@ -1,591 +1,592 @@ # the class that actually walks our resource/state tree, collects the changes, # and performs them require 'puppet' require 'puppet/statechange' module Puppet class Transaction attr_accessor :component, :resources, :ignoreschedules, :ignoretags attr_accessor :relgraph, :sorted_resources attr_writer :tags include Puppet::Util Puppet.config.setdefaults(:transaction, :tags => ["", "Tags to use to find resources. If this is set, then only resources tagged with the specified tags will be applied. Values must be comma-separated."] ) # Add some additional times for reporting def addtimes(hash) hash.each do |name, num| @timemetrics[name] = num end end # Apply all changes for a resource, returning a list of the events # generated. def apply(resource) begin changes = resource.evaluate rescue => detail if Puppet[:trace] puts detail.backtrace end resource.err "Failed to retrieve current state: %s" % detail # Mark that it failed @failures[resource] += 1 # And then return return [] end unless changes.is_a? Array changes = [changes] end if changes.length > 0 @resourcemetrics[:out_of_sync] += 1 end resourceevents = changes.collect { |change| @changes << change @count += 1 change.transaction = self events = nil begin # use an array, so that changes can return more than one # event if they want events = [change.forward].flatten.reject { |e| e.nil? 
} rescue => detail if Puppet[:trace] puts detail.backtrace end change.state.err "change from %s to %s failed: %s" % [change.state.is_to_s, change.state.should_to_s, detail] @failures[resource] += 1 next # FIXME this should support using onerror to determine # behaviour; or more likely, the client calling us # should do so end # Mark that our change happened, so it can be reversed # if we ever get to that point unless events.nil? or (events.is_a?(Array) and events.empty?) change.changed = true @resourcemetrics[:applied] += 1 end events }.flatten.reject { |e| e.nil? } unless changes.empty? # Record when we last synced resource.cache(:synced, Time.now) # Flush, if appropriate if resource.respond_to?(:flush) resource.flush end end resourceevents end # Find all of the changed resources. def changed? @changes.find_all { |change| change.changed }.collect { |change| change.state.parent }.uniq end - # Do any necessary cleanup. Basically just removes any generated - # resources. + # Do any necessary cleanup. If we don't get rid of the graphs, the + # contained resources might never get cleaned up. def cleanup @generated.each do |resource| resource.remove end + if defined? @relgraph + @relgraph.clear + end + @resources.clear end # See if the resource generates new resources at evaluation time. def eval_generate(resource) if resource.respond_to?(:eval_generate) if children = resource.eval_generate depthfirst = resource.depthfirst? dependents = @relgraph.adjacent(resource, :direction => :out, :type => :edges) targets = @relgraph.adjacent(resource, :direction => :in, :type => :edges) children.each do |gen_child| if depthfirst @relgraph.add_edge!(gen_child, resource) else @relgraph.add_edge!(resource, gen_child) end dependents.each do |edge| @relgraph.add_edge!(gen_child, edge.target, edge.label) end targets.each do |edge| @relgraph.add_edge!(edge.source, gen_child, edge.label) end @generated << gen_child end return children end end end # Evaluate a single resource. def eval_resource(resource) events = [] if resource.is_a?(Puppet::Type::Component) raise Puppet::DevError, "Got a component to evaluate" end if skip?(resource) @resourcemetrics[:skipped] += 1 else @resourcemetrics[:scheduled] += 1 # We need to generate first regardless, because the recursive # actions sometimes change how the top resource is applied. children = eval_generate(resource) if resource.depthfirst? and children children.each do |child| events += eval_resource(child) end end # Perform the actual changes seconds = thinmark do events += apply(resource) end if ! resource.depthfirst? and children children.each do |child| events += eval_resource(child) end end # Keep track of how long we spend in each type of resource @timemetrics[resource.class.name] += seconds end # Check to see if there are any events for this resource if triggedevents = trigger(resource) events += triggedevents end # Collect the targets of any subscriptions to those events @relgraph.matching_edges(events).each do |edge| @targets[edge.target] << edge end # And return the events for collection events end # This method does all the actual work of running a transaction. It # collects all of the changes, executes them, and responds to any # necessary events. def evaluate @count = 0 # Start logging. Puppet::Log.newdestination(@report) prepare() begin allevents = @sorted_resources.collect { |resource| eval_resource(resource) }.flatten.reject { |e| e.nil? } ensure # And then close the transaction log. 
Puppet::Log.close(@report) end - - cleanup() Puppet.debug "Finishing transaction %s with %s changes" % [self.object_id, @count] allevents end # Determine whether a given resource has failed. def failed?(obj) if @failures[obj] > 0 return @failures[obj] else return false end end # Does this resource have any failed dependencies? def failed_dependencies?(resource) # First make sure there are no failed dependencies. To do this, # we check for failures in any of the vertexes above us. It's not # enough to check the immediate dependencies, which is why we use # a tree from the reversed graph. skip = false - resource.info "checking for failed deps" @relgraph.reversal.tree_from_vertex(resource, :dfs).keys.each do |dep| if fails = failed?(dep) resource.notice "Dependency %s[%s] has %s failures" % [dep.class.name, dep.name, @failures[dep]] skip = true end end return skip end # Collect any dynamically generated resources. def generate list = @resources.vertices # Store a list of all generated resources, so that we can clean them up # after the transaction closes. @generated = [] newlist = [] while ! list.empty? list.each do |resource| if resource.respond_to?(:generate) made = resource.generate next unless made unless made.is_a?(Array) made = [made] end made.uniq! made.each do |res| @resources.add_vertex!(res) newlist << res @generated << res end end end list.clear list = newlist newlist = [] end end # this should only be called by a Puppet::Type::Component resource now # and it should only receive an array def initialize(resources) @resources = resources.to_graph @resourcemetrics = { :total => @resources.vertices.length, :out_of_sync => 0, # The number of resources that had changes :applied => 0, # The number of resources fixed :skipped => 0, # The number of resources skipped :restarted => 0, # The number of resources triggered :failed_restarts => 0, # The number of resources that fail a trigger :scheduled => 0 # The number of resources scheduled } # Metrics for distributing times across the different types. @timemetrics = Hash.new(0) # The number of resources that were triggered in this run @triggered = Hash.new { |hash, key| hash[key] = Hash.new(0) } # Targets of being triggered. @targets = Hash.new do |hash, key| hash[key] = [] end # The changes we're performing @changes = [] # The resources that have failed and the number of failures each. This # is used for skipping resources because of failed dependencies. @failures = Hash.new do |h, key| h[key] = 0 end @report = Report.new end # Prefetch any providers that support it. We don't support prefetching # types, just providers. def prefetch @resources.collect { |obj| if pro = obj.provider pro.class else nil end }.reject { |o| o.nil? }.uniq.each do |klass| # XXX We need to do something special here in case of failure. if klass.respond_to?(:prefetch) klass.prefetch end end end # Prepare to evaluate the elements in a transaction. def prepare prefetch() # Now add any dynamically generated resources generate() # Create a relationship graph from our resource graph @relgraph = relationship_graph @sorted_resources = @relgraph.topsort end # Create a graph of all of the relationships in our resource graph. 
def relationship_graph graph = Puppet::PGraph.new # First create the dependency graph @resources.vertices.each do |vertex| graph.add_vertex!(vertex) vertex.builddepends.each do |edge| graph.add_edge!(edge) end end # Then splice in the container information graph.splice!(@resources, Puppet::Type::Component) # Lastly, add in any autorequires graph.vertices.each do |vertex| vertex.autorequire.each do |edge| unless graph.edge?(edge) graph.add_edge!(edge) end end end return graph end # Generate a transaction report. def report @resourcemetrics[:failed] = @failures.find_all do |name, num| num > 0 end.length # Get the total time spent @timemetrics[:total] = @timemetrics.inject(0) do |total, vals| total += vals[1] total end # Unfortunately, RRD does not deal well with changing lists of values, # so we have to pick a list of values and stick with it. In this case, # that means we record the total time, the config time, and that's about # it. We should probably send each type's time as a separate metric. @timemetrics.dup.each do |name, value| if Puppet::Type.type(name) @timemetrics.delete(name) end end # Add all of the metrics related to resource count and status @report.newmetric(:resources, @resourcemetrics) # Record the relative time spent in each resource. @report.newmetric(:time, @timemetrics) # Then all of the change-related metrics @report.newmetric(:changes, :total => @changes.length ) @report.time = Time.now return @report end # Roll all completed changes back. def rollback @targets.clear @triggered.clear allevents = @changes.reverse.collect { |change| # skip changes that were never actually run unless change.changed Puppet.debug "%s was not changed" % change.to_s next end begin events = change.backward rescue => detail Puppet.err("%s rollback failed: %s" % [change,detail]) if Puppet[:trace] puts detail.backtrace end next # at this point, we would normally do error handling # but i haven't decided what to do for that yet # so just record that a sync failed for a given resource #@@failures[change.state.parent] += 1 # this still could get hairy; what if file contents changed, # but a chmod failed? how would i handle that error? dern end @relgraph.matching_edges(events).each do |edge| @targets[edge.target] << edge end # Now check to see if there are any events for this child. # Kind of hackish, since going backwards goes a change at a # time, not a child at a time. trigger(change.state.parent) # And return the events for collection events }.flatten.reject { |e| e.nil? } end # Is the resource currently scheduled? def scheduled?(resource) self.ignoreschedules or resource.scheduled? end # Should this resource be skipped? def skip?(resource) skip = false if ! tagged?(resource) resource.debug "Not tagged with %s" % tags.join(", ") elsif ! scheduled?(resource) resource.debug "Not scheduled" elsif failed_dependencies?(resource) resource.warning "Skipping because of failed dependencies" else return false end return true end # The tags we should be checking. def tags # Allow the tags to be overridden unless defined? @tags @tags = Puppet[:tags] end unless defined? @processed_tags if @tags.nil? or @tags == "" @tags = [] else @tags = [@tags] unless @tags.is_a? Array @tags = @tags.collect do |tag| tag.split(/\s*,\s*/) end.flatten end @processed_tags = true end @tags end # Is this resource tagged appropriately? def tagged?(resource) self.ignoretags or tags.empty? or resource.tagged?(tags) end # Are there any edges that target this resource? 
def targeted?(resource) @targets[resource] end # Trigger any subscriptions to a child. This does an upwardly recursive # search -- it triggers the passed resource, but also the resource's parent # and so on up the tree. def trigger(child) obj = child callbacks = Hash.new { |hash, key| hash[key] = [] } sources = Hash.new { |hash, key| hash[key] = [] } trigged = [] while obj if @targets.include?(obj) callbacks.clear sources.clear @targets[obj].each do |edge| # Some edges don't have callbacks next unless edge.callback # Collect all of the subs for each callback callbacks[edge.callback] << edge # And collect the sources for logging sources[edge.source] << edge.callback end sources.each do |source, callbacklist| obj.debug "%s[%s] results in triggering %s" % [source.class.name, source.name, callbacklist.join(", ")] end callbacks.each do |callback, subs| message = "Triggering '%s' from %s dependencies" % [callback, subs.length] obj.notice message # At this point, just log failures, don't try to react # to them in any way. begin obj.send(callback) @resourcemetrics[:restarted] += 1 rescue => detail obj.err "Failed to call %s on %s: %s" % [callback, obj, detail] @resourcemetrics[:failed_restarts] += 1 if Puppet[:trace] puts detail.backtrace end end # And then add an event for it. trigged << Puppet::Event.new( :event => :triggered, :transaction => self, :source => obj, :message => message ) triggered(obj, callback) end end obj = obj.parent end if trigged.empty? return nil else return trigged end end def triggered(resource, method) @triggered[resource][method] += 1 end def triggered?(resource, method) @triggered[resource][method] end end end require 'puppet/transaction/report' # $Id$ diff --git a/lib/puppet/type/pfile.rb b/lib/puppet/type/pfile.rb index 24f961a62..0d69ffdda 100644 --- a/lib/puppet/type/pfile.rb +++ b/lib/puppet/type/pfile.rb @@ -1,1026 +1,1029 @@ require 'digest/md5' require 'cgi' require 'etc' require 'uri' require 'fileutils' require 'puppet/type/state' require 'puppet/server/fileserver' module Puppet newtype(:file) do @doc = "Manages local files, including setting ownership and permissions, creation of both files and directories, and retrieving entire files from remote servers. As Puppet matures, it expected that the ``file`` element will be used less and less to manage content, and instead native elements will be used to do so. If you find that you are often copying files in from a central location, rather than using native elements, please contact Reductive Labs and we can hopefully work with you to develop a native element to support what you are doing." newparam(:path) do desc "The path to the file to manage. Must be fully qualified." isnamevar validate do |value| unless value =~ /^#{File::SEPARATOR}/ raise Puppet::Error, "File paths must be fully qualified" end end end newparam(:backup) do desc "Whether files should be backed up before being replaced. If a filebucket is specified, files will be backed up there; else, they will be backed up in the same directory with a ``.puppet-bak`` extension,, and no backups will be made if backup is ``false``. To use filebuckets, you must first create a filebucket in your configuration: filebucket { main: server => puppet } The ``puppetmasterd`` daemon creates a filebucket by default, so you can usually back up to your main server with this configuration. 
Once you've described the bucket in your configuration, you can use it in any file: file { \"/my/file\": source => \"/path/in/nfs/or/something\", backup => main } This will back the file up to the central server. At this point, the only benefits to doing so are that you do not have backup files lying around on each of your machines, a given version of a file is only backed up once, and you can restore any given file manually, no matter how old. Eventually, transactional support will be able to automatically restore filebucketed files. " attr_reader :bucket defaultto ".puppet-bak" munge do |value| case value when false, "false", :false: false when true, "true", ".puppet-bak", :true: ".puppet-bak" when String: # We can't depend on looking this up right now, # we have to do it after all of the objects # have been instantiated. @bucket = value value else self.fail "Invalid backup type %s" % value.inspect end end # Provide a straight-through hook for setting the bucket. def bucket=(bucket) @value = bucket @bucket = bucket end end newparam(:linkmaker) do desc "An internal parameter used by the *symlink* type to do recursive link creation." end newparam(:recurse) do desc "Whether and how deeply to do recursive management." newvalues(:true, :false, :inf, /^[0-9]+$/) munge do |value| newval = super(value) case newval when :true, :inf: true when :false: false else newval end end end newparam(:replace, :boolean => true) do desc "Whether or not to replace a file that is sourced but exists. This is useful for using file sources purely for initialization." newvalues(:true, :false) defaultto :true end newparam(:force, :boolean => true) do desc "Force the file operation. Currently only used when replacing directories with links." newvalues(:true, :false) defaultto false end newparam(:ignore) do desc "A parameter which omits action on files matching specified patterns during recursion. Uses Ruby's builtin globbing engine, so shell metacharacters are fully supported, e.g. ``[a-z]*``. Matches that would descend into the directory structure are ignored, e.g., ``*/*``." defaultto false validate do |value| unless value.is_a?(Array) or value.is_a?(String) or value == false self.devfail "Ignore must be a string or an Array" end end end newparam(:links) do desc "How to handle links during file actions. During file copying, ``follow`` will copy the target file instead of the link, ``manage`` will copy the link itself, and ``ignore`` will just pass it by. When not copying, ``manage`` and ``ignore`` behave equivalently (because you cannot really ignore links entirely during local recursion), and ``follow`` will manage the file to which the link points." newvalues(:follow, :manage, :ignore) # :ignore and :manage behave equivalently on local files, # but don't copy remote links defaultto :ignore end newparam(:purge, :boolean => true) do desc "Whether unmanaged files should be purged. If you have a filebucket configured the purged files will be uploaded, but if you do not, this will destroy data. Only use this option for generated files unless you really know what you are doing. This option only makes sense when recursively managing directories." defaultto :false newvalues(:true, :false) end # Autorequire any parent directories. autorequire(:file) do + unless self[:path] + raise "no path for %s" % self.ref + end File.dirname(self[:path]) end # Autorequire the owner and group of the file. 
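    # A small sketch of what that gives you (names and paths are
    # illustrative): a file managed with a named owner should pick up an
    # automatic dependency on the matching user resource, when one exists.
    Puppet.type(:user).create(:name => "appuser")
    conf = Puppet.type(:file).create(:path => "/tmp/app.conf", :owner => "appuser")
    conf.autorequire # the returned edges should include one tying user["appuser"] to the file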
{:user => :owner, :group => :group}.each do |type, state| autorequire(type) do if @states.include?(state) # The user/group states automatically converts to IDs next unless should = @states[state].shouldorig val = should[0] if val.is_a?(Integer) or val =~ /^\d+$/ nil else val end end end end validate do if self[:content] and self[:source] self.fail "You cannot specify both content and a source" end end # List files, but only one level deep. def self.list(base = "/") unless FileTest.directory?(base) return [] end files = [] Dir.entries(base).reject { |e| e == "." or e == ".." }.each do |name| path = File.join(base, name) if obj = self[path] obj[:check] = :all files << obj else files << self.create( :name => path, :check => :all ) end end files end @depthfirst = false def argument?(arg) @arghash.include?(arg) end # Determine the user to write files as. def asuser if self.should(:owner) and ! self.should(:owner).is_a?(Symbol) writeable = Puppet::SUIDManager.asuser(self.should(:owner)) { FileTest.writable?(File.dirname(self[:path])) } # If the parent directory is writeable, then we execute # as the user in question. Otherwise we'll rely on # the 'owner' state to do things. if writeable asuser = self.should(:owner) end end return asuser end # We have to do some extra finishing, to retrieve our bucket if # there is one def finish # Let's cache these values, since there should really only be # a couple of these buckets @@filebuckets ||= {} # Look up our bucket, if there is one if @parameters.include?(:backup) and bucket = @parameters[:backup].bucket case bucket when String: if obj = @@filebuckets[bucket] # This sets the @value on :backup, too @parameters[:backup].bucket = obj elsif obj = Puppet.type(:filebucket).bucket(bucket) @@filebuckets[bucket] = obj @parameters[:backup].bucket = obj else self.fail "Could not find filebucket %s" % bucket end when Puppet::Client::Dipper: # things are hunky-dorey else self.fail "Invalid bucket type %s" % bucket.class end end super end # Create any children via recursion or whatever. def eval_generate recurse() end # Deal with backups. def handlebackup(file = nil) # let the path be specified file ||= self[:path] # if they specifically don't want a backup, then just say # we're good unless FileTest.exists?(file) return true end unless self[:backup] return true end case File.stat(file).ftype when "directory": if self[:recurse] # we don't need to backup directories when recurse is on return true else backup = self[:backup] case backup when Puppet::Client::Dipper: notice "Recursively backing up to filebucket" require 'find' Find.find(self[:path]) do |f| if File.file?(f) sum = backup.backup(f) self.info "Filebucketed %s to %s with sum %s" % [f, backup.name, sum] end end return true when String: newfile = file + backup # Just move it, since it's a directory. if FileTest.exists?(newfile) remove_backup(newfile) end begin bfile = file + backup # Ruby 1.8.1 requires the 'preserve' addition, but # later versions do not appear to require it. 
FileUtils.cp_r(file, bfile, :preserve => true) return true rescue => detail # since they said they want a backup, let's error out # if we couldn't make one self.fail "Could not back %s up: %s" % [file, detail.message] end else self.err "Invalid backup type %s" % backup.inspect return false end end when "file": backup = self[:backup] case backup when Puppet::Client::Dipper: sum = backup.backup(file) self.info "Filebucketed to %s with sum %s" % [backup.name, sum] return true when String: newfile = file + backup if FileTest.exists?(newfile) remove_backup(newfile) end begin # FIXME Shouldn't this just use a Puppet object with # 'source' specified? bfile = file + backup # Ruby 1.8.1 requires the 'preserve' addition, but # later versions do not appear to require it. FileUtils.cp(file, bfile, :preserve => true) return true rescue => detail # since they said they want a backup, let's error out # if we couldn't make one self.fail "Could not back %s up: %s" % [file, detail.message] end else self.err "Invalid backup type %s" % backup.inspect return false end when "link": return true else self.notice "Cannot backup files of type %s" % File.stat(file).ftype return false end end def handleignore(children) return children unless self[:ignore] self[:ignore].each { |ignore| ignored = [] Dir.glob(File.join(self[:path],ignore), File::FNM_DOTMATCH) { |match| ignored.push(File.basename(match)) } children = children - ignored } return children end def initialize(hash) # Store a copy of the arguments for later. tmphash = hash.to_hash # Used for caching clients @clients = {} super # Get rid of any duplicate slashes, and remove any trailing slashes. @title = @title.gsub(/\/+/, "/").sub(/\/$/, "") # Clean out as many references to any file paths as possible. # This was the source of many, many bugs. @arghash = tmphash @arghash.delete(self.class.namevar) [:source, :parent].each do |param| if @arghash.include?(param) @arghash.delete(param) end end @stat = nil end # Build a recursive map of a link source def linkrecurse(recurse) target = @states[:target].should method = :lstat if self[:links] == :follow method = :stat end targetstat = nil unless FileTest.exist?(target) return end # Now stat our target targetstat = File.send(method, target) unless targetstat.ftype == "directory" return end # Now that we know our corresponding target is a directory, # change our type info "setting ensure to target" self[:ensure] = :directory unless FileTest.readable? target self.notice "Cannot manage %s: permission denied" % self.name return end children = Dir.entries(target).reject { |d| d =~ /^\.+$/ } # Get rid of ignored children if @parameters.include?(:ignore) children = handleignore(children) end added = [] children.each do |file| Dir.chdir(target) do longname = File.join(target, file) # Files know to create directories when recursion # is enabled and we're making links args = { :recurse => recurse, :ensure => longname } if child = self.newchild(file, true, args) added << child end end end added end # Build up a recursive map of what's around right now def localrecurse(recurse) unless FileTest.exist?(self[:path]) and self.stat.directory? #self.info "%s is not a directory; not recursing" % # self[:path] return end unless FileTest.readable? 
self[:path] self.notice "Cannot manage %s: permission denied" % self.name return end children = Dir.entries(self[:path]) #Get rid of ignored children if @parameters.include?(:ignore) children = handleignore(children) end added = [] children.each { |file| file = File.basename(file) next if file =~ /^\.\.?$/ # skip . and .. options = {:recurse => recurse} if child = self.newchild(file, true, options) # Mark any unmanaged files for removal if purge is set. # Use the array rather than [] because tidy uses this method, too. if @parameters.include?(:purge) and self.purge? info "purging %s" % child.ref child[:ensure] = :absent else child[:require] = self end added << child end } added end # Create a new file or directory object as a child to the current # object. def newchild(path, local, hash = {}) # make local copy of arguments args = @arghash.dup if path =~ %r{^#{File::SEPARATOR}} self.devfail( "Must pass relative paths to PFile#newchild()" ) else path = File.join(self[:path], path) end args[:path] = path unless hash.include?(:recurse) if args.include?(:recurse) if args[:recurse].is_a?(Integer) args[:recurse] -= 1 # reduce the level of recursion end end end hash.each { |key,value| args[key] = value } child = nil klass = nil # We specifically look in @parameters here, because 'linkmaker' isn't # a valid attribute for subclasses, so using 'self[:linkmaker]' throws # an error. if @parameters.include?(:linkmaker) and args.include?(:source) and ! FileTest.directory?(args[:source]) klass = Puppet.type(:symlink) # clean up the args a lot for links old = args.dup args = { :ensure => old[:source], :path => path } else klass = self.class end # The child might already exist because 'localrecurse' runs # before 'sourcerecurse'. I could push the override stuff into # a separate method or something, but the work is the same other # than this last bit, so it doesn't really make sense. if child = klass[path] unless child.parent.object_id == self.object_id self.debug "Not managing more explicit file %s" % path return nil end # This is only necessary for sourcerecurse, because we might have # created the object with different 'should' values than are # set remotely. unless local args.each { |var,value| next if var == :path next if var == :name # behave idempotently unless child.should(var) == value child[var] = value end } end return nil else # create it anew #notice "Creating new file with args %s" % args.inspect args[:parent] = self begin child = klass.implicitcreate(args) # implicit creation can return nil if child.nil? return nil end rescue Puppet::Error => detail self.notice( "Cannot manage: %s" % [detail.message] ) self.debug args.inspect child = nil rescue => detail self.notice( "Cannot manage: %s" % [detail] ) self.debug args.inspect child = nil end end return child end # Files handle paths specially, because they just lengthen their # path names, rather than including the full parent's title each # time. def pathbuilder if defined? @parent # We only need to behave specially when our parent is also # a file if @parent.is_a?(self.class) # Remove the parent file name ppath = @parent.path.sub(/\/?file=.+/, '') tmp = [] if ppath != "/" and ppath != "" tmp << ppath end tmp << self.class.name.to_s + "=" + self.name return tmp else return super end else # The top-level name is always puppet[top], so we don't # bother with that. And we don't add the hostname # here, it gets added in the log server thingy. 
if self.name == "puppet[top]" return ["/"] else # We assume that if we don't have a parent that we # should not cache the path return [self.class.name.to_s + "=" + self.name] end end end # Should we be purging? def purge? @parameters.include?(:purge) and (self[:purge] == :true or self[:purge] == "true") end # Recurse into the directory. This basically just calls 'localrecurse' # and maybe 'sourcerecurse', returning the collection of generated # files. def recurse # are we at the end of the recursion? unless self.recurse? return end recurse = self[:recurse] # we might have a string, rather than a number if recurse.is_a?(String) if recurse =~ /^[0-9]+$/ recurse = Integer(recurse) else # anything else is infinite recursion recurse = true end end if recurse.is_a?(Integer) recurse -= 1 end children = [] # We want to do link-recursing before normal recursion so that all # of the target stuff gets copied over correctly. if @states.include? :target and ret = self.linkrecurse(recurse) children += ret end if ret = self.localrecurse(recurse) children += ret end if @states.include?(:source) and ret = self.sourcerecurse(recurse) children += ret end children end # A simple method for determining whether we should be recursing. def recurse? return false unless @parameters.include?(:recurse) val = @parameters[:recurse].value if val and (val == true or val > 0) return true else return false end end # Remove the old backup. def remove_backup(newfile) if self.class.name == :file and self[:links] != :follow method = :lstat else method = :stat end old = File.send(method, newfile).ftype if old == "directory" raise Puppet::Error, "Will not remove directory backup %s; use a filebucket" % newfile end info "Removing old backup of type %s" % File.send(method, newfile).ftype begin File.unlink(newfile) rescue => detail if Puppet[:trace] puts detail.backtrace end self.err "Could not remove old backup: %s" % detail return false end end # Remove any existing data. This is only used when dealing with # links or directories. def remove_existing(should) return unless s = stat(true) unless handlebackup self.fail "Could not back up; will not replace" end unless should.to_s == "link" return if s.ftype.to_s == should.to_s end case s.ftype when "directory": if self[:force] == :true debug "Removing existing directory for replacement with %s" % should FileUtils.rmtree(self[:path]) else notice "Not replacing directory; use 'force' to override" end when "link", "file": debug "Removing existing %s for replacement with %s" % [s.ftype, should] File.unlink(self[:path]) else self.fail "Could not back up files of type %s" % s.ftype end end # a wrapper method to make sure the file exists before doing anything def retrieve unless stat = self.stat(true) self.debug "File does not exist" @states.each { |name,state| state.is = :absent } # If the file doesn't exist but we have a source, then call # retrieve on that state if @states.include?(:source) @states[:source].retrieve end return end states().each { |state| state.retrieve } end # This recurses against the remote source and makes sure the local # and remote structures match. It's run after 'localrecurse'. This # method only does anything when its corresponding remote entry is # a directory; in that case, this method creates file objects that # correspond to any contained remote files. 
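    # The sort of resource that exercises this path (the server name, the
    # "files" mount, and the paths are placeholders):
    Puppet.type(:file).create(
        :path => "/etc/myapp",
        :source => "puppet://server/files/myapp",
        :recurse => true
    )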
def sourcerecurse(recurse) # we'll set this manually as necessary if @arghash.include?(:ensure) @arghash.delete(:ensure) end r = false if recurse unless recurse == 0 r = 1 end end ignore = self[:ignore] @states[:source].should.each do |source| sourceobj, path = uri2obj(source) # okay, we've got our source object; now we need to # build up a local file structure to match the remote # one server = sourceobj.server desc = server.list(path, self[:links], r, ignore) if desc == "" next end # Now create a new child for every file returned in the list. return desc.split("\n").collect { |line| file, type = line.split("\t") next if file == "/" # skip the listing object name = file.sub(/^\//, '') args = {:source => source + file} if type == file args[:recurse] = nil end self.newchild(name, false, args) }.reject {|c| c.nil? }.each do |f| f.info "sourced" end end return [] end # Set the checksum, from another state. There are multiple states that # modify the contents of a file, and they need the ability to make sure # that the checksum value is in sync. def setchecksum(sum = nil) if @states.include? :checksum if sum @states[:checksum].checksum = sum else # If they didn't pass in a sum, then tell checksum to # figure it out. @states[:checksum].retrieve @states[:checksum].checksum = @states[:checksum].is end end end # Stat our file. Depending on the value of the 'links' attribute, we use # either 'stat' or 'lstat', and we expect the states to use the resulting # stat object accordingly (mostly by testing the 'ftype' value). def stat(refresh = false) method = :stat # Files are the only types that support links if self.class.name == :file and self[:links] != :follow method = :lstat end path = self[:path] # Just skip them when they don't exist at all. unless FileTest.exists?(path) or FileTest.symlink?(path) @stat = nil return @stat end if @stat.nil? or refresh == true begin @stat = File.send(method, self[:path]) rescue Errno::ENOENT => error @stat = nil rescue Errno::EACCES => error self.warning "Could not stat; permission denied" @stat = nil end end return @stat end def uri2obj(source) sourceobj = FileSource.new path = nil unless source devfail "Got a nil source" end if source =~ /^\// source = "file://localhost/%s" % URI.escape(source) sourceobj.mount = "localhost" sourceobj.local = true end begin uri = URI.parse(URI.escape(source)) rescue => detail self.fail "Could not understand source %s: %s" % [source, detail.to_s] end case uri.scheme when "file": unless defined? @@localfileserver @@localfileserver = Puppet::Server::FileServer.new( :Local => true, :Mount => { "/" => "localhost" }, :Config => false ) #@@localfileserver.mount("/", "localhost") end sourceobj.server = @@localfileserver path = "/localhost" + uri.path when "puppet": args = { :Server => uri.host } if uri.port args[:Port] = uri.port end # FIXME We should cache a copy of this server #sourceobj.server = Puppet::NetworkClient.new(args) unless @clients.include?(source) @clients[source] = Puppet::Client::FileClient.new(args) end sourceobj.server = @clients[source] tmp = uri.path if tmp =~ %r{^/(\w+)} sourceobj.mount = $1 path = tmp #path = tmp.sub(%r{^/\w+},'') || "/" else self.fail "Invalid source path %s" % tmp end else self.fail "Got other recursive file proto %s from %s" % [uri.scheme, source] end return [sourceobj, path.sub(/\/\//, '/')] end # Write out the file. We open the file correctly, with all of the # uid and mode and such, and then yield the file handle for actual # writing. 
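    # A sketch of how the content and source states are expected to drive
    # this (the path and contents are illustrative): the caller supplies
    # only the bytes, and write handles the temp file, rename, and checksum.
    example = Puppet.type(:file).create(:path => "/tmp/example.conf")
    example.write do |f|
        f.print "managed by puppet\n"
    end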
         def write(usetmp = true)
             mode = self.should(:mode)
             remove_existing(:file)
             # The temporary file
             path = nil
             if usetmp
                 path = self[:path] + ".puppettmp"
             else
                 path = self[:path]
             end
             # As the correct user and group
             Puppet::SUIDManager.asuser(asuser(), self.should(:group)) do
                 f = nil
                 # Open our file with the correct modes
                 if mode
                     Puppet::Util.withumask(000) do
                         f = File.open(path, File::CREAT|File::WRONLY|File::TRUNC, mode)
                     end
                 else
                     f = File.open(path, File::CREAT|File::WRONLY|File::TRUNC)
                 end
                 # Yield it
                 yield f
                 f.flush
                 f.close
             end
             # And put our new file in place
             if usetmp
                 begin
                     File.rename(path, self[:path])
                 rescue => detail
                     self.err "Could not rename tmp %s for replacing: %s" % [self[:path], detail]
                 ensure
                     # Make sure the created file gets removed
                     if FileTest.exists?(path)
                         File.unlink(path)
                     end
                 end
             end
             # And then update our checksum, so the next run doesn't find it.
             # FIXME This is extra work, because it's going to read the whole
             # file back in again.
             self.setchecksum
         end
     end # Puppet.type(:pfile)
     # the filesource class can't include the path, because the path
     # changes for every file instance
     class FileSource
         attr_accessor :mount, :root, :server, :local
     end
     # We put all of the states in separate files, because there are so many
     # of them. The order these are loaded is important, because it determines
     # the order they are in the state list.
     require 'puppet/type/pfile/checksum'
     require 'puppet/type/pfile/content'   # can create the file
     require 'puppet/type/pfile/source'    # can create the file
     require 'puppet/type/pfile/target'
     require 'puppet/type/pfile/ensure'    # can create the file
     require 'puppet/type/pfile/uid'
     require 'puppet/type/pfile/group'
     require 'puppet/type/pfile/mode'
     require 'puppet/type/pfile/type'
 end
 # $Id$
diff --git a/test/client/master.rb b/test/client/master.rb
index f89d2cae8..9318fdcd6 100755
--- a/test/client/master.rb
+++ b/test/client/master.rb
@@ -1,334 +1,429 @@
 #!/usr/bin/env ruby
 $:.unshift("../lib").unshift("../../lib") if __FILE__ =~ /\.rb$/
 require 'puppet'
 require 'puppet/client'
 require 'puppet/server'
 require 'puppettest'
 class TestMasterClient < Test::Unit::TestCase
     include PuppetTest::ServerTest
+
+    class FakeTrans
+        def initialize
+            @counters = Hash.new { |h,k| h[k] = 0 }
+        end
+        [:evaluate, :report, :cleanup, :addtimes, :tags, :ignoreschedules].each do |m|
+            define_method(m.to_s + "=") do |*args|
+                @counters[m] += 1
+            end
+            define_method(m) do |*args|
+                @counters[m] += 1
+            end
+            define_method(m.to_s + "?") do
+                @counters[m]
+            end
+        end
+    end
+    class FakeComponent
+        attr_accessor :trans
+        def evaluate
+            @trans = FakeTrans.new
+            @trans
+        end
+    end
     def mkmaster(file = nil)
         master = nil
         file ||= mktestmanifest()
         # create our master
         assert_nothing_raised() {
             # this is the default server setup
             master = Puppet::Server::Master.new(
                 :Manifest => file,
                 :UseNodes => false,
                 :Local => true
             )
         }
         return master
     end
     def mkclient(master = nil)
         master ||= mkmaster()
         client = nil
         assert_nothing_raised() {
             client = Puppet::Client::MasterClient.new(
                 :Master => master
             )
         }
         return client
     end
+
+    def mk_fake_client
+        server = Puppet::Server::Master.new :Code => ""
+        master = Puppet::Client::MasterClient.new :Server => server, :Local => true
+
+        # Now create some objects
+        objects = FakeComponent.new
+
+        master.send(:instance_variable_set, "@objects", objects)
+
+        class << master
+            def report(r)
+                @reported ||= 0
+                @reported += 1
+            end
+            def reported
+                @reported ||= 0
+                @reported
+            end
+        end
+        return master, objects
+    end
+
+    def test_apply
+        master, objects = mk_fake_client
+
+        check = Proc.new do |hash|
+            assert(objects.trans, "transaction was not created")
+            trans = objects.trans
+            hash[:yes].each do |m|
+                assert_equal(1, trans.send(m.to_s + "?"), "did not call #{m} enough times")
+            end
+            hash[:no].each do |m|
+                assert_equal(0, trans.send(m.to_s + "?"), "called #{m} too many times")
+            end
+        end
+
+        # First try it with no arguments
+        assert_nothing_raised do
+            master.apply
+        end
+        check.call :yes => %w{evaluate cleanup addtimes}, :no => %w{report tags ignoreschedules}
+        assert_equal(0, master.reported, "master sent report with reports disabled")
+
+
+        # Now enable reporting and make sure the report method gets called
+        Puppet[:report] = true
+        assert_nothing_raised do
+            master.apply
+        end
+        check.call :yes => %w{evaluate cleanup addtimes}, :no => %w{tags ignoreschedules}
+        assert_equal(1, master.reported, "master did not send report")
+
+        # Now try it with tags enabled
+        assert_nothing_raised do
+            master.apply("tags")
+        end
+        check.call :yes => %w{evaluate cleanup tags addtimes}, :no => %w{ignoreschedules}
+        assert_equal(2, master.reported, "master did not send report")
+
+        # and ignoreschedules
+        assert_nothing_raised do
+            master.apply("tags", true)
+        end
+        check.call :yes => %w{evaluate cleanup tags ignoreschedules addtimes}, :no => %w{}
+        assert_equal(3, master.reported, "master did not send report")
+    end
     def test_disable
         manifest = mktestmanifest
         master = mkmaster(manifest)
         client = mkclient(master)
         assert(! FileTest.exists?(@createdfile))
         assert_nothing_raised { client.disable }
         assert_nothing_raised { client.run }
         assert(! FileTest.exists?(@createdfile), "Disabled client ran")
         assert_nothing_raised { client.enable }
         assert_nothing_raised { client.run }
         assert(FileTest.exists?(@createdfile), "Enabled client did not run")
     end
     # Make sure we're getting the client version in our list of facts
     def test_clientversionfact
         facts = nil
         assert_nothing_raised {
             facts = Puppet::Client::MasterClient.facts
         }
         assert_equal(Puppet.version.to_s, facts["clientversion"])
     end
     # Make sure the client correctly locks itself
     def test_locking
         manifest = mktestmanifest
         master = nil
         # First test with a networked master
         client = Puppet::Client::MasterClient.new(
             :Server => "localhost"
         )
         assert_nothing_raised do
             client.lock do
                 pid = nil
                 assert(client.locked?, "Client is not locked")
                 assert(client.lockpid.is_a?(Integer), "PID #{client.lockpid} is, um, not a pid")
             end
         end
         assert(! client.locked?)
         # Now test with a local client
         client = mkclient
         assert_nothing_raised do
             client.lock do
                 pid = nil
                 assert(! client.locked?, "Local client is locked")
             end
         end
         assert(! client.locked?)
     end
     # Make sure non-string facts don't make things go kablooie
     def test_nonstring_facts
         # Add a nonstring fact
         Facter.add("nonstring") do
             setcode { 1 }
         end
         assert_equal(1, Facter.nonstring, "Fact was a string from facter")
         client = mkclient()
         assert(! FileTest.exists?(@createdfile))
         assert_nothing_raised { client.run }
     end
+    # This method is supposed to return the list of files it changed.
     def test_download
         source = tempfile()
         dest = tempfile()
         sfile = File.join(source, "file")
+        dfile = File.join(dest, "file")
         Dir.mkdir(source)
         File.open(sfile, "w") {|f| f.puts "yay"}
         files = []
         assert_nothing_raised do
-
-            Puppet::Client::Master.download(:dest => dest, :source => source, :name => "testing") do |path|
-                files << path
-            end
+            files = Puppet::Client::MasterClient.download(:dest => dest, :source => source, :name => "testing")
         end
+
+        assert(FileTest.directory?(dest), "dest dir was not created")
+        assert(FileTest.file?(dfile), "dest file was not created")
+        assert_equal(File.read(sfile), File.read(dfile), "Dest file had incorrect contents")
+        assert_equal([dest, dfile].sort, files.sort, "Changed files were not returned correctly")
     end
     def test_getplugins
         Puppet[:pluginsource] = tempfile()
         Dir.mkdir(Puppet[:pluginsource])
         myplugin = File.join(Puppet[:pluginsource], "myplugin.rb")
         File.open(myplugin, "w") do |f|
             f.puts %{Puppet::Type.newtype(:myplugin) do
                 newparam(:argument) do
                     isnamevar
                 end
             end
             }
         end
         assert_nothing_raised {
             Puppet::Client::MasterClient.getplugins
         }
         destfile = File.join(Puppet[:plugindest], "myplugin.rb")
         assert(File.exists?(destfile), "Did not get plugin")
         obj = Puppet::Type.type(:myplugin)
         assert(obj, "Did not define type")
         assert(obj.validattr?(:argument), "Did not get namevar")
         # Now modify the file and make sure the type is replaced
         File.open(myplugin, "w") do |f|
             f.puts %{Puppet::Type.newtype(:myplugin) do
                 newparam(:yayness) do
                     isnamevar
                 end
                 newparam(:rahness) do
                 end
            end
            }
         end
         assert_nothing_raised {
             Puppet::Client::MasterClient.getplugins
         }
         destfile = File.join(Puppet[:pluginpath], "myplugin.rb")
         obj = Puppet::Type.type(:myplugin)
         assert(obj, "Did not define type")
         assert(obj.validattr?(:yayness), "Did not get namevar")
         assert(obj.validattr?(:rahness), "Did not get other var")
         assert(! obj.validattr?(:argument), "Old namevar is still valid")
         # Now try it again, to make sure we don't have any objects lying around
         assert_nothing_raised {
             Puppet::Client::MasterClient.getplugins
         }
     end
     def test_getfacts
         Puppet[:factsource] = tempfile()
         Dir.mkdir(Puppet[:factsource])
         hostname = Facter.value(:hostname)
         myfact = File.join(Puppet[:factsource], "myfact.rb")
         File.open(myfact, "w") do |f|
             f.puts %{Facter.add("myfact") do
                 setcode { "yayness" }
             end
             }
         end
         assert_nothing_raised {
             Puppet::Client::MasterClient.getfacts
         }
         destfile = File.join(Puppet[:factdest], "myfact.rb")
         assert(File.exists?(destfile), "Did not get fact")
         assert_equal(hostname, Facter.value(:hostname), "Lost value to hostname")
         assert_equal("yayness", Facter.value(:myfact), "Did not get correct fact value")
         # Now modify the file and make sure the type is replaced
         File.open(myfact, "w") do |f|
             f.puts %{Facter.add("myfact") do
                 setcode { "funtest" }
             end
             }
         end
         assert_nothing_raised {
             Puppet::Client::MasterClient.getfacts
         }
         assert_equal("funtest", Facter.value(:myfact), "Did not reload fact")
         assert_equal(hostname, Facter.value(:hostname), "Lost value to hostname")
         # Now run it again and make sure the fact still loads
         assert_nothing_raised {
             Puppet::Client::MasterClient.getfacts
         }
         assert_equal("funtest", Facter.value(:myfact), "Did not reload fact")
         assert_equal(hostname, Facter.value(:hostname), "Lost value to hostname")
     end
     # Make sure we load all facts on startup.
def test_loadfacts dirs = [tempfile(), tempfile()] count = 0 names = [] dirs.each do |dir| Dir.mkdir(dir) name = "fact%s" % count names << name file = File.join(dir, "%s.rb" % name) # Write out a plugin file File.open(file, "w") do |f| f.puts %{Facter.add("#{name}") do setcode { "#{name}" } end } end count += 1 end Puppet[:factpath] = dirs.join(":") names.each do |name| assert_nil(Facter.value(name), "Somehow retrieved invalid fact") end assert_nothing_raised { Puppet::Client::MasterClient.loadfacts } names.each do |name| assert_equal(name, Facter.value(name), "Did not retrieve facts") end end if Process.uid == 0 # Testing #283. Make sure plugins et al are downloaded as the running user. def test_download_ownership dir = tstdir() dest = tstdir() file = File.join(dir, "file") File.open(file, "w") { |f| f.puts "funtest" } user = nonrootuser() group = nonrootgroup() FileUtils.chown_R(user.name, group.name, dir) assert_equal(user.uid, File.stat(file).uid) assert_equal(group.gid, File.stat(file).gid) assert_nothing_raised { Puppet::Client::MasterClient.download(:dest => dest, :source => dir, :name => "testing" ) {} } destfile = File.join(dest, "file") assert(FileTest.exists?(destfile), "Did not create destfile") assert_equal(Process.uid, File.stat(destfile).uid) end end end # $Id$ diff --git a/test/language/interpreter.rb b/test/language/interpreter.rb index 28c6d41a1..2fe9cc601 100755 --- a/test/language/interpreter.rb +++ b/test/language/interpreter.rb @@ -1,914 +1,914 @@ #!/usr/bin/env ruby $:.unshift("../lib").unshift("../../lib") if __FILE__ =~ /\.rb$/ require 'facter' require 'puppet' require 'puppet/parser/interpreter' require 'puppet/parser/parser' require 'puppet/client' require 'puppet/rails' require 'puppettest' require 'puppettest/resourcetesting' require 'puppettest/parsertesting' require 'puppettest/servertest' require 'puppettest/railstesting' require 'timeout' class TestInterpreter < Test::Unit::TestCase include PuppetTest include PuppetTest::ServerTest include PuppetTest::ParserTesting include PuppetTest::ResourceTesting include PuppetTest::RailsTesting AST = Puppet::Parser::AST # create a simple manifest that uses nodes to create a file def mknodemanifest(node, file) createdfile = tempfile() File.open(file, "w") { |f| f.puts "node %s { file { \"%s\": ensure => file, mode => 755 } }\n" % [node, createdfile] } return [file, createdfile] end def test_simple file = tempfile() File.open(file, "w") { |f| f.puts "file { \"/etc\": owner => root }" } assert_nothing_raised { Puppet::Parser::Interpreter.new(:Manifest => file) } end def test_reloadfiles hostname = Facter["hostname"].value file = tempfile() # Create a first version createdfile = mknodemanifest(hostname, file) interp = nil assert_nothing_raised { interp = Puppet::Parser::Interpreter.new(:Manifest => file) } config = nil assert_nothing_raised { config = interp.run(hostname, {}) } sleep(1) # Now create a new file createdfile = mknodemanifest(hostname, file) newconfig = nil assert_nothing_raised { newconfig = interp.run(hostname, {}) } assert(config != newconfig, "Configs are somehow the same") end if defined? 
ActiveRecord def test_hoststorage assert_nothing_raised { Puppet[:storeconfigs] = true } file = tempfile() File.open(file, "w") { |f| f.puts "file { \"/etc\": owner => root }" } interp = nil assert_nothing_raised { interp = Puppet::Parser::Interpreter.new( :Manifest => file, :UseNodes => false, :ForkSave => false ) } facts = {} Facter.each { |fact, val| facts[fact] = val } objects = nil assert_nothing_raised { objects = interp.run(facts["hostname"], facts) } obj = Puppet::Rails::Host.find_by_name(facts["hostname"]) assert(obj, "Could not find host object") end else $stderr.puts "No ActiveRecord -- skipping collection tests" end if Facter["domain"].value == "madstop.com" begin require 'ldap' $haveldap = true rescue LoadError $stderr.puts "Missing ldap; skipping ldap source tests" $haveldap = false end # Only test ldap stuff on luke's network, since that's the only place we # have data for. if $haveldap def ldapconnect @ldap = LDAP::Conn.new("ldap", 389) @ldap.set_option( LDAP::LDAP_OPT_PROTOCOL_VERSION, 3 ) @ldap.simple_bind("", "") return @ldap end def ldaphost(node) parent = nil classes = nil @ldap.search( "ou=hosts, dc=madstop, dc=com", 2, "(&(objectclass=puppetclient)(cn=%s))" % node ) do |entry| parent = entry.vals("parentnode").shift classes = entry.vals("puppetclass") || [] end return parent, classes end def test_ldapsearch Puppet[:ldapbase] = "ou=hosts, dc=madstop, dc=com" Puppet[:ldapnodes] = true ldapconnect() interp = mkinterp :NodeSources => [:ldap, :code] # Make sure we get nil and nil back when we search for something missing parent, classes = nil assert_nothing_raised do parent, classes = interp.ldapsearch("nosuchhost") end assert_nil(parent, "Got a parent for a non-existent host") assert_nil(classes, "Got classes for a non-existent host") # Make sure we can find 'culain' in ldap assert_nothing_raised do parent, classes = interp.ldapsearch("culain") end realparent, realclasses = ldaphost("culain") assert_equal(realparent, parent) assert_equal(realclasses, classes) end def test_ldapnodes Puppet[:ldapbase] = "ou=hosts, dc=madstop, dc=com" Puppet[:ldapnodes] = true ldapconnect() interp = mkinterp :NodeSources => [:ldap, :code] # culain uses basenode, so create that basenode = interp.newnode([:basenode])[0] # Make sure we get nothing for nonexistent hosts none = nil assert_nothing_raised do none = interp.nodesearch_ldap("nosuchhost") end assert_nil(none, "Got a node for a non-existent host") # Make sure we can find 'culain' in ldap culain = nil assert_nothing_raised do culain = interp.nodesearch_ldap("culain") end assert(culain, "Did not find culain in ldap") assert_nothing_raised do assert_equal(basenode.fqname.to_s, culain.parentclass.fqname.to_s, "Did not get parent class") end end if Puppet::SUIDManager.uid == 0 and Facter["hostname"].value == "culain" def test_ldapreconnect Puppet[:ldapbase] = "ou=hosts, dc=madstop, dc=com" Puppet[:ldapnodes] = true interp = nil assert_nothing_raised { interp = Puppet::Parser::Interpreter.new( :Manifest => mktestmanifest() ) } hostname = "culain.madstop.com" # look for our host assert_nothing_raised { parent, classes = interp.nodesearch_ldap(hostname) } # Now restart ldap system("/etc/init.d/slapd restart 2>/dev/null >/dev/null") sleep(1) # and look again assert_nothing_raised { parent, classes = interp.nodesearch_ldap(hostname) } # Now stop ldap system("/etc/init.d/slapd stop 2>/dev/null >/dev/null") cleanup do system("/etc/init.d/slapd start 2>/dev/null >/dev/null") end # And make sure we actually fail here assert_raise(Puppet::Error) { 
parent, classes = interp.nodesearch_ldap(hostname) } end else $stderr.puts "Run as root for ldap reconnect tests" end end else $stderr.puts "Not in madstop.com; skipping ldap tests" end # Test that node info and default node info in different sources isn't # bad. def test_multiple_nodesources # Create another node source Puppet::Parser::Interpreter.send(:define_method, :nodesearch_multi) do |*names| if names[0] == "default" gennode("default", {:facts => {}}) else nil end end interp = mkinterp :NodeSources => [:multi, :code] interp.newnode(["node"]) obj = nil assert_nothing_raised do obj = interp.nodesearch("node") end assert(obj, "Did not find node") assert_equal("node", obj.fqname) end # Make sure searchnode behaves as we expect. def test_nodesearch # We use two sources here to catch a weird bug where the default # node is used if the host isn't in the first source. interp = mkinterp # Make some nodes names = %w{node1 node2 node2.domain.com} interp.newnode names interp.newnode %w{default} nodes = {} # Make sure we can find them all, using the direct method names.each do |name| nodes[name] = interp.nodesearch_code(name) assert(nodes[name], "Could not find %s" % name) nodes[name].file = __FILE__ end # Now let's try it with the nodesearch method names.each do |name| node = interp.nodesearch(name) assert(node, "Could not find #{name} via nodesearch") end # Make sure we find the default node when we search for nonexistent nodes assert_nothing_raised do default = interp.nodesearch("nosuchnode") assert(default, "Did not find default node") assert_equal("default", default.fqname) end # Now make sure the longest match always wins node = interp.nodesearch(*%w{node2 node2.domain.com}) assert(node, "Did not find node2") assert_equal("node2.domain.com", node.fqname, "Did not get longest match") end def test_parsedate Puppet[:filetimeout] = 0 main = tempfile() sub = tempfile() mainfile = tempfile() subfile = tempfile() count = 0 updatemain = proc do count += 1 File.open(main, "w") { |f| f.puts "import '#{sub}' file { \"#{mainfile}\": content => #{count} } " } end updatesub = proc do count += 1 File.open(sub, "w") { |f| f.puts "file { \"#{subfile}\": content => #{count} } " } end updatemain.call updatesub.call interp = Puppet::Parser::Interpreter.new( :Manifest => main, :Local => true ) date = interp.parsedate # Now update the site file and make sure we catch it sleep 1 updatemain.call newdate = interp.parsedate assert(date != newdate, "Parsedate was not updated") date = newdate # And then the subfile sleep 1 updatesub.call newdate = interp.parsedate assert(date != newdate, "Parsedate was not updated") end # Make sure our node gets added to the node table. def test_newnode interp = mkinterp # First just try calling it directly assert_nothing_raised { interp.newnode("mynode", :code => :yay) } assert_equal(:yay, interp.nodesearch_code("mynode").code) # Now make sure that trying to redefine it throws an error. assert_raise(Puppet::ParseError) { interp.newnode("mynode", {}) } # Now try one with no code assert_nothing_raised { interp.newnode("simplenode", :parent => :foo) } # Make sure trying to get the parentclass throws an error assert_raise(Puppet::ParseError) do interp.nodesearch_code("simplenode").parentclass end # Now define the parent node interp.newnode(:foo) # And make sure we get things back correctly assert_equal("foo", interp.nodesearch_code("simplenode").parentclass.fqname) assert_nil(interp.nodesearch_code("simplenode").code) # Now make sure that trying to redefine it throws an error. 
assert_raise(Puppet::ParseError) { interp.newnode("mynode", {}) } # Test multiple names names = ["one", "two", "three"] assert_nothing_raised { interp.newnode(names, {:code => :yay, :parent => :foo}) } names.each do |name| assert_equal(:yay, interp.nodesearch_code(name).code) assert_equal("foo", interp.nodesearch_code(name).parentclass.name) # Now make sure that trying to redefine it throws an error. assert_raise(Puppet::ParseError) { interp.newnode(name, {}) } end end # Make sure we're correctly generating a node definition. def test_gennode interp = mkinterp interp.newnode "base" interp.newclass "yaytest" # Go through the different iterations: [ [nil, "yaytest"], [nil, ["yaytest"]], [nil, nil], [nil, []], ["base", nil], ["base", []], ["base", "yaytest"], ["base", ["yaytest"]] ].each do |parent, classes| node = nil assert_nothing_raised { node = interp.gennode("nodeA", :classes => classes, :parentnode => parent) } assert_instance_of(Puppet::Parser::AST::Node, node) assert_equal("nodeA", node.name) scope = mkscope :interp => interp assert_nothing_raised do node.evaluate :scope => scope end # If there's a parent, make sure it got evaluated if parent assert(scope.classlist.include?("base"), "Did not evaluate parent node") end # If there are classes make sure they got evaluated if classes == ["yaytest"] or classes == "yaytest" assert(scope.classlist.include?("yaytest"), "Did not evaluate class") end end end def test_fqfind interp = mkinterp table = {} # Define a bunch of things. %w{a c a::b a::b::c a::c a::b::c::d a::b::c::d::e::f c::d}.each do |string| table[string] = string end check = proc do |namespace, hash| hash.each do |thing, result| assert_equal(result, interp.fqfind(namespace, thing, table), "Could not find %s in %s" % [thing, namespace]) end end # Now let's do some test lookups. 
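
        # (Editor's note: the checks that follow encode fqfind's resolution
        # rules -- a relative name is tried against the namespace and then
        # each of its ancestors, while a leading "::" forces a top-level
        # lookup. A hedged sketch of the expected results, reusing the
        # `table` built above:
        #
        #   interp.fqfind("a::b", "c", table)   #=> "a::b::c"
        #   interp.fqfind("a::b", "a", table)   #=> "a"
        #   interp.fqfind("a", "::c", table)    #=> "c"
        #   interp.fqfind("a", "d", table)      #=> nil
        # )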
# First do something really simple check.call "a", "b" => "a::b", "b::c" => "a::b::c", "d" => nil, "::c" => "c" check.call "a::b", "c" => "a::b::c", "b" => "a::b", "a" => "a" check.call "a::b::c::d::e", "c" => "a::b::c", "::c" => "c", "c::d" => "a::b::c::d", "::c::d" => "c::d" check.call "", "a" => "a", "a::c" => "a::c" end def test_newdefine interp = mkinterp assert_nothing_raised { interp.newdefine("mydefine", :code => :yay, :arguments => ["a", stringobj("b")]) } mydefine = interp.finddefine("", "mydefine") assert(mydefine, "Could not find definition") assert_equal("mydefine", interp.finddefine("", "mydefine").type) assert_equal("", mydefine.namespace) assert_equal("mydefine", mydefine.type) assert_raise(Puppet::ParseError) do interp.newdefine("mydefine", :code => :yay, :arguments => ["a", stringobj("b")]) end # Now define the same thing in a different scope assert_nothing_raised { interp.newdefine("other::mydefine", :code => :other, :arguments => ["a", stringobj("b")]) } other = interp.finddefine("other", "mydefine") assert(other, "Could not find definition") assert(interp.finddefine("", "other::mydefine"), "Could not find other::mydefine") assert_equal(:other, other.code) assert_equal("other", other.namespace) assert_equal("mydefine", other.type) assert_equal("other::mydefine", other.fqname) end def test_newclass interp = mkinterp mkcode = proc do |ary| classes = ary.collect do |string| AST::FlatString.new(:value => string) end AST::ASTArray.new(:children => classes) end scope = Puppet::Parser::Scope.new(:interp => interp) # First make sure that code is being appended code = mkcode.call(%w{original code}) klass = nil assert_nothing_raised { klass = interp.newclass("myclass", :code => code) } assert(klass, "Did not return class") assert(interp.findclass("", "myclass"), "Could not find definition") assert_equal("myclass", interp.findclass("", "myclass").type) assert_equal(%w{original code}, interp.findclass("", "myclass").code.evaluate(:scope => scope)) # Now create the same class name in a different scope assert_nothing_raised { klass = interp.newclass("other::myclass", :code => mkcode.call(%w{something diff})) } assert(klass, "Did not return class") other = interp.findclass("other", "myclass") assert(other, "Could not find class") assert(interp.findclass("", "other::myclass"), "Could not find class") assert_equal("other::myclass", other.fqname) - assert_equal("other", other.namespace) + assert_equal("other::myclass", other.namespace) assert_equal("myclass", other.type) assert_equal(%w{something diff}, interp.findclass("other", "myclass").code.evaluate(:scope => scope)) # Newclass behaves differently than the others -- it just appends # the code to the existing class. 
code = mkcode.call(%w{something new}) assert_nothing_raised do klass = interp.newclass("myclass", :code => code) end assert(klass, "Did not return class when appending") assert_equal(%w{original code something new}, interp.findclass("", "myclass").code.evaluate(:scope => scope)) # Make sure newclass deals correctly with nodes with no code klass = interp.newclass("nocode") assert(klass, "Did not return class") assert_nothing_raised do klass = interp.newclass("nocode", :code => mkcode.call(%w{yay test})) end assert(klass, "Did not return class with no code") assert_equal(%w{yay test}, interp.findclass("", "nocode").code.evaluate(:scope => scope)) # Then try merging something into nothing interp.newclass("nocode2", :code => mkcode.call(%w{foo test})) assert(klass, "Did not return class with no code") assert_nothing_raised do klass = interp.newclass("nocode2") end assert(klass, "Did not return class with no code") assert_equal(%w{foo test}, interp.findclass("", "nocode2").code.evaluate(:scope => scope)) # And lastly, nothing and nothing klass = interp.newclass("nocode3") assert(klass, "Did not return class with no code") assert_nothing_raised do klass = interp.newclass("nocode3") end assert(klass, "Did not return class with no code") assert_nil(interp.findclass("", "nocode3").code) end # Now make sure we get appropriate behaviour with parent class conflicts. def test_newclass_parentage interp = mkinterp interp.newclass("base1") interp.newclass("one::two::three") # First create it with no parentclass. assert_nothing_raised { interp.newclass("sub") } assert(interp.findclass("", "sub"), "Could not find definition") assert_nil(interp.findclass("", "sub").parentclass) # Make sure we can't set the parent class to ourself. assert_raise(Puppet::ParseError) { interp.newclass("sub", :parent => "sub") } # Now create another one, with a parentclass. assert_nothing_raised { interp.newclass("sub", :parent => "base1") } # Make sure we get the right parent class, and make sure it's an object. assert_equal(interp.findclass("", "base1"), interp.findclass("", "sub").parentclass) # Now make sure we get a failure if we try to conflict. assert_raise(Puppet::ParseError) { interp.newclass("sub", :parent => "one::two::three") } # Make sure that failure didn't screw us up in any way. assert_equal(interp.findclass("", "base1"), interp.findclass("", "sub").parentclass) # But make sure we can create a class with a fq parent assert_nothing_raised { interp.newclass("another", :parent => "one::two::three") } assert_equal(interp.findclass("", "one::two::three"), interp.findclass("", "another").parentclass) end def test_namesplit interp = mkinterp assert_nothing_raised do {"base::sub" => %w{base sub}, "main" => ["", "main"], "one::two::three::four" => ["one::two::three", "four"], }.each do |name, ary| result = interp.namesplit(name) assert_equal(ary, result, "%s split to %s" % [name, result]) end end end # Make sure you can't have classes and defines with the same name in the # same scope. def test_classes_beat_defines interp = mkinterp assert_nothing_raised { interp.newclass("yay::funtest") } assert_raise(Puppet::ParseError) do interp.newdefine("yay::funtest") end assert_nothing_raised { interp.newdefine("yay::yaytest") } assert_raise(Puppet::ParseError) do interp.newclass("yay::yaytest") end end # Make sure our whole chain works. 
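
    # (Editor's note: "the whole chain" below is define -> resource ->
    # compile; a hedged outline of the entry point the test exercises:
    #
    #   interp.usenodes = false
    #   bucket = interp.evaluate(nil, {})     # no client name, empty facts
    #   bucket.is_a?(Puppet::TransBucket)     #=> true
    # )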
def test_evaluate interp, scope, source = mkclassframing # Create a define that we'll be using interp.newdefine("wrapper", :code => AST::ASTArray.new(:children => [ resourcedef("file", varref("name"), "owner" => "root") ])) # Now create a resource that uses that define define = mkresource(:type => "wrapper", :title => "/tmp/testing", :scope => scope, :source => source, :params => :none) scope.setresource define # And a normal resource scope.setresource mkresource(:type => "file", :title => "/tmp/rahness", :scope => scope, :source => source, :params => {:owner => "root"}) # Now evaluate everything objects = nil interp.usenodes = false assert_nothing_raised do objects = interp.evaluate(nil, {}) end assert_instance_of(Puppet::TransBucket, objects) end def test_evaliterate interp, scope, source = mkclassframing # Create a top-level definition that creates a builtin object interp.newdefine("one", :arguments => [%w{owner}], :code => AST::ASTArray.new(:children => [ resourcedef("file", varref("name"), "owner" => varref("owner") ) ]) ) # Create another definition to call that one interp.newdefine("two", :arguments => [%w{owner}], :code => AST::ASTArray.new(:children => [ resourcedef("one", varref("name"), "owner" => varref("owner") ) ]) ) # And then a third interp.newdefine("three", :arguments => [%w{owner}], :code => AST::ASTArray.new(:children => [ resourcedef("two", varref("name"), "owner" => varref("owner") ) ]) ) three = Puppet::Parser::Resource.new( :type => "three", :title => "/tmp/yayness", :scope => scope, :source => source, :params => paramify(source, :owner => "root") ) scope.setresource(three) ret = nil assert_nothing_raised do ret = scope.unevaluated end assert_instance_of(Array, ret) assert(1, ret.length) assert_equal([three], ret) assert(ret.detect { |r| r.ref == "three[/tmp/yayness]"}, "Did not get three back as unevaluated") # Now translate the whole tree assert_nothing_raised do interp.evaliterate(scope) end # Now make sure we've got our file file = scope.findresource "file[/tmp/yayness]" assert(file, "Could not find file") assert_equal("root", file[:owner]) end # Make sure we fail if there are any leftover overrides to perform. # This would normally mean that someone is trying to override an object # that does not exist. def test_failonleftovers interp, scope, source = mkclassframing # Make sure we don't fail, since there are no overrides assert_nothing_raised do interp.failonleftovers(scope) end # Add an override, and make sure it causes a failure over1 = mkresource :scope => scope, :source => source, :params => {:one => "yay"} scope.setoverride(over1) assert_raise(Puppet::ParseError) do interp.failonleftovers(scope) end end def test_evalnode interp = mkinterp interp.usenodes = false scope = Parser::Scope.new(:interp => interp) facts = Facter.to_hash # First make sure we get no failures when client is nil assert_nothing_raised do interp.evalnode(nil, scope, facts) end # Now define a node interp.newnode "mynode", :code => AST::ASTArray.new(:children => [ resourcedef("file", "/tmp/testing", "owner" => "root") ]) # Eval again, and make sure it does nothing assert_nothing_raised do interp.evalnode("mynode", scope, facts) end assert_nil(scope.findresource("file[/tmp/testing]"), "Eval'ed node with nodes off") # Now enable usenodes and make sure it works. 
interp.usenodes = true assert_nothing_raised do interp.evalnode("mynode", scope, facts) end file = scope.findresource("file[/tmp/testing]") assert_instance_of(Puppet::Parser::Resource, file, "Could not find file") end # This is mostly used for the cfengine module def test_specificclasses interp = mkinterp :Classes => %w{klass1 klass2}, :UseNodes => false # Make sure it's not a failure to be missing classes, since # we're using the cfengine class list, which is huge. assert_nothing_raised do interp.evaluate(nil, {}) end interp.newclass("klass1", :code => AST::ASTArray.new(:children => [ resourcedef("file", "/tmp/klass1", "owner" => "root") ])) interp.newclass("klass2", :code => AST::ASTArray.new(:children => [ resourcedef("file", "/tmp/klass2", "owner" => "root") ])) ret = nil assert_nothing_raised do ret = interp.evaluate(nil, {}) end found = ret.flatten.collect do |res| res.name end assert(found.include?("/tmp/klass1"), "Did not evaluate klass1") assert(found.include?("/tmp/klass2"), "Did not evaluate klass2") end if defined? ActiveRecord::Base # We need to make sure finished objects are stored in the db. def test_finish_before_store railsinit interp = mkinterp node = interp.newnode ["myhost"], :code => AST::ASTArray.new(:children => [ resourcedef("file", "/tmp/yay", :group => "root"), defaultobj("file", :owner => "root") ]) interp.newclass "myclass", :code => AST::ASTArray.new(:children => [ ]) interp.newclass "sub", :parent => "myclass", :code => AST::ASTArray.new(:children => [ resourceoverride("file", "/tmp/yay", :owner => "root") ] ) # Now do the rails crap Puppet[:storeconfigs] = true interp.evaluate("myhost", {}) # And then retrieve the object from rails res = Puppet::Rails::RailsResource.find_by_restype_and_title("file", "/tmp/yay") assert(res, "Did not get resource from rails") param = res.rails_parameters.find_by_name("owner") assert(param, "Did not find owner param") assert_equal("root", param[:value]) end end end # $Id$ diff --git a/test/other/dsl.rb b/test/other/dsl.rb index bb7fc86cf..50eac4781 100755 --- a/test/other/dsl.rb +++ b/test/other/dsl.rb @@ -1,216 +1,217 @@ #!/usr/bin/env ruby $:.unshift("../lib").unshift("../../lib") if __FILE__ =~ /\.rb$/ require 'puppet' require 'puppet/dsl' require 'puppet/autoload' require 'puppettest' class TestDSL < Test::Unit::TestCase include PuppetTest include Puppet::DSL def teardown Puppet::Aspect.clear end def test_aspect a = nil assert_nothing_raised do a = aspect :yaytest do end end assert_equal(a, Puppet::Aspect[:yaytest]) # Now make a child aspect b = nil assert_nothing_raised do b = aspect :child, :inherits => :yaytest do end end assert(b.child_of?(a), "Parentage not set up correctly") assert(b.child_of?(:yaytest), "Parentage not set up for symbols") # Now make another subclass c = nil assert_nothing_raised do c = aspect :kid, :inherits => :child do end end assert(c.child_of?(b), "Parentage not set up correctly") assert(c.child_of?(a), "Parentage is not inherited") # Lastly, make a separate aspect x = nil assert_nothing_raised do x = aspect :other do end end assert(! x.child_of?(a), "Parentage came from nowhere") assert(! x.child_of?(b), "Parentage came from nowhere") assert(! 
x.child_of?(c), "Parentage came from nowhere") # Make sure we can specify the name or the aspect y = nil assert_nothing_raised do x = aspect :naming, :inherits => a do end end assert(x.child_of?(a), "Parentage not set up correctly") # And make sure the parent must exist z = nil assert_raise(RuntimeError) do z = aspect :noparent, :inherits => :nosuchaspect do end end assert(x.child_of?(a), "Parentage not set up correctly") end def test_evaluate parent = child = nil parenteval = childeval = nil assert_nothing_raised do parent = aspect :parent do if parenteval raise "parent already evaluated" end parenteval = true end child = aspect :child, :inherits => parent do if childeval raise "child already evaluated" end childeval = true end end assert_nothing_raised do parent.evaluate() end assert(parenteval, "Parent was not evaluated") assert(parent.evaluated?, "parent was not considered evaluated") # Make sure evaluating twice silently does nothing assert_nothing_raised do parent.evaluate() end # Now evaluate the child assert_nothing_raised do child.evaluate end assert(childeval, "child was not evaluated") assert(child.evaluated?, "child was not considered evaluated") # Now reset them both parenteval = childeval = nil parent.evaluated = false child.evaluated = false # evaluate the child assert_nothing_raised do child.evaluate end # and make sure both get evaluated assert(parenteval, "Parent was not evaluated") assert(parent.evaluated?, "parent was not considered evaluated") assert(childeval, "child was not evaluated") assert(child.evaluated?, "child was not considered evaluated") end def test_acquire evalled = false a = aspect :test do evalled = true end assert_nothing_raised do acquire :test end assert(evalled, "Did not evaluate aspect") assert_nothing_raised do acquire :test end end def test_newresource filetype = Puppet::Type.type(:file) path = tempfile() a = aspect :testing resource = nil assert_nothing_raised do resource = a.newresource filetype, path, :content => "yay", :mode => "640" end assert_instance_of(Puppet::Parser::Resource, resource) assert_equal("yay", resource[:content]) assert_equal("640", resource[:mode]) assert_equal(:testing, resource.source.name) # Now try exporting our aspect assert_nothing_raised do a.evaluate end result = nil assert_nothing_raised do result = a.export end assert_equal([resource], result) # Then try the DSL export assert_nothing_raised do result = export end assert_instance_of(Puppet::TransBucket, result) # And just for kicks, test applying everything assert_nothing_raised do apply() end assert(FileTest.exists?(path), "File did not get created") assert_equal("yay", File.read(path)) end def test_typemethods + Puppet::Type.loadall filetype = Puppet::Type.type(:file) path = tempfile() a = aspect :testing Puppet::Type.eachtype do |type| assert(a.respond_to?(type.name), "Aspects do not have a %s method" % type.name) end file = nil assert_nothing_raised do file = a.file path, :content => "yay", :mode => "640" end assert_instance_of(Puppet::Parser::Resource, file) end end # $Id$ diff --git a/test/other/pgraph.rb b/test/other/pgraph.rb index 3dc232670..88f753131 100644 --- a/test/other/pgraph.rb +++ b/test/other/pgraph.rb @@ -1,116 +1,128 @@ #!/usr/bin/env ruby # # Created by Luke Kanies on 2006-11-16. # Copyright (c) 2006. All rights reserved. 
 $:.unshift("../lib").unshift("../../lib") if __FILE__ =~ /\.rb$/
 require 'puppettest'
 require 'puppettest/graph'
 class TestPGraph < Test::Unit::TestCase
     include PuppetTest
     include PuppetTest::Graph
     Edge = Puppet::Relationship
+    def test_clear
+        graph = Puppet::PGraph.new
+        graph.add_edge!("a", "b")
+        graph.add_vertex! "c"
+        assert_nothing_raised do
+            graph.clear
+        end
+        assert(graph.vertices.empty?, "Still have vertices after clear")
+        assert(graph.edges.empty?, "still have edges after clear")
+    end
+
+
     def test_matching_edges
         graph = Puppet::PGraph.new
         event = Puppet::Event.new(:source => "a", :event => :yay)
         none = Puppet::Event.new(:source => "a", :event => :NONE)
         edges = {}
         edges["a/b"] = Edge["a", "b", {:event => :yay, :callback => :refresh}]
         edges["a/c"] = Edge["a", "c", {:event => :yay, :callback => :refresh}]
         graph.add_edge!(edges["a/b"])
         # Try it for the trivial case of one target and a matching event
         assert_equal([edges["a/b"]], graph.matching_edges([event]))
         # Make sure we get nothing with a different event
         assert_equal([], graph.matching_edges([none]))
         # Set up multiple targets and make sure we get them all back
         graph.add_edge!(edges["a/c"])
         assert_equal([edges["a/b"], edges["a/c"]].sort, graph.matching_edges([event]).sort)
         assert_equal([], graph.matching_edges([none]))
     end
     def test_dependencies
         graph = Puppet::PGraph.new
         graph.add_edge!("a", "b")
         graph.add_edge!("a", "c")
         graph.add_edge!("b", "d")
         assert_equal(%w{b c d}.sort, graph.dependencies("a").sort)
         assert_equal(%w{d}.sort, graph.dependencies("b").sort)
         assert_equal([].sort, graph.dependencies("c").sort)
     end
     # Test that we can take a containment graph and rearrange it by dependencies
     def test_splice
         one, two, middle, top = build_tree
         empty = Container.new("empty", [])
         # Also, add an empty container to top
         top.push empty
         contgraph = top.to_graph
         # Now add a couple of child files, so that we can test whether all containers
         # get spliced, rather than just components.
         # Now make a dependency graph
         deps = Puppet::PGraph.new
         contgraph.vertices.each do |v|
             deps.add_vertex(v)
         end
         # We have to specify a relationship to our empty container, else it never makes it
         # into the dep graph in the first place.
         {one => two, "f" => "c", "h" => middle, "c" => empty}.each do |source, target|
             deps.add_edge!(source, target, :callback => :refresh)
         end
         deps.to_jpg("deps-before")
         deps.splice!(contgraph, Container)
         assert(! deps.cyclic?, "Created a cyclic graph")
         # Now make sure the containers got spliced correctly.
         contgraph.leaves(middle).each do |leaf|
             assert(deps.edge?("h", leaf), "no edge for h => %s" % leaf)
         end
         one.each do |oobj|
             two.each do |tobj|
                 assert(deps.edge?(oobj, tobj), "no %s => %s edge" % [oobj, tobj])
             end
         end
         # Make sure there are no container objects remaining
         c = deps.vertices.find_all { |v| v.is_a?(Container) }
         assert(c.empty?, "Still have containers %s" % c.inspect)
         nons = deps.vertices.find_all { |v| !
v.is_a?(String) } assert(nons.empty?, "still contain non-strings %s" % nons.inspect) deps.to_jpg("deps-after") deps.edges.each do |edge| assert_equal({:callback => :refresh}, edge.label, "Label was not copied on splice") end end # Make sure empty containers are also removed def test_empty_splice end end # $Id$ \ No newline at end of file diff --git a/test/server/pelement.rb b/test/server/pelement.rb index 24836e66c..c86dadc11 100755 --- a/test/server/pelement.rb +++ b/test/server/pelement.rb @@ -1,302 +1,297 @@ #!/usr/bin/env ruby $:.unshift("../lib").unshift("../../lib") if __FILE__ =~ /\.rb$/ require 'puppet' require 'puppet/server/pelement' require 'puppettest' require 'base64' require 'cgi' class TestPElementServer < Test::Unit::TestCase include PuppetTest::ServerTest def verify_described(type, described) described.each do |name, trans| type.clear obj = nil assert_nothing_raised do obj = trans.to_type end assert(obj, "Could not create object") assert_nothing_raised do obj.retrieve end if trans.type == :package assert_equal(Puppet::Type.type(:package).defaultprovider.name, obj[:provider]) end end type.clear end def test_describe_file # Make a file to describe file = tempfile() str = "yayness\n" server = nil assert_nothing_raised do server = Puppet::Server::PElement.new() end # The first run we create the file on the copy, the second run # the file is already there so the object should be in sync 2.times do |i| [ [nil], [[:content, :mode], []], [[], [:content]], [[:content], [:mode]] ].each do |ary| retrieve = ary[0] || [] ignore = ary[1] || [] File.open(file, "w") { |f| f.print str } result = nil assert_nothing_raised do result = server.describe("file", file, *ary) end assert(result, "Could not retrieve file information") assert_instance_of(Puppet::TransObject, result) # Now we have to clear, so that the server's object gets removed Puppet::Type.type(:file).clear # And remove the file, so we can verify it gets recreated if i == 0 File.unlink(file) end object = nil assert_nothing_raised do object = result.to_type end assert(object, "Could not create type") retrieve.each do |state| assert(object.should(state), "Did not retrieve %s" % state) end ignore.each do |state| assert(! object.should(state), "Incorrectly retrieved %s" % state) end if i == 0 assert_events([:file_created], object) else assert_nothing_raised { object.retrieve } assert(object.insync?, "Object was not in sync") end assert(FileTest.exists?(file), "File did not get recreated") if i == 0 if object.should(:content) assert_equal(str, File.read(file), "File contents are not the same") else assert_equal("", File.read(file), "File content was incorrectly made") end end if FileTest.exists? 
file File.unlink(file) end end end end def test_describe_directory # Make a file to describe file = tempfile() server = nil assert_nothing_raised do server = Puppet::Server::PElement.new() end [ [nil], [[:ensure, :checksum, :mode], []], [[], [:checksum]], [[:ensure, :checksum], [:mode]] ].each do |ary| retrieve = ary[0] || [] ignore = ary[1] || [] Dir.mkdir(file) result = nil assert_nothing_raised do result = server.describe("file", file, *ary) end assert(result, "Could not retrieve file information") assert_instance_of(Puppet::TransObject, result) # Now we have to clear, so that the server's object gets removed Puppet::Type.type(:file).clear # And remove the file, so we can verify it gets recreated Dir.rmdir(file) object = nil assert_nothing_raised do object = result.to_type end assert(object, "Could not create type") retrieve.each do |state| assert(object.should(state), "Did not retrieve %s" % state) end ignore.each do |state| assert(! object.should(state), "Incorrectly retrieved %s" % state) end assert_events([:directory_created], object) assert(FileTest.directory?(file), "Directory did not get recreated") Dir.rmdir(file) end end def test_describe_alltypes # Systems get pretty retarded, so I'm going to set the path to some fake # data for ports #Puppet::Type::ParsedType::Port.path = File.join(basedir, # "test/data/types/ports/1") #Puppet.err Puppet::Type::ParsedType::Port.path server = nil assert_nothing_raised do server = Puppet::Server::PElement.new() end require 'etc' # Make the example schedules, for testing Puppet::Type.type(:schedule).mkdefaultschedules Puppet::Type.eachtype do |type| unless type.respond_to? :list Puppet.warning "%s does not respond to :list" % type.name next end next unless type.name == :package Puppet.info "Describing each %s" % type.name # First do a listing from the server bucket = nil assert_nothing_raised { bucket = server.list(type.name) } #type.clear count = 0 described = {} bucket.each do |obj| assert_instance_of(Puppet::TransObject, obj) break if count > 5 described[obj.name] = server.describe(obj.type, obj.name) count += 1 end verify_described(type, described) count = 0 described = {} Puppet.info "listing again" type.list.each do |obj| assert_instance_of(type, obj) break if count > 5 trans = nil assert_nothing_raised do described[obj.name] = server.describe(type.name, obj.name) end count += 1 end if described.empty? 
Puppet.notice "Got no example objects for %s" % type.name end # We separate these, in case the list operation creates objects verify_described(type, described) end end def test_apply server = nil assert_nothing_raised do server = Puppet::Server::PElement.new() end file = tempfile() str = "yayness\n" File.open(file, "w") { |f| f.print str } filetrans = nil assert_nothing_raised { filetrans = server.describe("file", file) } Puppet::Type.type(:file).clear - Puppet.err filetrans[:parent].inspect - - #p filetrans - bucket = Puppet::TransBucket.new bucket.type = "file" bucket.push filetrans - #p bucket - oldbucket = bucket.dup File.unlink(file) assert_nothing_raised { server.apply(bucket) } - assert(FileTest.exists?(file), "File did not get recreated") # Now try it as a "nonlocal" server server.local = false yaml = nil assert_nothing_raised { yaml = Base64.encode64(YAML::dump(bucket)) } Puppet::Type.type(:file).clear File.unlink(file) if Base64.decode64(yaml) =~ /(.{20}Loglevel.{20})/ Puppet.warning "YAML is broken on this machine" return end - #puts Base64.decode64(yaml) + # puts Base64.decode64(yaml) + objects = nil assert_nothing_raised("Could not reload yaml") { YAML::load(Base64.decode64(yaml)) } + # The server is supposed to accept yaml and execute it. assert_nothing_raised { server.apply(yaml) } assert(FileTest.exists?(file), "File did not get recreated from YAML") end end # $Id$
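
Editor's note: the new test_apply in test/client/master.rb drives MasterClient#apply
through a counter-based stand-in for the transaction instead of a real catalog. A
minimal, standalone sketch of that pattern (plain Ruby; CallCounter and its method
list are illustrative only, not part of the patch):

    # Count how often each interesting method is called, so a test can assert
    # "evaluate ran once, report never ran" without building real resources.
    class CallCounter
        def initialize
            @counters = Hash.new { |hash, key| hash[key] = 0 }
        end

        # Define evaluate/report/cleanup and friends as counting no-ops,
        # plus a "name?" reader that returns the call count.
        [:evaluate, :report, :cleanup, :addtimes].each do |name|
            define_method(name) { |*args| @counters[name] += 1 }
            define_method("#{name}?") { @counters[name] }
        end
    end

    fake = CallCounter.new
    fake.evaluate
    fake.cleanup
    fake.report?    #=> 0, i.e. reporting never fired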
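
Editor's note: the reworked test_apply in test/server/pelement.rb also documents the
non-local wire format. A hedged sketch of that round trip, assuming `server` and
`bucket` are built exactly as in the test above:

    require 'yaml'
    require 'base64'

    # The remote path ships the TransBucket as Base64-wrapped YAML; the
    # server is expected to decode it, reload the objects, and apply them.
    payload = Base64.encode64(YAML.dump(bucket))
    YAML.load(Base64.decode64(payload))    # sanity check that it reloads
    server.apply(payload)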