diff --git a/lib/puppet/type/file.rb b/lib/puppet/type/file.rb index 4c16c1b09..64533cda2 100644 --- a/lib/puppet/type/file.rb +++ b/lib/puppet/type/file.rb @@ -1,931 +1,936 @@ require 'digest/md5' require 'cgi' require 'etc' require 'uri' require 'fileutils' require 'enumerator' require 'pathname' require 'puppet/parameter/boolean' require 'puppet/util/diff' require 'puppet/util/checksums' require 'puppet/util/backups' require 'puppet/util/symbolic_file_mode' Puppet::Type.newtype(:file) do include Puppet::Util::MethodHelper include Puppet::Util::Checksums include Puppet::Util::Backups include Puppet::Util::SymbolicFileMode @doc = "Manages files, including their content, ownership, and permissions. The `file` type can manage normal files, directories, and symlinks; the type should be specified in the `ensure` attribute. File contents can be managed directly with the `content` attribute, or downloaded from a remote source using the `source` attribute; the latter can also be used to recursively serve directories (when the `recurse` attribute is set to `true` or `remote`). On Windows, note that file contents are managed in binary mode; Puppet never automatically translates line endings. **Autorequires:** If Puppet is managing the user or group that owns a file, the file resource will autorequire them. If Puppet is managing any parent directories of a file, the file resource will autorequire them." feature :manages_symlinks, "The provider can manage symbolic links." def self.title_patterns [ [ /^(.*?)\/*\Z/m, [ [ :path ] ] ] ] end newparam(:path) do desc <<-'EOT' The path to the file to manage. Must be fully qualified. On Windows, the path should include the drive letter and should use `/` as the separator character (rather than `\\`). EOT isnamevar validate do |value| unless Puppet::Util.absolute_path?(value) fail Puppet::Error, "File paths must be fully qualified, not '#{value}'" end end munge do |value| if value.start_with?('//') and ::File.basename(value) == "/" # This is a UNC path pointing to a share, so don't add a trailing slash ::File.expand_path(value) else ::File.join(::File.split(::File.expand_path(value))) end end end newparam(:backup) do desc <<-EOT Whether (and how) file content should be backed up before being replaced. This attribute works best as a resource default in the site manifest (`File { backup => main }`), so it can affect all file resources. * If set to `false`, file content won't be backed up. * If set to a string beginning with `.` (e.g., `.puppet-bak`), Puppet will copy the file in the same directory with that value as the extension of the backup. (A value of `true` is a synonym for `.puppet-bak`.) * If set to any other string, Puppet will try to back up to a filebucket with that title. See the `filebucket` resource type for more details. (This is the preferred method for backup, since it can be centralized and queried.) Default value: `puppet`, which backs up to a filebucket of the same name. (Puppet automatically creates a **local** filebucket named `puppet` if one doesn't already exist.) Backing up to a local filebucket isn't particularly useful. If you want to make organized use of backups, you will generally want to use the puppet master server's filebucket service. This requires declaring a filebucket resource and a resource default for the `backup` attribute in site.pp: # /etc/puppet/manifests/site.pp filebucket { 'main': path => false, # This is required for remote filebuckets.
server => 'puppet.example.com', # Optional; defaults to the configured puppet master. } File { backup => main, } If you are using multiple puppet master servers, you will want to centralize the contents of the filebucket. Either configure your load balancer to direct all filebucket traffic to a single master, or use something like an out-of-band rsync task to synchronize the content on all masters. EOT defaultto "puppet" munge do |value| # I don't really know how this is happening. value = value.shift if value.is_a?(Array) case value when false, "false", :false false when true, "true", ".puppet-bak", :true ".puppet-bak" when String value else self.fail "Invalid backup type #{value.inspect}" end end end newparam(:recurse) do desc "Whether to recursively manage the _contents_ of a directory. This attribute is only used when `ensure => directory` is set. The allowed values are: * `false` --- The default behavior. The contents of the directory will not be automatically managed. * `remote` --- If the `source` attribute is set, Puppet will automatically manage the contents of the source directory (or directories), ensuring that equivalent files and directories exist on the target system and that their contents match. Using `remote` will disable the `purge` attribute, but results in faster catalog application than `recurse => true`. The `source` attribute is mandatory when `recurse => remote`. * `true` --- If the `source` attribute is set, this behaves similarly to `recurse => remote`, automatically managing files from the source directory. This also enables the `purge` attribute, which can delete unmanaged files from a directory. See the description of `purge` for more details. The `source` attribute is not mandatory when using `recurse => true`, so you can enable purging in directories where all files are managed individually. By default, setting recurse to `remote` or `true` will manage _all_ subdirectories. You can use the `recurselimit` attribute to limit the recursion depth. " newvalues(:true, :false, :remote) validate { |arg| } munge do |value| newval = super(value) case newval when :true; true when :false; false when :remote; :remote else self.fail "Invalid recurse value #{value.inspect}" end end end newparam(:recurselimit) do desc "How far Puppet should descend into subdirectories, when using `ensure => directory` and either `recurse => true` or `recurse => remote`. The recursion limit affects which files will be copied from the `source` directory, as well as which files can be purged when `purge => true`. Setting `recurselimit => 0` is the same as setting `recurse => false` --- Puppet will manage the directory, but all of its contents will be treated as unmanaged. Setting `recurselimit => 1` will manage files and directories that are directly inside the directory, but will not manage the contents of any subdirectories. Setting `recurselimit => 2` will manage the direct contents of the directory, as well as the contents of the _first_ level of subdirectories. And so on --- 3 will manage the contents of the second level of subdirectories, etc." 
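    # A minimal illustrative manifest (the path is hypothetical) following the
    # semantics described above: this would manage the directory's direct
    # contents and the contents of its first level of subdirectories, while
    # anything deeper is left unmanaged.
    #
    #     file { '/var/www/site':
    #       ensure       => directory,
    #       recurse      => true,
    #       recurselimit => 2,
    #     }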
newvalues(/^[0-9]+$/) munge do |value| newval = super(value) case newval when Integer, Fixnum, Bignum; value when /^\d+$/; Integer(value) else self.fail "Invalid recurselimit value #{value.inspect}" end end end newparam(:replace, :boolean => true, :parent => Puppet::Parameter::Boolean) do desc "Whether to replace a file or symlink that already exists on the local system but whose content doesn't match what the `source` or `content` attribute specifies. Setting this to false allows file resources to initialize files without overwriting future changes. Note that this only affects content; Puppet will still manage ownership and permissions. Defaults to `true`." defaultto :true end newparam(:force, :boolean => true, :parent => Puppet::Parameter::Boolean) do desc "Perform the file operation even if it will destroy one or more directories. You must use `force` in order to: * `purge` subdirectories * Replace directories with files or links * Remove a directory when `ensure => absent`" defaultto false end newparam(:ignore) do desc "A parameter which omits action on files matching specified patterns during recursion. Uses Ruby's builtin globbing engine, so shell metacharacters are fully supported, e.g. `[a-z]*`. Matches that would descend into the directory structure are ignored, e.g., `*/*`." validate do |value| unless value.is_a?(Array) or value.is_a?(String) or value == false self.devfail "Ignore must be a string or an Array" end end end newparam(:links) do desc "How to handle links during file actions. During file copying, `follow` will copy the target file instead of the link, `manage` will copy the link itself, and `ignore` will just pass it by. When not copying, `manage` and `ignore` behave equivalently (because you cannot really ignore links entirely during local recursion), and `follow` will manage the file to which the link points." newvalues(:follow, :manage) defaultto :manage end newparam(:purge, :boolean => true, :parent => Puppet::Parameter::Boolean) do desc "Whether unmanaged files should be purged. This option only makes sense when `ensure => directory` and `recurse => true`. * When recursively duplicating an entire directory with the `source` attribute, `purge => true` will automatically purge any files that are not in the source directory. * When managing files in a directory as individual resources, setting `purge => true` will purge any files that aren't being specifically managed. If you have a filebucket configured, the purged files will be uploaded, but if you do not, this will destroy data. Unless `force => true` is set, purging will **not** delete directories, although it will delete the files they contain. If `recurselimit` is set and you aren't using `force => true`, purging will obey the recursion limit; files in any subdirectories deeper than the limit will be treated as unmanaged and left alone." defaultto :false end newparam(:sourceselect) do desc "Whether to copy all valid sources, or just the first one. This parameter only affects recursive directory copies; by default, the first valid source is the only one used, but if this parameter is set to `all`, then all valid sources will have all of their contents copied to the local system. If a given file exists in more than one source, the version from the earliest source in the list will be used." defaultto :first newvalues(:first, :all) end newparam(:show_diff, :boolean => true, :parent => Puppet::Parameter::Boolean) do desc "Whether to display differences when the file changes, defaulting to true. 
This parameter is useful for files that may contain passwords or other secret data, which might otherwise be included in Puppet reports or other insecure outputs. If the global `show_diff` setting is false, then no diffs will be shown even if this parameter is true." defaultto :true end newparam(:validate_cmd) do desc "A command for validating the file's syntax before replacing it. If Puppet would need to rewrite a file due to new `source` or `content`, it will check the new content's validity first. If validation fails, the file resource will fail. This command must have a fully qualified path, and should contain a percent (`%`) token where it would expect an input file. It must exit `0` if the syntax is correct, and non-zero otherwise. The command will be run on the target system while applying the catalog, not on the puppet master. Example: file { '/etc/apache2/apache2.conf': content => 'example', validate_cmd => '/usr/sbin/apache2 -t -f %', } This would replace apache2.conf only if the test returned true. Note that if a validation command requires a `%` as part of its text, you can specify a different placeholder token with the `validate_replacement` attribute." end newparam(:validate_replacement) do desc "The replacement string in a `validate_cmd` that will be replaced with an input file name. Defaults to: `%`" defaultto '%' end # Autorequire the nearest ancestor directory found in the catalog. autorequire(:file) do req = [] path = Pathname.new(self[:path]) if !path.root? # Start at our parent, to avoid autorequiring ourself parents = path.parent.enum_for(:ascend) if found = parents.find { |p| catalog.resource(:file, p.to_s) } req << found.to_s end end # if the resource is a link, make sure the target is created first req << self[:target] if self[:target] req end # Autorequire the owner and group of the file. {:user => :owner, :group => :group}.each do |type, property| autorequire(type) do if @parameters.include?(property) # The user/group property automatically converts to IDs next unless should = @parameters[property].shouldorig val = should[0] if val.is_a?(Integer) or val =~ /^\d+$/ nil else val end end end end CREATORS = [:content, :source, :target] SOURCE_ONLY_CHECKSUMS = [:none, :ctime, :mtime] validate do creator_count = 0 CREATORS.each do |param| creator_count += 1 if self.should(param) end creator_count += 1 if @parameters.include?(:source) self.fail "You cannot specify more than one of #{CREATORS.collect { |p| p.to_s}.join(", ")}" if creator_count > 1 self.fail "You cannot specify a remote recursion without a source" if !self[:source] and self[:recurse] == :remote self.fail "You cannot specify source when using checksum 'none'" if self[:checksum] == :none && !self[:source].nil? SOURCE_ONLY_CHECKSUMS.each do |checksum_type| self.fail "You cannot specify content when using checksum '#{checksum_type}'" if self[:checksum] == checksum_type && !self[:content].nil? end self.warning "Possible error: recurselimit is set but not recurse, no recursion will happen" if !self[:recurse] and self[:recurselimit] + if @parameters[:content] && @parameters[:content].actual_content + # Now that we know the checksum, update content (in case it was created before checksum was known). 
+ @parameters[:content].value = @parameters[:checksum].sum(@parameters[:content].actual_content) + end + provider.validate if provider.respond_to?(:validate) end def self.[](path) return nil unless path super(path.gsub(/\/+/, '/').sub(/\/$/, '')) end def self.instances return [] end # Determine the user to write files as. def asuser if self.should(:owner) and ! self.should(:owner).is_a?(Symbol) writeable = Puppet::Util::SUIDManager.asuser(self.should(:owner)) { FileTest.writable?(::File.dirname(self[:path])) } # If the parent directory is writeable, then we execute # as the user in question. Otherwise we'll rely on # the 'owner' property to do things. asuser = self.should(:owner) if writeable end asuser end def bucket return @bucket if @bucket backup = self[:backup] return nil unless backup return nil if backup =~ /^\./ unless catalog or backup == "puppet" fail "Can not find filebucket for backups without a catalog" end unless catalog and filebucket = catalog.resource(:filebucket, backup) or backup == "puppet" fail "Could not find filebucket #{backup} specified in backup" end return default_bucket unless filebucket @bucket = filebucket.bucket @bucket end def default_bucket Puppet::Type.type(:filebucket).mkdefaultbucket.bucket end # Does the file currently exist? Just checks for whether # we have a stat def exist? stat ? true : false end def present?(current_values) super && current_values[:ensure] != :false end # We have to do some extra finishing, to retrieve our bucket if # there is one. def finish # Look up our bucket, if there is one bucket super end # Create any children via recursion or whatever. def eval_generate return [] unless self.recurse? recurse end def ancestors ancestors = Pathname.new(self[:path]).enum_for(:ascend).map(&:to_s) ancestors.delete(self[:path]) ancestors end def flush # We want to make sure we retrieve metadata anew on each transaction. @parameters.each do |name, param| param.flush if param.respond_to?(:flush) end @stat = :needs_stat end def initialize(hash) # Used for caching clients @clients = {} super # If they've specified a source, we get our 'should' values # from it. unless self[:ensure] if self[:target] self[:ensure] = :link elsif self[:content] self[:ensure] = :file end end @stat = :needs_stat end # Configure discovered resources to be purged. def mark_children_for_purging(children) children.each do |name, child| next if child[:source] child[:ensure] = :absent end end # Create a new file or directory object as a child to the current # object. def newchild(path) full_path = ::File.join(self[:path], path) # Add some new values to our original arguments -- these are the ones # set at initialization. We specifically want to exclude any param # values set by the :source property or any default values. # LAK:NOTE This is kind of silly, because the whole point here is that # the values set at initialization should live as long as the resource # but values set by default or by :source should only live for the transaction # or so. Unfortunately, we don't have a straightforward way to manage # the different lifetimes of this data, so we kludge it like this. # The right-side hash wins in the merge. options = @original_parameters.merge(:path => full_path).reject { |param, value| value.nil? } # These should never be passed to our children. 
[:parent, :ensure, :recurse, :recurselimit, :target, :alias, :source].each do |param| options.delete(param) if options.include?(param) end self.class.new(options) end # Files handle paths specially, because they just lengthen their # path names, rather than including the full parent's title each # time. def pathbuilder # We specifically need to call the method here, so it looks # up our parent in the catalog graph. if parent = parent() # We only need to behave specially when our parent is also # a file if parent.is_a?(self.class) # Remove the parent file name list = parent.pathbuilder list.pop # remove the parent's path info return list << self.ref else return super end else return [self.ref] end end # Recursively generate a list of file resources, which will # be used to copy remote files, manage local files, and/or make links # to map to another directory. def recurse children = (self[:recurse] == :remote) ? {} : recurse_local if self[:target] recurse_link(children) elsif self[:source] recurse_remote(children) end # If we're purging resources, then delete any resource that isn't on the # remote system. mark_children_for_purging(children) if self.purge? # REVISIT: sort_by is more efficient? result = children.values.sort { |a, b| a[:path] <=> b[:path] } remove_less_specific_files(result) end # This is to fix bug #2296, where two files recurse over the same # set of files. It's a rare case, and when it does happen you're # not likely to have many actual conflicts, which is good, because # this is a pretty inefficient implementation. def remove_less_specific_files(files) # REVISIT: is this Windows safe? AltSeparator? mypath = self[:path].split(::File::Separator) other_paths = catalog.vertices. select { |r| r.is_a?(self.class) and r[:path] != self[:path] }. collect { |r| r[:path].split(::File::Separator) }. select { |p| p[0,mypath.length] == mypath } return files if other_paths.empty? files.reject { |file| path = file[:path].split(::File::Separator) other_paths.any? { |p| path[0,p.length] == p } } end # A simple method for determining whether we should be recursing. def recurse? self[:recurse] == true or self[:recurse] == :remote end # Recurse the target of the link. def recurse_link(children) perform_recursion(self[:target]).each do |meta| if meta.relative_path == "." self[:ensure] = :directory next end children[meta.relative_path] ||= newchild(meta.relative_path) if meta.ftype == "directory" children[meta.relative_path][:ensure] = :directory else children[meta.relative_path][:ensure] = :link children[meta.relative_path][:target] = meta.full_path end end children end # Recurse the file itself, returning a Metadata instance for every found file. def recurse_local result = perform_recursion(self[:path]) return {} unless result result.inject({}) do |hash, meta| next hash if meta.relative_path == "." hash[meta.relative_path] = newchild(meta.relative_path) hash end end # Recurse against our remote file. def recurse_remote(children) sourceselect = self[:sourceselect] total = self[:source].collect do |source| next unless result = perform_recursion(source) return if top = result.find { |r| r.relative_path == "." } and top.ftype != "directory" result.each { |data| data.source = "#{source}/#{data.relative_path}" } break result if result and ! result.empty? and sourceselect == :first result end.flatten.compact # This only happens if we have sourceselect == :all unless sourceselect == :first found = [] total.reject! 
do |data| result = found.include?(data.relative_path) found << data.relative_path unless found.include?(data.relative_path) result end end total.each do |meta| if meta.relative_path == "." parameter(:source).metadata = meta next end children[meta.relative_path] ||= newchild(meta.relative_path) children[meta.relative_path][:source] = meta.source if meta.ftype == "file" children[meta.relative_path][:checksum] = Puppet[:digest_algorithm].to_sym end children[meta.relative_path].parameter(:source).metadata = meta end children end def perform_recursion(path) Puppet::FileServing::Metadata.indirection.search( path, :links => self[:links], :recurse => (self[:recurse] == :remote ? true : self[:recurse]), :recurselimit => self[:recurselimit], :ignore => self[:ignore], :checksum_type => (self[:source] || self[:content]) ? self[:checksum] : :none, :environment => catalog.environment_instance ) end # Back up and remove the file or directory at `self[:path]`. # # @param [Symbol] should The file type replacing the current content. # @return [Boolean] True if the file was removed, else False # @raises [fail???] If the current file isn't one of %w{file link directory} and can't be removed. def remove_existing(should) wanted_type = should.to_s current_type = read_current_type if current_type.nil? return false end if can_backup?(current_type) backup_existing end if wanted_type != "link" and current_type == wanted_type return false end case current_type when "directory" return remove_directory(wanted_type) when "link", "file" return remove_file(current_type, wanted_type) else self.fail "Could not back up files of type #{current_type}" end end def retrieve if source = parameter(:source) source.copy_source_values end super end # Set the checksum, from another property. There are multiple # properties that modify the contents of a file, and they need the # ability to make sure that the checksum value is in sync. def setchecksum(sum = nil) if @parameters.include? :checksum if sum @parameters[:checksum].checksum = sum else # If they didn't pass in a sum, then tell checksum to # figure it out. currentvalue = @parameters[:checksum].retrieve @parameters[:checksum].checksum = currentvalue end end end # Should this thing be a normal file? This is a relatively complex # way of determining whether we're trying to create a normal file, # and it's here so that the logic isn't visible in the content property. def should_be_file? return true if self[:ensure] == :file # I.e., it's set to something like "directory" return false if e = self[:ensure] and e != :present # The user doesn't really care, apparently if self[:ensure] == :present return true unless s = stat return(s.ftype == "file" ? true : false) end # If we've gotten here, then :ensure isn't set return true if self[:content] return true if stat and stat.ftype == "file" false end # Stat our file. Depending on the value of the 'links' attribute, we # use either 'stat' or 'lstat', and we expect the properties to use the # resulting stat object accordingly (mostly by testing the 'ftype' # value). # # We use the initial value :needs_stat to ensure we only stat the file once, # but can also keep track of a failed stat (@stat == nil). This also allows # us to re-stat on demand by setting @stat = :needs_stat. 
def stat return @stat unless @stat == :needs_stat method = :stat # Files are the only types that support links if (self.class.name == :file and self[:links] != :follow) or self.class.name == :tidy method = :lstat end @stat = begin Puppet::FileSystem.send(method, self[:path]) rescue Errno::ENOENT => error nil rescue Errno::ENOTDIR => error nil rescue Errno::EACCES => error warning "Could not stat; permission denied" nil end end def to_resource resource = super resource.delete(:target) if resource[:target] == :notlink resource end # Write out the file. Requires the property name for logging. # Write will be done by the content property, along with checksum computation def write(property) remove_existing(:file) mode = self.should(:mode) # might be nil mode_int = mode ? symbolic_mode_to_int(mode, Puppet::Util::DEFAULT_POSIX_MODE) : nil if write_temporary_file? Puppet::Util.replace_file(self[:path], mode_int) do |file| file.binmode content_checksum = write_content(file) file.flush fail_if_checksum_is_wrong(file.path, content_checksum) if validate_checksum? if self[:validate_cmd] output = Puppet::Util::Execution.execute(self[:validate_cmd].gsub(self[:validate_replacement], file.path), :failonfail => true, :combine => true) output.split(/\n/).each { |line| self.debug(line) } end end else umask = mode ? 000 : 022 Puppet::Util.withumask(umask) { ::File.open(self[:path], 'wb', mode_int ) { |f| write_content(f) } } end # make sure all of the modes are actually correct property_fix end private # @return [String] The type of the current file, cast to a string. def read_current_type stat_info = stat if stat_info stat_info.ftype.to_s else nil end end # @return [Boolean] If the current file can be backed up and needs to be backed up. def can_backup?(type) if type == "directory" and not force? # (#18110) Directories cannot be removed without :force, so it doesn't # make sense to back them up. false else true end end # @return [Boolean] True if the directory was removed # @api private def remove_directory(wanted_type) if force? debug "Removing existing directory for replacement with #{wanted_type}" FileUtils.rmtree(self[:path]) stat_needed true else notice "Not removing directory; use 'force' to override" false end end # @return [Boolean] if the file was removed (which is always true currently) # @api private def remove_file(current_type, wanted_type) debug "Removing existing #{current_type} for replacement with #{wanted_type}" Puppet::FileSystem.unlink(self[:path]) stat_needed true end def stat_needed @stat = :needs_stat end # Back up the existing file at a given prior to it being removed # @api private # @raise [Puppet::Error] if the file backup failed # @return [void] def backup_existing unless perform_backup raise Puppet::Error, "Could not back up; will not replace" end end # Should we validate the checksum of the file we're writing? def validate_checksum? self[:checksum] !~ /time/ end # Make sure the file we wrote out is what we think it is. def fail_if_checksum_is_wrong(path, content_checksum) newsum = parameter(:checksum).sum_file(path) return if [:absent, nil, content_checksum].include?(newsum) self.fail "File written to disk did not match checksum; discarding changes (#{content_checksum} vs #{newsum})" end # write the current content. Note that if there is no content property # simply opening the file with 'w' as done in write is enough to truncate # or write an empty length file. def write_content(file) (content = property(:content)) && content.write(file) end def write_temporary_file? 
# unfortunately we don't know the source file size before fetching it # so let's assume the file won't be empty (c = property(:content) and c.length) || @parameters[:source] end # There are some cases where all of the work does not get done on # file creation/modification, so we have to do some extra checking. def property_fix properties.each do |thing| next unless [:mode, :owner, :group, :seluser, :selrole, :seltype, :selrange].include?(thing.name) # Make sure we get a new stat object @stat = :needs_stat currentvalue = thing.retrieve thing.sync unless thing.safe_insync?(currentvalue) end end end # We put all of the properties in separate files, because there are so many # of them. The order these are loaded is important, because it determines # the order they are in the property list. require 'puppet/type/file/checksum' require 'puppet/type/file/content' # can create the file require 'puppet/type/file/source' # can create the file require 'puppet/type/file/target' # creates a different type of file require 'puppet/type/file/ensure' # can create the file require 'puppet/type/file/owner' require 'puppet/type/file/group' require 'puppet/type/file/mode' require 'puppet/type/file/type' require 'puppet/type/file/selcontext' # SELinux file context require 'puppet/type/file/ctime' require 'puppet/type/file/mtime' diff --git a/spec/shared_contexts/checksum.rb b/spec/shared_contexts/checksum.rb index 4d5e0f405..3e9e36b93 100644 --- a/spec/shared_contexts/checksum.rb +++ b/spec/shared_contexts/checksum.rb @@ -1,53 +1,55 @@ # Shared contexts for testing against all supported checksum types. # # These helpers define nested rspec example groups to test code against all our # supported checksum types. Example groups that need to be run against all # types should use the `with_checksum_types` helper which will # create a new example group for each type and will run the given block # in each example group.
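# A minimal usage sketch (the describe label, path, and file name below are
# hypothetical): including the context and calling `with_checksum_types`
# yields one nested example group per checksum type, with `checksum_type`,
# `checksum`, `plaintext`, and `checksum_file` available inside each group.
#
#   describe 'something that checksums files', :uses_checksums => true do
#     include_context 'with supported checksum types'
#
#     with_checksum_types('example_env', 'example_file.txt') do
#       it 'writes the plaintext fixture to the per-type test file' do
#         expect(File.binread(checksum_file)).to eq(plaintext)
#       end
#     end
#   end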
-CHECKSUM_STAT_TIME = Time.now +CHECKSUM_PLAINTEXT = "1\r\n"*4000 CHECKSUM_TYPES_TO_TRY = [ ['md5', 'a7a169ac84bb863b30484d0aa03139c1'], ['md5lite', '22b4182363e81b326e98231fde616782'], ['sha256', '47fcae62967db2fb5cba2fc0d9cf3e6767035d763d825ecda535a7b1928b9746'], - ['sha256lite', 'fd50217a2b0286ba25121bf2297bbe6c197933992de67e4e568f19861444ecf8'], + ['sha256lite', 'fd50217a2b0286ba25121bf2297bbe6c197933992de67e4e568f19861444ecf8'] ] + +CHECKSUM_STAT_TIME = Time.now TIME_TYPES_TO_TRY = [ ['ctime', "#{CHECKSUM_STAT_TIME}"], ['mtime', "#{CHECKSUM_STAT_TIME}"] ] shared_context('with supported checksum types') do def self.with_checksum_types(path, file, &block) def expect_correct_checksum(meta, checksum_type, checksum, type) expect(meta).to_not be_nil expect(meta).to be_instance_of(type) expect(meta.checksum_type).to eq(checksum_type) expect(meta.checksum).to eq("{#{checksum_type}}#{checksum}") end (CHECKSUM_TYPES_TO_TRY + TIME_TYPES_TO_TRY).each do |checksum_type, checksum| describe("when checksum_type is #{checksum_type}") do let(:checksum_type) { checksum_type } - let(:plaintext) { "1\r\n"*4000 } + let(:plaintext) { CHECKSUM_PLAINTEXT } let(:checksum) { checksum } let(:env_path) { tmpfile(path) } let(:checksum_file) { File.join(env_path, file) } def digest(content) Puppet::Util::Checksums.send(checksum_type, content) end before(:each) do FileUtils.mkdir_p(File.dirname(checksum_file)) File.open(checksum_file, "wb") { |f| f.write plaintext } File::Stat.any_instance.stubs(:ctime).returns(CHECKSUM_STAT_TIME) File::Stat.any_instance.stubs(:mtime).returns(CHECKSUM_STAT_TIME) end instance_eval(&block) end end end end diff --git a/spec/unit/type/file/content_spec.rb b/spec/unit/type/file/content_spec.rb index dfd880063..c59708c6a 100755 --- a/spec/unit/type/file/content_spec.rb +++ b/spec/unit/type/file/content_spec.rb @@ -1,521 +1,542 @@ #! 
/usr/bin/env ruby require 'spec_helper' require 'puppet/network/http_pool' require 'puppet/network/resolver' describe Puppet::Type.type(:file).attrclass(:content), :uses_checksums => true do include PuppetSpec::Files + include_context 'with supported checksum types' let(:filename) { tmpfile('testfile') } let(:environment) { Puppet::Node::Environment.create(:testing, []) } let(:catalog) { Puppet::Resource::Catalog.new(:test, environment) } let(:resource) { Puppet::Type.type(:file).new :path => filename, :catalog => catalog } before do File.open(filename, 'w') {|f| f.write "initial file content"} described_class.stubs(:standalone?).returns(false) end around do |example| Puppet.override(:environments => Puppet::Environments::Static.new(environment)) do example.run end end describe "when determining the checksum type" do let(:content) { described_class.new(:resource => resource) } it "should use the type specified in the source checksum if a source is set" do resource[:source] = File.expand_path("/foo") resource.parameter(:source).expects(:checksum).returns "{md5lite}eh" expect(content.checksum_type).to eq(:md5lite) end it "should use the type specified by the checksum parameter if no source is set" do resource[:checksum] = :md5lite expect(content.checksum_type).to eq(:md5lite) end with_digest_algorithms do it "should use the type specified by digest_algorithm by default" do expect(content.checksum_type).to eq(digest_algorithm.intern) end end end describe "when determining the actual content to write" do let(:content) { described_class.new(:resource => resource) } it "should use the set content if available" do content.should = "ehness" expect(content.actual_content).to eq("ehness") end it "should not use the content from the source if the source is set" do source = mock 'source' resource.expects(:parameter).never.with(:source).returns source expect(content.actual_content).to be_nil end end describe "when setting the desired content" do let(:content) { described_class.new(:resource => resource) } it "should make the actual content available via an attribute" do content.stubs(:checksum_type).returns "md5" content.should = "this is some content" expect(content.actual_content).to eq("this is some content") end with_digest_algorithms do it "should store the checksum as the desired content" do d = digest("this is some content") content.stubs(:checksum_type).returns digest_algorithm content.should = "this is some content" expect(content.should).to eq("{#{digest_algorithm}}#{d}") end it "should not checksum 'absent'" do content.should = :absent expect(content.should).to eq(:absent) end it "should accept a checksum as the desired content" do d = digest("this is some content") string = "{#{digest_algorithm}}#{d}" content.should = string expect(content.should).to eq(string) end end it "should convert the value to ASCII-8BIT", :if => "".respond_to?(:encode) do content.should= "Let's make a \u{2603}" expect(content.actual_content).to eq("Let's make a \xE2\x98\x83".force_encoding(Encoding::ASCII_8BIT)) end end describe "when retrieving the current content" do let(:content) { described_class.new(:resource => resource) } it "should return :absent if the file does not exist" do resource.expects(:stat).returns nil expect(content.retrieve).to eq(:absent) end it "should not manage content on directories" do stat = mock 'stat', :ftype => "directory" resource.expects(:stat).returns stat expect(content.retrieve).to be_nil end it "should not manage content on links" do stat = mock 'stat', :ftype => "link" 
resource.expects(:stat).returns stat expect(content.retrieve).to be_nil end it "should always return the checksum as a string" do resource[:checksum] = :mtime stat = mock 'stat', :ftype => "file" resource.expects(:stat).returns stat time = Time.now resource.parameter(:checksum).expects(:mtime_file).with(resource[:path]).returns time expect(content.retrieve).to eq("{mtime}#{time}") end with_digest_algorithms do it "should return the checksum of the file if it exists and is a normal file" do stat = mock 'stat', :ftype => "file" resource.expects(:stat).returns stat resource.parameter(:checksum).expects("#{digest_algorithm}_file".intern).with(resource[:path]).returns "mysum" expect(content.retrieve).to eq("{#{digest_algorithm}}mysum") end end end describe "when testing whether the content is in sync" do let(:content) { described_class.new(:resource => resource) } before do resource[:ensure] = :file end it "should return true if the resource shouldn't be a regular file" do resource.expects(:should_be_file?).returns false content.should = "foo" expect(content).to be_safe_insync("whatever") end it "should warn that no content will be synced to links when ensure is :present" do resource[:ensure] = :present resource[:content] = 'foo' resource.stubs(:should_be_file?).returns false resource.stubs(:stat).returns mock("stat", :ftype => "link") resource.expects(:warning).with {|msg| msg =~ /Ensure set to :present but file type is/} content.insync? :present end it "should return false if the current content is :absent" do content.should = "foo" expect(content).not_to be_safe_insync(:absent) end it "should return false if the file should be a file but is not present" do resource.expects(:should_be_file?).returns true content.should = "foo" expect(content).not_to be_safe_insync(:absent) end describe "and the file exists" do with_digest_algorithms do before do resource.stubs(:stat).returns mock("stat") resource[:checksum] = digest_algorithm content.should = "some content" end it "should return false if the current contents are different from the desired content" do expect(content).not_to be_safe_insync("other content") end it "should return true if the sum for the current contents is the same as the sum for the desired content" do expect(content).to be_safe_insync("{#{digest_algorithm}}" + digest("some content")) end [true, false].product([true, false]).each do |cfg, param| describe "and Puppet[:show_diff] is #{cfg} and show_diff => #{param}" do before do Puppet[:show_diff] = cfg resource.stubs(:show_diff?).returns param resource[:loglevel] = "debug" end if cfg and param it "should display a diff" do content.expects(:diff).returns("my diff").once content.expects(:debug).with("\nmy diff").once expect(content).not_to be_safe_insync("other content") end else it "should not display a diff" do content.expects(:diff).never expect(content).not_to be_safe_insync("other content") end end end end end end describe "and :replace is false" do before do resource.stubs(:replace?).returns false end it "should be insync if the file exists and the content is different" do resource.stubs(:stat).returns mock('stat') expect(content).to be_safe_insync("whatever") end it "should be insync if the file exists and the content is right" do resource.stubs(:stat).returns mock('stat') expect(content).to be_safe_insync("something") end it "should not be insync if the file does not exist" do content.should = "foo" expect(content).not_to be_safe_insync(:absent) end end end - describe "when changing the content" do + describe "when testing 
whether the content is initialized in the resource and in sync" do + CHECKSUM_TYPES_TO_TRY.each do |checksum_type, checksum| + describe "sync with checksum type #{checksum_type} and the file exists" do + before do + @new_resource = Puppet::Type.type(:file).new :ensure => :file, :path => filename, :catalog => catalog, + :content => CHECKSUM_PLAINTEXT, :checksum => checksum_type + @new_resource.stubs(:stat).returns mock('stat') + end + + it "should return false if the sum for the current contents are different from the desired content" do + expect(@new_resource.parameters[:content]).not_to be_safe_insync("other content") + end + + it "should return true if the sum for the current contents is the same as the sum for the desired content" do + expect(@new_resource.parameters[:content]).to be_safe_insync("{#{checksum_type}}#{checksum}") + end + end + end + end + + describe "when changing the content" do let(:content) { described_class.new(:resource => resource) } before do resource.stubs(:[]).with(:path).returns "/boo" resource.stubs(:stat).returns "eh" end it "should use the file's :write method to write the content" do resource.expects(:write).with(:content) content.sync end it "should return :file_changed if the file already existed" do resource.expects(:stat).returns "something" resource.stubs(:write) expect(content.sync).to eq(:file_changed) end it "should return :file_created if the file did not exist" do resource.expects(:stat).returns nil resource.stubs(:write) expect(content.sync).to eq(:file_created) end end describe "when writing" do let(:content) { described_class.new(:resource => resource) } let(:fh) { File.open(filename, 'wb') } it "should attempt to read from the filebucket if no actual content nor source exists" do content.should = "{md5}foo" content.resource.bucket.class.any_instance.stubs(:getfile).returns "foo" content.write(fh) fh.close end describe "from actual content" do before(:each) do content.stubs(:actual_content).returns("this is content") end it "should write to the given file handle" do fh = mock 'filehandle' fh.expects(:print).with("this is content") content.write(fh) end it "should return the current checksum value" do resource.parameter(:checksum).expects(:sum_stream).returns "checksum" expect(content.write(fh)).to eq("checksum") end end describe "from a file bucket" do it "should fail if a file bucket cannot be retrieved" do content.should = "{md5}foo" content.resource.expects(:bucket).returns nil expect { content.write(fh) }.to raise_error(Puppet::Error) end it "should fail if the file bucket cannot find any content" do content.should = "{md5}foo" bucket = stub 'bucket' content.resource.expects(:bucket).returns bucket bucket.expects(:getfile).with("foo").raises "foobar" expect { content.write(fh) }.to raise_error(Puppet::Error) end it "should write the returned content to the file" do content.should = "{md5}foo" bucket = stub 'bucket' content.resource.expects(:bucket).returns bucket bucket.expects(:getfile).with("foo").returns "mycontent" fh = mock 'filehandle' fh.expects(:print).with("mycontent") content.write(fh) end end describe "from local source" do let(:source_content) { "source file content\r\n"*10 } before(:each) do sourcename = tmpfile('source') resource[:backup] = false resource[:source] = sourcename File.open(sourcename, 'wb') {|f| f.write source_content} # This needs to be invoked to properly initialize the content property, # or attempting to write a file will fail. 
resource.newattr(:content) end it "should copy content from the source to the file" do source = resource.parameter(:source) resource.write(source) expect(Puppet::FileSystem.binread(filename)).to eq(source_content) end with_digest_algorithms do it "should return the checksum computed" do File.open(filename, 'wb') do |file| resource[:checksum] = digest_algorithm expect(content.write(file)).to eq("{#{digest_algorithm}}#{digest(source_content)}") end end end end describe 'from remote source' do let(:source_content) { "source file content\n"*10 } let(:source) { resource.newattr(:source) } let(:response) { stub_everything('response') } let(:conn) { mock('connection') } before(:each) do resource[:backup] = false # This needs to be invoked to properly initialize the content property, # or attempting to write a file will fail. resource.newattr(:content) response.stubs(:read_body).multiple_yields(*source_content.lines) conn.stubs(:request_get).yields(response) end it 'should use an explicit fileserver if source starts with puppet://' do response.stubs(:code).returns('200') source.stubs(:metadata).returns stub_everything('metadata', :source => 'puppet://somehostname/test/foo', :ftype => 'file') Puppet::Network::HttpPool.expects(:http_instance).with('somehostname', anything).returns(conn) resource.write(source) end it 'should use the default fileserver if source starts with puppet:///' do response.stubs(:code).returns('200') source.stubs(:metadata).returns stub_everything('metadata', :source => 'puppet:///test/foo', :ftype => 'file') Puppet::Network::HttpPool.expects(:http_instance).with(Puppet.settings[:server], anything).returns(conn) resource.write(source) end it 'should percent encode reserved characters' do response.stubs(:code).returns('200') Puppet::Network::HttpPool.stubs(:http_instance).returns(conn) source.stubs(:metadata).returns stub_everything('metadata', :source => 'puppet:///test/foo bar', :ftype => 'file') conn.unstub(:request_get) conn.expects(:request_get).with("#{Puppet::Network::HTTP::MASTER_URL_PREFIX}/v3/file_content/test/foo%20bar?environment=testing&", anything).yields(response) resource.write(source) end describe 'when handling file_content responses' do before(:each) do Puppet::Network::HttpPool.stubs(:http_instance).returns(conn) source.stubs(:metadata).returns stub_everything('metadata', :source => 'puppet:///test/foo', :ftype => 'file') end it 'should not write anything if source is not found' do response.stubs(:code).returns('404') expect { resource.write(source) }.to raise_error(Net::HTTPError, /404/) expect(File.read(filename)).to eq('initial file content') end it 'should raise an HTTP error in case of server error' do response.stubs(:code).returns('500') expect { resource.write(source) }.to raise_error(Net::HTTPError, /500/) end context 'and the request was successful' do before(:each) { response.stubs(:code).returns '200' } it 'should write the contents to the file' do resource.write(source) expect(Puppet::FileSystem.binread(filename)).to eq(source_content) end with_digest_algorithms do it 'should return the checksum computed' do File.open(filename, 'w') do |file| resource[:checksum] = digest_algorithm expect(content.write(file)).to eq("{#{digest_algorithm}}#{digest(source_content)}") end end end end end end # These are testing the implementation rather than the desired behaviour; while that bites, there are a whole # pile of other methods in the File type that depend on intimate details of this implementation and vice-versa. 
# If these blow up, you are gonna have to review the callers to make sure they don't explode! --daniel 2011-02-01 describe "each_chunk_from should work" do it "when content is a string" do content.each_chunk_from('i_am_a_string') { |chunk| expect(chunk).to eq('i_am_a_string') } end # The following manifest is a case where source and content.should are both set # file { "/tmp/mydir" : # source => '/tmp/sourcedir', # recurse => true, # } it "when content checksum comes from source" do source_param = Puppet::Type.type(:file).attrclass(:source) source = source_param.new(:resource => resource) content.should = "{md5}123abcd" content.expects(:chunk_file_from_source).returns('from_source') content.each_chunk_from(source) { |chunk| expect(chunk).to eq('from_source') } end it "when no content, source, but ensure present" do resource[:ensure] = :present content.each_chunk_from(nil) { |chunk| expect(chunk).to eq('') } end # you might do this if you were just auditing it "when no content, source, but ensure file" do resource[:ensure] = :file content.each_chunk_from(nil) { |chunk| expect(chunk).to eq('') } end it "when source_or_content is nil and content not a checksum" do content.each_chunk_from(nil) { |chunk| expect(chunk).to eq('') } end # the content is munged so that if it's a checksum nil gets passed in it "when content is a checksum it should try to read from filebucket" do content.should = "{md5}123abcd" content.expects(:read_file_from_filebucket).once.returns('im_a_filebucket') content.each_chunk_from(nil) { |chunk| expect(chunk).to eq('im_a_filebucket') } end it "when running as puppet apply" do Puppet[:default_file_terminus] = "file_server" source_or_content = stubs('source_or_content') source_or_content.expects(:content).once.returns :whoo content.each_chunk_from(source_or_content) { |chunk| expect(chunk).to eq(:whoo) } end it "when running from source with a local file" do source_or_content = stubs('source_or_content') source_or_content.expects(:local?).returns true content.expects(:chunk_file_from_disk).with(source_or_content).once.yields 'woot' content.each_chunk_from(source_or_content) { |chunk| expect(chunk).to eq('woot') } end it "when running from source with a remote file" do source_or_content = stubs('source_or_content') source_or_content.expects(:local?).returns false content.expects(:chunk_file_from_source).with(source_or_content).once.yields 'woot' content.each_chunk_from(source_or_content) { |chunk| expect(chunk).to eq('woot') } end end end end
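For reference, a rough sketch of the behaviour exercised by the new validate-block lines and the added content_spec examples (the path below is illustrative; the plaintext and sha256 digest are the fixture values from the shared checksum context above): when both `content` and a non-default `checksum` are given, the content property's desired value is re-expressed as a digest of the selected type, so `safe_insync?` compares checksums of the same algorithm.

  resource = Puppet::Type.type(:file).new(
    :ensure   => :file,
    :path     => '/tmp/example',      # illustrative path
    :content  => "1\r\n" * 4000,      # same plaintext as CHECKSUM_PLAINTEXT
    :checksum => 'sha256'
  )

  # The type's validate hook runs when the resource is instantiated (the new
  # spec relies on this), so the stored desired value of the content property
  # is already a sha256 digest rather than one in the default algorithm:
  resource.parameters[:content].should
  # => "{sha256}47fcae62967db2fb5cba2fc0d9cf3e6767035d763d825ecda535a7b1928b9746"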