diff --git a/lib/puppet/defaults.rb b/lib/puppet/defaults.rb index 27b81c2c2..662c0a125 100644 --- a/lib/puppet/defaults.rb +++ b/lib/puppet/defaults.rb @@ -1,2036 +1,2040 @@ module Puppet def self.default_diffargs if (Facter.value(:kernel) == "AIX" && Facter.value(:kernelmajversion) == "5300") "" else "-u" end end ############################################################################################ # NOTE: For information about the available values for the ":type" property of settings, # see the docs for Settings.define_settings ############################################################################################ AS_DURATION = %q{This setting can be a time interval in seconds (30 or 30s), minutes (30m), hours (6h), days (2d), or years (5y).} STORECONFIGS_ONLY = %q{This setting is only used by the ActiveRecord storeconfigs and inventory backends, which are deprecated.} # This is defined first so that the facter implementation is replaced before other setting defaults are evaluated. define_settings(:main, :cfacter => { :default => false, :type => :boolean, :desc => 'Whether or not to use the native facter (cfacter) implementation instead of the Ruby one (facter). Defaults to false.', :hook => proc do |value| return unless value raise ArgumentError, 'facter has already evaluated facts.' if Facter.instance_variable_get(:@collection) raise ArgumentError, 'cfacter version 0.2.0 or later is not installed.' unless Puppet.features.cfacter? CFacter.initialize end } ) define_settings(:main, :confdir => { :default => nil, :type => :directory, :desc => "The main Puppet configuration directory. The default for this setting is calculated based on the user. If the process is running as root or the user that Puppet is supposed to run as, it defaults to a system directory, but if it's running as any other user, it defaults to being in the user's home directory.", }, :vardir => { :default => nil, :type => :directory, :owner => "service", :group => "service", :desc => "Where Puppet stores dynamic and growing data. The default for this setting is calculated specially, like `confdir`_.", }, ### NOTE: this setting is usually being set to a symbol value. We don't officially have a ### setting type for that yet, but we might want to consider creating one. :name => { :default => nil, :desc => "The name of the application, if we are running as one. The default is essentially $0 without the path or `.rb`.", } ) define_settings(:main, :logdir => { :default => nil, :type => :directory, :mode => "0750", :owner => "service", :group => "service", :desc => "The directory in which to store log files", }, :log_level => { :default => 'notice', :type => :enum, :values => ["debug","info","notice","warning","err","alert","emerg","crit"], :desc => "Default logging level for messages from Puppet. Allowed values are: * debug * info * notice * warning * err * alert * emerg * crit ", }, :disable_warnings => { :default => [], :type => :array, :desc => "A comma-separated list of warning types to suppress. If large numbers of warnings are making Puppet's logs too large or difficult to use, you can temporarily silence them with this setting. If you are preparing to upgrade Puppet to a new major version, you should re-enable all warnings for a while. Valid values for this setting are: * `deprecations` --- disables deprecation warnings.", :hook => proc do |value| values = munge(value) valid = %w[deprecations] invalid = values - (values & valid) if not invalid.empty? 
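# A minimal illustration of the check below (hypothetical values, assuming `munge` has split the
# comma-separated setting into an array): with `disable_warnings = deprecations,bogus` in
# puppet.conf, `valid` is ["deprecations"], so `invalid` ends up as ["bogus"] and the error
# below is raised.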
raise ArgumentError, "Cannot disable unrecognized warning types #{invalid.inspect}. Valid values are #{valid.inspect}." end end } ) define_settings(:main, :priority => { :default => nil, :type => :priority, :desc => "The scheduling priority of the process. Valid values are 'high', 'normal', 'low', or 'idle', which are mapped to platform-specific values. The priority can also be specified as an integer value and will be passed as is, e.g. -5. Puppet must be running as a privileged user in order to increase scheduling priority.", }, :trace => { :default => false, :type => :boolean, :desc => "Whether to print stack traces on some errors", }, :profile => { :default => false, :type => :boolean, :desc => "Whether to enable experimental performance profiling", }, :autoflush => { :default => true, :type => :boolean, :desc => "Whether log files should always flush to disk.", :hook => proc { |value| Log.autoflush = value } }, :syslogfacility => { :default => "daemon", :desc => "What syslog facility to use when logging to syslog. Syslog has a fixed list of valid facilities, and you must choose one of those; you cannot just make one up." }, :statedir => { :default => "$vardir/state", :type => :directory, :mode => "01755", :desc => "The directory where Puppet state is stored. Generally, this directory can be removed without causing harm (although it might result in spurious service restarts)." }, :rundir => { :default => nil, :type => :directory, :mode => "0755", :owner => "service", :group => "service", :desc => "Where Puppet PID files are kept." }, :genconfig => { :default => false, :type => :boolean, :desc => "When true, causes Puppet applications to print an example config file to stdout and exit. The example will include descriptions of each setting, and the current (or default) value of each setting, incorporating any settings overridden on the CLI (with the exception of `genconfig` itself). This setting only makes sense when specified on the command line as `--genconfig`.", }, :genmanifest => { :default => false, :type => :boolean, :desc => "Whether to just print a manifest to stdout and exit. Only makes sense when specified on the command line as `--genmanifest`. Takes into account arguments specified on the CLI.", }, :configprint => { :default => "", :desc => "Print the value of a specific configuration setting. If the name of a setting is provided for this, then the value is printed and puppet exits. Comma-separate multiple values. For a list of all values, specify 'all'.", }, :color => { :default => "ansi", :type => :string, :desc => "Whether to use colors when logging to the console. Valid values are `ansi` (equivalent to `true`), `html`, and `false`, which produces no color. Defaults to false on Windows, as its console does not support ansi colors.", }, :mkusers => { :default => false, :type => :boolean, :desc => "Whether to create the necessary user and group that puppet agent will run as.", }, :manage_internal_file_permissions => { :default => true, :type => :boolean, :desc => "Whether Puppet should manage the owner, group, and mode of files it uses internally", }, :onetime => { :default => false, :type => :boolean, :desc => "Perform one configuration run and exit, rather than spawning a long-running daemon. This is useful for interactively running puppet agent, or running puppet agent from cron.", :short => 'o', }, :path => { :default => "none", :desc => "The shell search path. 
Defaults to whatever is inherited from the parent process.", :call_hook => :on_define_and_write, :hook => proc do |value| ENV["PATH"] = "" if ENV["PATH"].nil? ENV["PATH"] = value unless value == "none" paths = ENV["PATH"].split(File::PATH_SEPARATOR) Puppet::Util::Platform.default_paths.each do |path| ENV["PATH"] += File::PATH_SEPARATOR + path unless paths.include?(path) end value end }, :libdir => { :type => :directory, :default => "$vardir/lib", :desc => "An extra search path for Puppet. This is only useful for those files that Puppet will load on demand, and is only guaranteed to work for those cases. In fact, the autoload mechanism is responsible for making sure this directory is in Ruby's search path\n", :call_hook => :on_initialize_and_write, :hook => proc do |value| $LOAD_PATH.delete(@oldlibdir) if defined?(@oldlibdir) and $LOAD_PATH.include?(@oldlibdir) @oldlibdir = value $LOAD_PATH << value end }, :ignoreimport => { :default => false, :type => :boolean, :desc => "If true, allows the parser to continue without requiring all files referenced with `import` statements to exist. This setting was primarily designed for use with commit hooks for parse-checking.", }, :environment => { :default => "production", :desc => "The environment Puppet is running in. For clients (e.g., `puppet agent`) this determines the environment itself, which is used to find modules and much more. For servers (i.e., `puppet master`) this provides the default environment for nodes we know nothing about." }, :environmentpath => { :default => "", :desc => "A search path for directory environments, as a list of directories separated by the system path separator character. (The POSIX path separator is ':', and the Windows path separator is ';'.) This setting must have a value set to enable **directory environments.** The recommended value is `$confdir/environments`. For more details, see http://docs.puppetlabs.com/puppet/latest/reference/environments.html", :type => :path, }, :always_cache_features => { :type => :boolean, :default => false, :desc => <<-'EOT' Affects how we cache attempts to load Puppet 'features'. If false, then calls to `Puppet.features.?` will always attempt to load the feature (which can be an expensive operation) unless it has already been loaded successfully. This makes it possible for a single agent run to, e.g., install a package that provides the underlying capabilities for a feature, and then later load that feature during the same run (even if the feature had been tested earlier and had not been available). If this setting is set to true, then features will only be checked once, and if they are not available, the negative result is cached and returned for all subsequent attempts to load the feature. This behavior is almost always appropriate for the server, and can result in a significant performance improvement for features that are checked frequently. EOT }, :diff_args => { :default => lambda { default_diffargs }, :desc => "Which arguments to pass to the diff command when printing differences between files. The command to use can be chosen with the `diff` setting.", }, :diff => { :default => (Puppet.features.microsoft_windows? ? "" : "diff"), :desc => "Which diff command to use when printing differences between files. This setting has no default value on Windows, as standard `diff` is not available, but Puppet can use many third-party diff tools.", }, :show_diff => { :type => :boolean, :default => false, :desc => "Whether to log and report a contextual diff when files are being replaced. 
This causes partial file contents to pass through Puppet's normal logging and reporting system, so this setting should be used with caution if you are sending Puppet's reports to an insecure destination. This feature currently requires the `diff/lcs` Ruby library.", }, :daemonize => { :type => :boolean, :default => (Puppet.features.microsoft_windows? ? false : true), :desc => "Whether to send the process into the background. This defaults to true on POSIX systems, and to false on Windows (where Puppet currently cannot daemonize).", :short => "D", :hook => proc do |value| if value and Puppet.features.microsoft_windows? raise "Cannot daemonize on Windows" end end }, :maximum_uid => { :default => 4294967290, :desc => "The maximum allowed UID. Some platforms use negative UIDs but then ship with tools that do not know how to handle signed ints, so the UIDs show up as huge numbers that can then not be fed back into the system. This is a hackish way to fail in a slightly more useful way when that happens.", }, :route_file => { :default => "$confdir/routes.yaml", :desc => "The YAML file containing indirector route configuration.", }, :node_terminus => { :type => :terminus, :default => "plain", :desc => "Where to find information about nodes.", }, :node_cache_terminus => { :type => :terminus, :default => nil, :desc => "How to store cached nodes. Valid values are (none), 'json', 'msgpack', 'yaml', or write-only yaml ('write_only_yaml'). The master application defaults to 'write_only_yaml', all others to none.", }, :data_binding_terminus => { :type => :terminus, :default => "hiera", :desc => "Where to retrieve information about data.", }, :hiera_config => { :default => "$confdir/hiera.yaml", :desc => "The hiera configuration file. Puppet only reads this file on startup, so you must restart the puppet master every time you edit it.", :type => :file, }, :binder => { :default => false, :desc => "Turns the binding system on or off. This includes bindings in modules. The binding system aggregates data from modules and other locations and makes them available for lookup. The binding system is experimental and any or all of it may change.", :type => :boolean, }, :binder_config => { :default => nil, :desc => "The binder configuration file. Puppet reads this file on each request to configure the bindings system. If set to nil (the default), a $confdir/binder_config.yaml is optionally loaded. If it does not exist, a default configuration is used. If the `binder_config` setting is specified, it must reference a valid and existing yaml file.", :type => :file, }, :catalog_terminus => { :type => :terminus, :default => "compiler", :desc => "Where to get node catalogs. This is useful to change if, for instance, you'd like to pre-compile catalogs and store them in memcached or some other easily-accessed store.", }, :catalog_cache_terminus => { :type => :terminus, :default => nil, :desc => "How to store cached catalogs. Valid values are 'json', 'msgpack' and 'yaml'. The agent application defaults to 'json'." }, :facts_terminus => { :default => 'facter', :desc => "The node facts terminus.", :call_hook => :on_initialize_and_write, :hook => proc do |value| require 'puppet/node/facts' # Cache to YAML if we're uploading facts away if %w[rest inventory_service].include?
value.to_s Puppet.info "configuring the YAML fact cache because a remote terminus is active" Puppet::Node::Facts.indirection.cache_class = :yaml end end }, :inventory_terminus => { :type => :terminus, :default => "$facts_terminus", :desc => "Should usually be the same as the facts terminus", }, :default_file_terminus => { :type => :terminus, :default => "rest", :desc => "The default source for files if no server is given in a uri, e.g. puppet:///file. The default of `rest` causes the file to be retrieved using the `server` setting. When running `apply` the default is `file_server`, causing requests to be filled locally." }, :httplog => { :default => "$logdir/http.log", :type => :file, :owner => "root", :mode => "0640", :desc => "Where the puppet agent web server logs.", }, :http_proxy_host => { :default => "none", :desc => "The HTTP proxy host to use for outgoing connections. Note: You may need to use a FQDN for the server hostname when using a proxy. Environment variable http_proxy or HTTP_PROXY will override this value", }, :http_proxy_port => { :default => 3128, :desc => "The HTTP proxy port to use for outgoing connections", }, :http_proxy_user => { :default => "none", - :desc => "The user name for an authenticated HTTP proxy. Requires http_proxy_host.", + :desc => "The user name for an authenticated HTTP proxy. Requires the `http_proxy_host` setting.", }, :http_proxy_password =>{ :default => "none", :hook => proc do |value| if Puppet.settings[:http_proxy_password] =~ /[@!# \/]/ - raise "Special characters in passwords must be URL compliant, we received #{value}" + raise "Passwords set in the http_proxy_password setting must be valid as part of a URL, and any reserved characters must be URL-encoded. We received: #{value}" end end, - :desc => "The password for the user of an authenticated HTTP proxy. Requires http_proxy_user. - NOTE: Special characters must be escaped or encoded for URL compliance", + :desc => "The password for the user of an authenticated HTTP proxy. + Requires the `http_proxy_user` setting. + + Note that passwords must be valid when used as part of a URL. If a password + contains any characters with special meanings in URLs (as specified by RFC 3986 + section 2.2), they must be URL-encoded. (For example, `#` would become `%23`.)", }, :http_keepalive_timeout => { :default => "4s", :type => :duration, :desc => "The maximum amount of time a persistent HTTP connection can remain idle in the connection pool, before it is closed. This timeout should be shorter than the keepalive timeout used on the HTTP server, e.g. Apache KeepAliveTimeout directive. #{AS_DURATION}" }, :http_debug => { :default => false, :type => :boolean, :desc => "Whether to write HTTP request and responses to stderr. This should never be used in a production environment." }, :filetimeout => { :default => "15s", :type => :duration, :desc => "The minimum time to wait between checking for updates in configuration files. This timeout determines how quickly Puppet checks whether a file (such as manifests or templates) has changed on disk. #{AS_DURATION}", }, :environment_timeout => { :default => "unlimited", :type => :ttl, :desc => "The time to live for a cached environment. #{AS_DURATION} This setting can also be set to `unlimited`, which causes the environment to be cached until the master is restarted." 
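# A minimal sketch of URL-encoding a proxy password for the `http_proxy_password` setting
# described above, assuming Ruby's standard-library ERB::Util is an acceptable way to do the
# encoding (the sample password is hypothetical):
#
#   require 'erb'
#   ERB::Util.url_encode("p@ss#word")   # => "p%40ss%23word"
#
# The encoded form is what would go in puppet.conf, e.g. `http_proxy_password = p%40ss%23word`.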
}, :queue_type => { :default => "stomp", :desc => "Which type of queue to use for asynchronous processing.", }, :queue_source => { :default => "stomp://localhost:61613/", :desc => "The URI of the queue to use for asynchronous processing. If your stomp server requires authentication, you can include it in the URI as long as your stomp client library is at least 1.1.1.", }, :async_storeconfigs => { :default => false, :type => :boolean, :desc => "Whether to use a queueing system to provide asynchronous database integration. Requires that `puppet queue` be running.", :hook => proc do |value| if value # This reconfigures the termini for Node, Facts, and Catalog Puppet.settings.override_default(:storeconfigs, true) # But then we modify the configuration Puppet::Resource::Catalog.indirection.cache_class = :queue Puppet.settings.override_default(:catalog_cache_terminus, :queue) else raise "Cannot disable asynchronous storeconfigs in a running process" end end }, :thin_storeconfigs => { :default => false, :type => :boolean, :desc => "Boolean; whether Puppet should store only facts and exported resources in the storeconfigs database. This will improve the performance of exported resources with the older `active_record` backend, but will disable external tools that search the storeconfigs database. Thinning catalogs is generally unnecessary when using PuppetDB to store catalogs.", :hook => proc do |value| Puppet.settings.override_default(:storeconfigs, true) if value end }, :config_version => { :default => "", :desc => "How to determine the configuration version. By default, it will be the time that the configuration is parsed, but you can provide a shell script to override how the version is determined. The output of this script will be added to every log message in the reports, allowing you to correlate changes on your hosts to the source version on the server. Setting a global value for config_version in puppet.conf is deprecated. Please set a per-environment value in environment.conf instead. For more info, see http://docs.puppetlabs.com/puppet/latest/reference/environments.html", :deprecated => :allowed_on_commandline, }, :zlib => { :default => true, :type => :boolean, :desc => "Boolean; whether to use the zlib library", }, :prerun_command => { :default => "", :desc => "A command to run before every agent run. If this command returns a non-zero return code, the entire Puppet run will fail.", }, :postrun_command => { :default => "", :desc => "A command to run after every agent run. If this command returns a non-zero return code, the entire Puppet run will be considered to have failed, even though it might have performed work during the normal run.", }, :freeze_main => { :default => false, :type => :boolean, :desc => "Freezes the 'main' class, disallowing any code to be added to it. This essentially means that you can't have any code outside of a node, class, or definition other than in the site manifest.", }, :stringify_facts => { :default => true, :type => :boolean, :desc => "Flatten fact values to strings using #to_s. Means you can't have arrays or hashes as fact values. (DEPRECATED) This option will be removed in Puppet 4.0.", }, :trusted_node_data => { :default => false, :type => :boolean, :desc => "Stores trusted node data in a hash called $trusted.
When true also prevents $trusted from being overridden in any scope.", }, :immutable_node_data => { :default => '$trusted_node_data', :type => :boolean, :desc => "When true, also prevents $trusted and $facts from being overridden in any scope", } ) Puppet.define_settings(:module_tool, :module_repository => { :default => 'https://forgeapi.puppetlabs.com', :desc => "The module repository", }, :module_working_dir => { :default => '$vardir/puppet-module', :desc => "The directory into which module tool data is stored", }, :module_skeleton_dir => { :default => '$module_working_dir/skeleton', :desc => "The directory which the skeleton for module tool generate is stored.", }, :forge_authorization => { :default => nil, :desc => "The authorization key to connect to the Puppet Forge. Leave blank for unauthorized or license based connections", }, :module_groups => { :default => nil, :desc => "Extra module groups to request from the Puppet Forge", } ) Puppet.define_settings( :main, # We have to downcase the fqdn, because the current ssl stuff (as oppsed to in master) doesn't have good facilities for # manipulating naming. :certname => { :default => lambda { Puppet::Settings.default_certname.downcase }, :desc => "The name to use when handling certificates. When a node requests a certificate from the CA puppet master, it uses the value of the `certname` setting as its requested Subject CN. This is the name used when managing a node's permissions in [auth.conf](http://docs.puppetlabs.com/puppet/latest/reference/config_file_auth.html). In most cases, it is also used as the node's name when matching [node definitions](http://docs.puppetlabs.com/puppet/latest/reference/lang_node_definitions.html) and requesting data from an ENC. (This can be changed with the `node_name_value` and `node_name_fact` settings, although you should only do so if you have a compelling reason.) A node's certname is available in Puppet manifests as `$trusted['certname']`. (See [Facts and Built-In Variables](http://docs.puppetlabs.com/puppet/latest/reference/lang_facts_and_builtin_vars.html) for more details.) * For best compatibility, you should limit the value of `certname` to only use letters, numbers, periods, underscores, and dashes. (That is, it should match `/\A[a-z0-9._-]+\Z/`.) * The special value `ca` is reserved, and can't be used as the certname for a normal node. Defaults to the node's fully qualified domain name.", :hook => proc { |value| raise(ArgumentError, "Certificate names must be lower case; see #1168") unless value == value.downcase }}, :certdnsnames => { :default => '', :hook => proc do |value| unless value.nil? or value == '' then Puppet.warning < < { :default => '', :desc => < { :default => "$confdir/csr_attributes.yaml", :type => :file, :desc => < { :default => "$ssldir/certs", :type => :directory, :mode => "0755", :owner => "service", :group => "service", :desc => "The certificate directory." }, :ssldir => { :default => "$confdir/ssl", :type => :directory, :mode => "0771", :owner => "service", :group => "service", :desc => "Where SSL certificates are kept." }, :publickeydir => { :default => "$ssldir/public_keys", :type => :directory, :mode => "0755", :owner => "service", :group => "service", :desc => "The public key directory." }, :requestdir => { :default => "$ssldir/certificate_requests", :type => :directory, :mode => "0755", :owner => "service", :group => "service", :desc => "Where host certificate requests are stored." 
}, :privatekeydir => { :default => "$ssldir/private_keys", :type => :directory, :mode => "0750", :owner => "service", :group => "service", :desc => "The private key directory." }, :privatedir => { :default => "$ssldir/private", :type => :directory, :mode => "0750", :owner => "service", :group => "service", :desc => "Where the client stores private certificate information." }, :passfile => { :default => "$privatedir/password", :type => :file, :mode => "0640", :owner => "service", :group => "service", :desc => "Where puppet agent stores the password for its private key. Generally unused." }, :hostcsr => { :default => "$ssldir/csr_$certname.pem", :type => :file, :mode => "0644", :owner => "service", :group => "service", :desc => "Where individual hosts store and look for their certificate requests." }, :hostcert => { :default => "$certdir/$certname.pem", :type => :file, :mode => "0644", :owner => "service", :group => "service", :desc => "Where individual hosts store and look for their certificates." }, :hostprivkey => { :default => "$privatekeydir/$certname.pem", :type => :file, :mode => "0640", :owner => "service", :group => "service", :desc => "Where individual hosts store and look for their private key." }, :hostpubkey => { :default => "$publickeydir/$certname.pem", :type => :file, :mode => "0644", :owner => "service", :group => "service", :desc => "Where individual hosts store and look for their public key." }, :localcacert => { :default => "$certdir/ca.pem", :type => :file, :mode => "0644", :owner => "service", :group => "service", :desc => "Where each client stores the CA certificate." }, :ssl_client_ca_auth => { :type => :file, :mode => "0644", :owner => "service", :group => "service", :desc => "Certificate authorities who issue server certificates. SSL servers will not be considered authentic unless they possess a certificate issued by an authority listed in this file. If this setting has no value then the Puppet master's CA certificate (localcacert) will be used." }, :ssl_server_ca_auth => { :type => :file, :mode => "0644", :owner => "service", :group => "service", :desc => "Certificate authorities who issue client certificates. SSL clients will not be considered authentic unless they possess a certificate issued by an authority listed in this file. If this setting has no value then the Puppet master's CA certificate (localcacert) will be used." }, :hostcrl => { :default => "$ssldir/crl.pem", :type => :file, :mode => "0644", :owner => "service", :group => "service", :desc => "Where the host's certificate revocation list can be found. This is distinct from the certificate authority's CRL." }, :certificate_revocation => { :default => true, :type => :boolean, :desc => "Whether certificate revocation should be supported by downloading a Certificate Revocation List (CRL) to all clients. If enabled, CA chaining will almost definitely not work.", }, :certificate_expire_warning => { :default => "60d", :type => :duration, :desc => "The window of time leading up to a certificate's expiration that a notification will be logged. This applies to CA, master, and agent certificates. #{AS_DURATION}" }, :digest_algorithm => { :default => 'md5', :type => :enum, :values => ["md5", "sha256"], :desc => 'Which digest algorithm to use for file resources and the filebucket. Valid values are md5, sha256. 
Default is md5.', } ) define_settings( :ca, :ca_name => { :default => "Puppet CA: $certname", :desc => "The name to use the Certificate Authority certificate.", }, :cadir => { :default => "$ssldir/ca", :type => :directory, :owner => "service", :group => "service", :mode => "0755", :desc => "The root directory for the certificate authority." }, :cacert => { :default => "$cadir/ca_crt.pem", :type => :file, :owner => "service", :group => "service", :mode => "0644", :desc => "The CA certificate." }, :cakey => { :default => "$cadir/ca_key.pem", :type => :file, :owner => "service", :group => "service", :mode => "0640", :desc => "The CA private key." }, :capub => { :default => "$cadir/ca_pub.pem", :type => :file, :owner => "service", :group => "service", :mode => "0644", :desc => "The CA public key." }, :cacrl => { :default => "$cadir/ca_crl.pem", :type => :file, :owner => "service", :group => "service", :mode => "0644", :desc => "The certificate revocation list (CRL) for the CA. Will be used if present but otherwise ignored.", }, :caprivatedir => { :default => "$cadir/private", :type => :directory, :owner => "service", :group => "service", :mode => "0750", :desc => "Where the CA stores private certificate information." }, :csrdir => { :default => "$cadir/requests", :type => :directory, :owner => "service", :group => "service", :mode => "0755", :desc => "Where the CA stores certificate requests" }, :signeddir => { :default => "$cadir/signed", :type => :directory, :owner => "service", :group => "service", :mode => "0755", :desc => "Where the CA stores signed certificates." }, :capass => { :default => "$caprivatedir/ca.pass", :type => :file, :owner => "service", :group => "service", :mode => "0640", :desc => "Where the CA stores the password for the private key." }, :serial => { :default => "$cadir/serial", :type => :file, :owner => "service", :group => "service", :mode => "0644", :desc => "Where the serial number for certificates is stored." }, :autosign => { :default => "$confdir/autosign.conf", :type => :autosign, :desc => "Whether (and how) to autosign certificate requests. This setting is only relevant on a puppet master acting as a certificate authority (CA). Valid values are true (autosigns all certificate requests; not recommended), false (disables autosigning certificates), or the absolute path to a file. The file specified in this setting may be either a **configuration file** or a **custom policy executable.** Puppet will automatically determine what it is: If the Puppet user (see the `user` setting) can execute the file, it will be treated as a policy executable; otherwise, it will be treated as a config file. If a custom policy executable is configured, the CA puppet master will run it every time it receives a CSR. The executable will be passed the subject CN of the request _as a command line argument,_ and the contents of the CSR in PEM format _on stdin._ It should exit with a status of 0 if the cert should be autosigned and non-zero if the cert should not be autosigned. If a certificate request is not autosigned, it will persist for review. An admin user can use the `puppet cert sign` command to manually sign it, or can delete the request. 
For info on autosign configuration files, see [the guide to Puppet's config files](http://docs.puppetlabs.com/guides/configuring.html).", }, :allow_duplicate_certs => { :default => false, :type => :boolean, :desc => "Whether to allow a new certificate request to overwrite an existing certificate.", }, :ca_ttl => { :default => "5y", :type => :duration, :desc => "The default TTL for new certificates. #{AS_DURATION}" }, :req_bits => { :default => 4096, :desc => "The bit length of the certificates.", }, :keylength => { :default => 4096, :desc => "The bit length of keys.", }, :cert_inventory => { :default => "$cadir/inventory.txt", :type => :file, :mode => "0644", :owner => "service", :group => "service", :desc => "The inventory file. This is a text file to which the CA writes a complete listing of all certificates." } ) # Define the config default. define_settings(:application, :config_file_name => { :type => :string, :default => Puppet::Settings.default_config_file_name, :desc => "The name of the puppet config file.", }, :config => { :type => :file, :default => "$confdir/${config_file_name}", :desc => "The configuration file for the current puppet application.", }, :pidfile => { :type => :file, :default => "$rundir/${run_mode}.pid", :desc => "The file containing the PID of a running process. This file is intended to be used by service management frameworks and monitoring systems to determine if a puppet process is still in the process table.", }, :bindaddress => { :default => "0.0.0.0", :desc => "The address a listening server should bind to.", } ) define_settings(:master, :user => { :default => "puppet", :desc => "The user puppet master should run as.", }, :group => { :default => "puppet", :desc => "The group puppet master should run as.", }, :manifestdir => { :default => "$confdir/manifests", :type => :directory, :desc => "Used to build the default value of the `manifest` setting. Has no other purpose. This setting is deprecated.", :deprecated => :completely, }, :manifest => { :default => "$manifestdir/site.pp", :type => :file_or_directory, :desc => "The entry-point manifest for puppet master. This can be one file or a directory of manifests to be evaluated in alphabetical order. Puppet manages this path as a directory if one exists or if the path ends with a / or \\. Setting a global value for `manifest` in puppet.conf is deprecated. Please use directory environments instead. If you need to use something other than the environment's `manifests` directory as the main manifest, you can set `manifest` in environment.conf. For more info, see http://docs.puppetlabs.com/puppet/latest/reference/environments.html", :deprecated => :allowed_on_commandline, }, :code => { :default => "", :desc => "Code to parse directly. This is essentially only used by `puppet`, and should only be set if you're writing your own Puppet executable.", }, :masterhttplog => { :default => "$logdir/masterhttp.log", :type => :file, :owner => "service", :group => "service", :mode => "0660", :create => true, :desc => "Where the puppet master web server saves its access log. This is only used when running a WEBrick puppet master. When puppet master is running under a Rack server like Passenger, that web server will have its own logging behavior." }, :masterport => { :default => 8140, :desc => "The port for puppet master traffic. For puppet master, this is the port to listen on; for puppet agent, this is the port to make requests on. 
Both applications use this setting to get the port.", }, :node_name => { :default => "cert", :desc => "How the puppet master determines the client's identity and sets the 'hostname', 'fqdn' and 'domain' facts for use in the manifest, in particular for determining which 'node' statement applies to the client. Possible values are 'cert' (use the subject's CN in the client's certificate) and 'facter' (use the hostname that the client reported in its facts)", }, :bucketdir => { :default => "$vardir/bucket", :type => :directory, :mode => "0750", :owner => "service", :group => "service", :desc => "Where FileBucket files are stored." }, :rest_authconfig => { :default => "$confdir/auth.conf", :type => :file, :desc => "The configuration file that defines the rights to the different rest indirections. This can be used as a fine-grained authorization system for `puppet master`.", }, :ca => { :default => true, :type => :boolean, :desc => "Whether the master should function as a certificate authority.", }, :basemodulepath => { :default => "$confdir/modules#{File::PATH_SEPARATOR}/usr/share/puppet/modules", :type => :path, :desc => "The search path for **global** modules. Should be specified as a list of directories separated by the system path separator character. (The POSIX path separator is ':', and the Windows path separator is ';'.) If you are using directory environments, these are the modules that will be used by _all_ environments. Note that the `modules` directory of the active environment will have priority over any global directories. For more info, see http://docs.puppetlabs.com/puppet/latest/reference/environments.html This setting also provides the default value for the deprecated `modulepath` setting, which is used when directory environments are disabled.", }, :modulepath => { :default => "$basemodulepath", :type => :path, :desc => "The search path for modules, as a list of directories separated by the system path separator character. (The POSIX path separator is ':', and the Windows path separator is ';'.) Setting a global value for `modulepath` in puppet.conf is deprecated. Please use directory environments instead. If you need to use something other than the default modulepath of `:$basemodulepath`, you can set `modulepath` in environment.conf. For more info, see http://docs.puppetlabs.com/puppet/latest/reference/environments.html", :deprecated => :allowed_on_commandline, }, :ssl_client_header => { :default => "HTTP_X_CLIENT_DN", :desc => "The header containing an authenticated client's SSL DN. This header must be set by the proxy to the authenticated client's SSL DN (e.g., `/CN=puppet.puppetlabs.com`). Puppet will parse out the Common Name (CN) from the Distinguished Name (DN) and use the value of the CN field for authorization. Note that the name of the HTTP header gets munged by the web server common gateway inteface: an `HTTP_` prefix is added, dashes are converted to underscores, and all letters are uppercased. Thus, to use the `X-Client-DN` header, this setting should be `HTTP_X_CLIENT_DN`.", }, :ssl_client_verify_header => { :default => "HTTP_X_CLIENT_VERIFY", :desc => "The header containing the status message of the client verification. This header must be set by the proxy to 'SUCCESS' if the client successfully authenticated, and anything else otherwise. Note that the name of the HTTP header gets munged by the web server common gateway inteface: an `HTTP_` prefix is added, dashes are converted to underscores, and all letters are uppercased. 
Thus, to use the `X-Client-Verify` header, this setting should be `HTTP_X_CLIENT_VERIFY`.", }, # To make sure this directory is created before we try to use it on the server, we need # it to be in the server section (#1138). :yamldir => { :default => "$vardir/yaml", :type => :directory, :owner => "service", :group => "service", :mode => "0750", :desc => "The directory in which YAML data is stored, usually in a subdirectory."}, :server_datadir => { :default => "$vardir/server_data", :type => :directory, :owner => "service", :group => "service", :mode => "0750", :desc => "The directory in which serialized data is stored, usually in a subdirectory."}, :reports => { :default => "store", :desc => "The list of report handlers to use. When using multiple report handlers, their names should be comma-separated, with whitespace allowed. (For example, `reports = http, tagmail`.) This setting is relevant to puppet master and puppet apply. The puppet master will call these report handlers with the reports it receives from agent nodes, and puppet apply will call them with its own report. (In all cases, the node applying the catalog must have `report = true`.) See the report reference for information on the built-in report handlers; custom report handlers can also be loaded from modules. (Report handlers are loaded from the lib directory, at `puppet/reports/NAME.rb`.)", }, :reportdir => { :default => "$vardir/reports", :type => :directory, :mode => "0750", :owner => "service", :group => "service", :desc => "The directory in which to store reports. Each node gets a separate subdirectory in this directory. This setting is only used when the `store` report processor is enabled (see the `reports` setting)."}, :reporturl => { :default => "http://localhost:3000/reports/upload", :desc => "The URL that reports should be forwarded to. This setting is only used when the `http` report processor is enabled (see the `reports` setting).", }, :fileserverconfig => { :default => "$confdir/fileserver.conf", :type => :file, :desc => "Where the fileserver configuration is stored.", }, :strict_hostname_checking => { :default => false, :desc => "Whether to only search for the complete hostname as it is in the certificate when searching for node information in the catalogs.", } ) define_settings(:metrics, :rrddir => { :type => :directory, :default => "$vardir/rrd", :mode => "0750", :owner => "service", :group => "service", :desc => "The directory where RRD database files are stored. Directories for each reporting host will be created under this directory." }, :rrdinterval => { :default => "$runinterval", :type => :duration, :desc => "How often RRD should expect data. This should match how often the hosts report back to the server. #{AS_DURATION}", } ) define_settings(:device, :devicedir => { :default => "$vardir/devices", :type => :directory, :mode => "0750", :desc => "The root directory of devices' $vardir.", }, :deviceconfig => { :default => "$confdir/device.conf", :desc => "Path to the device config file for puppet device.", } ) define_settings(:agent, :node_name_value => { :default => "$certname", :desc => "The explicit value used for the node name for all requests the agent makes to the master. WARNING: This setting is mutually exclusive with node_name_fact. Changing this setting also requires changes to the default auth.conf configuration on the Puppet Master. Please see http://links.puppetlabs.com/node_name_value for more information." 
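# Illustrative puppet.conf sketch (hypothetical fact name) for the fact-based variant: the
# `node_name_value` and `node_name_fact` settings are mutually exclusive, as the hook just below
# enforces, so only one of them should be set.
#
#   [agent]
#   node_name_fact = my_node_name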
}, :node_name_fact => { :default => "", :desc => "The fact name used to determine the node name used for all requests the agent makes to the master. WARNING: This setting is mutually exclusive with node_name_value. Changing this setting also requires changes to the default auth.conf configuration on the Puppet Master. Please see http://links.puppetlabs.com/node_name_fact for more information.", :hook => proc do |value| if !value.empty? and Puppet[:node_name_value] != Puppet[:certname] raise "Cannot specify both the node_name_value and node_name_fact settings" end end }, :localconfig => { :default => "$statedir/localconfig", :type => :file, :owner => "root", :mode => "0660", :desc => "Where puppet agent caches the local configuration. An extension indicating the cache format is added automatically."}, :statefile => { :default => "$statedir/state.yaml", :type => :file, :mode => "0660", :desc => "Where puppet agent and puppet master store state associated with the running configuration. In the case of puppet master, this file reflects the state discovered through interacting with clients." }, :clientyamldir => { :default => "$vardir/client_yaml", :type => :directory, :mode => "0750", :desc => "The directory in which client-side YAML data is stored." }, :client_datadir => { :default => "$vardir/client_data", :type => :directory, :mode => "0750", :desc => "The directory in which serialized data is stored on the client." }, :classfile => { :default => "$statedir/classes.txt", :type => :file, :owner => "root", :mode => "0640", :desc => "The file in which puppet agent stores a list of the classes associated with the retrieved configuration. Can be loaded in the separate `puppet` executable using the `--loadclasses` option."}, :resourcefile => { :default => "$statedir/resources.txt", :type => :file, :owner => "root", :mode => "0640", :desc => "The file in which puppet agent stores a list of the resources associated with the retrieved configuration." }, :puppetdlog => { :default => "$logdir/puppetd.log", :type => :file, :owner => "root", :mode => "0640", :desc => "The fallback log file. This is only used when the `--logdest` option is not specified AND Puppet is running on an operating system where both the POSIX syslog service and the Windows Event Log are unavailable. (Currently, no supported operating systems match that description.) Despite the name, both puppet agent and puppet master will use this file as the fallback logging destination. For control over logging destinations, see the `--logdest` command line option in the manual pages for puppet master, puppet agent, and puppet apply. You can see man pages by running `puppet --help`, or read them online at http://docs.puppetlabs.com/references/latest/man/." }, :server => { :default => "puppet", :desc => "The puppet master server to which the puppet agent should connect." }, :use_srv_records => { :default => false, :type => :boolean, :desc => "Whether the server will search for SRV records in DNS for the current domain.", }, :srv_domain => { :default => lambda { Puppet::Settings.domain_fact }, :desc => "The domain which will be queried to find the SRV records of servers to use.", }, :ignoreschedules => { :default => false, :type => :boolean, :desc => "Boolean; whether puppet agent should ignore schedules. This is useful for initial puppet agent runs.", }, :default_schedules => { :default => true, :type => :boolean, :desc => "Boolean; whether to generate the default schedule resources. 
Setting this to false is useful for keeping external report processors clean of skipped schedule resources.", }, :puppetport => { :default => 8139, :desc => "Which port puppet agent listens on.", }, :noop => { :default => false, :type => :boolean, :desc => "Whether to apply catalogs in noop mode, which allows Puppet to partially simulate a normal run. This setting affects puppet agent and puppet apply. When running in noop mode, Puppet will check whether each resource is in sync, like it does when running normally. However, if a resource attribute is not in the desired state (as declared in the catalog), Puppet will take no action, and will instead report the changes it _would_ have made. These simulated changes will appear in the report sent to the puppet master, or be shown on the console if running puppet agent or puppet apply in the foreground. The simulated changes will not send refresh events to any subscribing or notified resources, although Puppet will log that a refresh event _would_ have been sent. **Important note:** [The `noop` metaparameter](http://docs.puppetlabs.com/references/latest/metaparameter.html#noop) allows you to apply individual resources in noop mode, and will override the global value of the `noop` setting. This means a resource with `noop => false` _will_ be changed if necessary, even when running puppet agent with `noop = true` or `--noop`. (Conversely, a resource with `noop => true` will only be simulated, even when noop mode is globally disabled.)", }, :runinterval => { :default => "30m", :type => :duration, :desc => "How often puppet agent applies the catalog. Note that a runinterval of 0 means \"run continuously\" rather than \"never run.\" If you want puppet agent to never run, you should start it with the `--no-client` option. #{AS_DURATION}", }, :listen => { :default => false, :type => :boolean, :desc => "Whether puppet agent should listen for connections. If this is true, then puppet agent will accept incoming REST API requests, subject to the default ACLs and the ACLs set in the `rest_authconfig` file. Puppet agent can respond usefully to requests on the `run`, `facts`, `certificate`, and `resource` endpoints.", }, :ca_server => { :default => "$server", :desc => "The server to use for certificate authority requests. It's a separate server because it cannot and does not need to horizontally scale.", }, :ca_port => { :default => "$masterport", :desc => "The port to use for the certificate authority.", }, :catalog_format => { :default => "", :desc => "(Deprecated for 'preferred_serialization_format') What format to use to dump the catalog. Only supports 'marshal' and 'yaml'. Only matters on the client, since it asks the server for a specific format.", :hook => proc { |value| if value Puppet.deprecation_warning "Setting 'catalog_format' is deprecated; use 'preferred_serialization_format' instead." Puppet.settings.override_default(:preferred_serialization_format, value) end } }, :preferred_serialization_format => { :default => "pson", :desc => "The preferred means of serializing ruby instances for passing over the wire. This won't guarantee that all instances will be serialized using this method, since not all classes can be guaranteed to support this format, but it will be used for all classes that support it.", }, :report_serialization_format => { :default => "pson", :type => :enum, :values => ["pson", "yaml"], :desc => "The serialization format to use when sending reports to the `report_server`. Possible values are `pson` and `yaml`. 
This setting affects puppet agent, but not puppet apply (which processes its own reports). This should almost always be set to `pson`. It can be temporarily set to `yaml` to let agents using this Puppet version connect to a puppet master running Puppet 3.0.0 through 3.2.x. Note that this is set to 'yaml' automatically if the agent detects an older master, so should never need to be set explicitly." }, :legacy_query_parameter_serialization => { :default => false, :type => :boolean, :desc => "The serialization format to use when sending file_metadata query parameters. Older versions of puppet master expect certain query parameters to be serialized as yaml, which is deprecated. This should almost always be false. It can be temporarily set to true to let agents using this Puppet version connect to a puppet master running Puppet 3.0.0 through 3.2.x. Note that this is set to true automatically if the agent detects an older master, so should never need to be set explicitly." }, :agent_catalog_run_lockfile => { :default => "$statedir/agent_catalog_run.lock", :type => :string, # (#2888) Ensure this file is not added to the settings catalog. :desc => "A lock file to indicate that a puppet agent catalog run is currently in progress. The file contains the pid of the process that holds the lock on the catalog run.", }, :agent_disabled_lockfile => { :default => "$statedir/agent_disabled.lock", :type => :file, :desc => "A lock file to indicate that puppet agent runs have been administratively disabled. File contains a JSON object with state information.", }, :usecacheonfailure => { :default => true, :type => :boolean, :desc => "Whether to use the cached configuration when the remote configuration will not compile. This option is useful for testing new configurations, where you want to fix the broken configuration rather than reverting to a known-good one.", }, :use_cached_catalog => { :default => false, :type => :boolean, :desc => "Whether to only use the cached catalog rather than compiling a new catalog on every run. Puppet can be run with this enabled by default and then selectively disabled when a recompile is desired.", }, :ignoremissingtypes => { :default => false, :type => :boolean, :desc => "Skip searching for classes and definitions that were missing during a prior compilation. The list of missing objects is maintained per-environment and persists until the environment is cleared or the master is restarted.", }, :ignorecache => { :default => false, :type => :boolean, :desc => "Ignore cache and always recompile the configuration. This is useful for testing new configurations, where the local cache may in fact be stale even if the timestamps are up to date - if the facts change or if the server changes.", }, :dynamicfacts => { :default => "memorysize,memoryfree,swapsize,swapfree", :desc => "(Deprecated) Facts that are dynamic; these facts will be ignored when deciding whether changed facts should result in a recompile. Multiple facts should be comma-separated.", :hook => proc { |value| if value Puppet.deprecation_warning "The dynamicfacts setting is deprecated and will be ignored." end } }, :splaylimit => { :default => "$runinterval", :type => :duration, :desc => "The maximum time to delay before runs. Defaults to being the same as the run interval. 
#{AS_DURATION}", }, :splay => { :default => false, :type => :boolean, :desc => "Whether to sleep for a pseudo-random (but consistent) amount of time before a run.", }, :clientbucketdir => { :default => "$vardir/clientbucket", :type => :directory, :mode => "0750", :desc => "Where FileBucket files are stored locally." }, :configtimeout => { :default => "2m", :type => :duration, :desc => "How long the client should wait for the configuration to be retrieved before considering it a failure. This can help reduce flapping if too many clients contact the server at one time. #{AS_DURATION}", }, :report_server => { :default => "$server", :desc => "The server to send transaction reports to.", }, :report_port => { :default => "$masterport", :desc => "The port to communicate with the report_server.", }, :inventory_server => { :default => "$server", :desc => "The server to send facts to.", }, :inventory_port => { :default => "$masterport", :desc => "The port to communicate with the inventory_server.", }, :report => { :default => true, :type => :boolean, :desc => "Whether to send reports after every transaction.", }, :lastrunfile => { :default => "$statedir/last_run_summary.yaml", :type => :file, :mode => "0644", :desc => "Where puppet agent stores the last run report summary in yaml format." }, :lastrunreport => { :default => "$statedir/last_run_report.yaml", :type => :file, :mode => "0640", :desc => "Where puppet agent stores the last run report in yaml format." }, :graph => { :default => false, :type => :boolean, :desc => "Whether to create dot graph files for the different configuration graphs. These dot files can be interpreted by tools like OmniGraffle or dot (which is part of ImageMagick).", }, :graphdir => { :default => "$statedir/graphs", :type => :directory, :desc => "Where to store dot-outputted graphs.", }, :http_compression => { :default => false, :type => :boolean, :desc => "Allow http compression in REST communication with the master. This setting might improve performance for agent -> master communications over slow WANs. Your puppet master needs to support compression (usually by activating some settings in a reverse-proxy in front of the puppet master, which rules out webrick). It is harmless to activate this settings if your master doesn't support compression, but if it supports it, this setting might reduce performance on high-speed LANs.", }, :waitforcert => { :default => "2m", :type => :duration, :desc => "How frequently puppet agent should ask for a signed certificate. When starting for the first time, puppet agent will submit a certificate signing request (CSR) to the server named in the `ca_server` setting (usually the puppet master); this may be autosigned, or may need to be approved by a human, depending on the CA server's configuration. Puppet agent cannot apply configurations until its approved certificate is available. Since the certificate may or may not be available immediately, puppet agent will repeatedly try to fetch it at this interval. You can turn off waiting for certificates by specifying a time of 0, in which case puppet agent will exit if it cannot get a cert. #{AS_DURATION}", }, :ordering => { :type => :enum, :values => ["manifest", "title-hash", "random"], :default => "manifest", :desc => "How unrelated resources should be ordered when applying a catalog. Allowed values are `title-hash`, `manifest`, and `random`. This setting affects puppet agent and puppet apply, but not puppet master. 
* `title-hash` (the default) will order resources randomly, but will use the same order across runs and across nodes. * `manifest` will use the order in which the resources were declared in their manifest files. * `random` will order resources randomly and change their order with each run. This can work like a fuzzer for shaking out undeclared dependencies. Regardless of this setting's value, Puppet will always obey explicit dependencies set with the before/require/notify/subscribe metaparameters and the `->`/`~>` chaining arrows; this setting only affects the relative ordering of _unrelated_ resources." } ) define_settings(:inspect, :archive_files => { :type => :boolean, :default => false, :desc => "During an inspect run, whether to archive files whose contents are audited to a file bucket.", }, :archive_file_server => { :default => "$server", :desc => "During an inspect run, the file bucket server to archive files to if archive_files is set.", } ) # Plugin information. define_settings( :main, :plugindest => { :type => :directory, :default => "$libdir", :desc => "Where Puppet should store plugins that it pulls down from the central server.", }, :pluginsource => { :default => "puppet://$server/plugins", :desc => "From where to retrieve plugins. The standard Puppet `file` type is used for retrieval, so anything that is a valid file source can be used here.", }, :pluginfactdest => { :type => :directory, :default => "$vardir/facts.d", :desc => "Where Puppet should store external facts that are being handled by pluginsync", }, :pluginfactsource => { :default => "puppet://$server/pluginfacts", :desc => "Where to retrieve external facts for pluginsync", }, :pluginsync => { :default => true, :type => :boolean, :desc => "Whether plugins should be synced with the central server.", }, :pluginsignore => { :default => ".svn CVS .git", :desc => "What files to ignore when pulling down plugins.", } ) # Central fact information. define_settings( :main, :factpath => { :type => :path, :default => "$vardir/lib/facter#{File::PATH_SEPARATOR}$vardir/facts", :desc => "Where Puppet should look for facts. Multiple directories should be separated by the system path separator character. (The POSIX path separator is ':', and the Windows path separator is ';'.)", :call_hook => :on_initialize_and_write, # Call our hook with the default value, so we always get the value added to facter. :hook => proc do |value| paths = value.split(File::PATH_SEPARATOR) Facter.search(*paths) end } ) define_settings( :tagmail, :tagmap => { :default => "$confdir/tagmail.conf", :desc => "The mapping between reporting tags and email addresses.", }, :sendmail => { :default => which('sendmail') || '', :desc => "Where to find the sendmail binary with which to send email.", }, :reportfrom => { :default => lambda { "report@#{Puppet::Settings.default_certname.downcase}" }, :desc => "The 'from' email address for the reports.", }, :smtpserver => { :default => "none", :desc => "The server through which to send email reports.", }, :smtpport => { :default => 25, :desc => "The TCP port through which to send email reports.", }, :smtphelo => { :default => lambda { Facter.value 'fqdn' }, :desc => "The name by which we identify ourselves in SMTP HELO for reports. 
If you send to a smtpserver which does strict HELO checking (as with Postfix's `smtpd_helo_restrictions` access controls), you may need to ensure this resolves.", } ) define_settings( :rails, :dblocation => { :default => "$statedir/clientconfigs.sqlite3", :type => :file, :mode => "0660", :owner => "service", :group => "service", :desc => "The sqlite database file. #{STORECONFIGS_ONLY}" }, :dbadapter => { :default => "sqlite3", :desc => "The type of database to use. #{STORECONFIGS_ONLY}", }, :dbmigrate => { :default => false, :type => :boolean, :desc => "Whether to automatically migrate the database. #{STORECONFIGS_ONLY}", }, :dbname => { :default => "puppet", :desc => "The name of the database to use. #{STORECONFIGS_ONLY}", }, :dbserver => { :default => "localhost", :desc => "The database server for caching. Only used when networked databases are used.", }, :dbport => { :default => "", :desc => "The database password for caching. Only used when networked databases are used. #{STORECONFIGS_ONLY}", }, :dbuser => { :default => "puppet", :desc => "The database user for caching. Only used when networked databases are used. #{STORECONFIGS_ONLY}", }, :dbpassword => { :default => "puppet", :desc => "The database password for caching. Only used when networked databases are used. #{STORECONFIGS_ONLY}", }, :dbconnections => { :default => '', :desc => "The number of database connections for networked databases. Will be ignored unless the value is a positive integer. #{STORECONFIGS_ONLY}", }, :dbsocket => { :default => "", :desc => "The database socket location. Only used when networked databases are used. Will be ignored if the value is an empty string. #{STORECONFIGS_ONLY}", }, :railslog => { :default => "$logdir/rails.log", :type => :file, :mode => "0600", :owner => "service", :group => "service", :desc => "Where Rails-specific logs are sent. #{STORECONFIGS_ONLY}" }, :rails_loglevel => { :default => "info", :desc => "The log level for Rails connections. The value must be a valid log level within Rails. Production environments normally use `info` and other environments normally use `debug`. #{STORECONFIGS_ONLY}", } ) define_settings( :couchdb, :couchdb_url => { :default => "http://127.0.0.1:5984/puppet", :desc => "The url where the puppet couchdb database will be created. Only used when `facts_terminus` is set to `couch`.", } ) define_settings( :transaction, :tags => { :default => "", :desc => "Tags to use to find resources. If this is set, then only resources tagged with the specified tags will be applied. Values must be comma-separated.", }, :evaltrace => { :default => false, :type => :boolean, :desc => "Whether each resource should log when it is being evaluated. This allows you to interactively see exactly what is being done.", }, :summarize => { :default => false, :type => :boolean, :desc => "Whether to print a transaction summary.", } ) define_settings( :main, :external_nodes => { :default => "none", :desc => "An external command that can produce node information. The command's output must be a YAML dump of a hash, and that hash must have a `classes` key and/or a `parameters` key, where `classes` is an array or hash and `parameters` is a hash. For unknown nodes, the command should exit with a non-zero exit code. This command makes it straightforward to store your node mapping information in other data sources like databases.", } ) define_settings( :ldap, :ldapssl => { :default => false, :type => :boolean, :desc => "Whether SSL should be used when searching for nodes. 
Defaults to false because SSL usually requires certificates to be set up on the client side.", }, :ldaptls => { :default => false, :type => :boolean, :desc => "Whether TLS should be used when searching for nodes. Defaults to false because TLS usually requires certificates to be set up on the client side.", }, :ldapserver => { :default => "ldap", :desc => "The LDAP server. Only used if `node_terminus` is set to `ldap`.", }, :ldapport => { :default => 389, :desc => "The LDAP port. Only used if `node_terminus` is set to `ldap`.", }, :ldapstring => { :default => "(&(objectclass=puppetClient)(cn=%s))", :desc => "The search string used to find an LDAP node.", }, :ldapclassattrs => { :default => "puppetclass", :desc => "The LDAP attributes to use to define Puppet classes. Values should be comma-separated.", }, :ldapstackedattrs => { :default => "puppetvar", :desc => "The LDAP attributes that should be stacked to arrays by adding the values in all hierarchy elements of the tree. Values should be comma-separated.", }, :ldapattrs => { :default => "all", :desc => "The LDAP attributes to include when querying LDAP for nodes. All returned attributes are set as variables in the top-level scope. Multiple values should be comma-separated. The value 'all' returns all attributes.", }, :ldapparentattr => { :default => "parentnode", :desc => "The attribute to use to define the parent node.", }, :ldapuser => { :default => "", :desc => "The user to use to connect to LDAP. Must be specified as a full DN.", }, :ldappassword => { :default => "", :desc => "The password to use to connect to LDAP.", }, :ldapbase => { :default => "", :desc => "The search base for LDAP searches. It's impossible to provide a meaningful default here, although the LDAP libraries might have one already set. Generally, it should be the 'ou=Hosts' branch under your main directory.", } ) define_settings(:master, :storeconfigs => { :default => false, :type => :boolean, :desc => "Whether to store each client's configuration, including catalogs, facts, and related data. This also enables the import and export of resources in the Puppet language - a mechanism for exchange resources between nodes. By default this uses ActiveRecord and an SQL database to store and query the data; this, in turn, will depend on Rails being available. You can adjust the backend using the storeconfigs_backend setting.", # Call our hook with the default value, so we always get the libdir set. :call_hook => :on_initialize_and_write, :hook => proc do |value| require 'puppet/node' require 'puppet/node/facts' if value if not Puppet.settings[:async_storeconfigs] Puppet::Resource::Catalog.indirection.cache_class = :store_configs Puppet.settings.override_default(:catalog_cache_terminus, :store_configs) end Puppet::Node::Facts.indirection.cache_class = :store_configs Puppet::Resource.indirection.terminus_class = :store_configs end end }, :storeconfigs_backend => { :type => :terminus, :default => "active_record", :desc => "Configure the backend terminus used for StoreConfigs. By default, this uses the ActiveRecord store, which directly talks to the database from within the Puppet Master process." } ) define_settings(:parser, :templatedir => { :default => "$vardir/templates", :type => :directory, :desc => "Where Puppet looks for template files. Can be a list of colon-separated directories. This setting is deprecated. 
Please put your templates in modules instead.", :deprecated => :completely, }, :allow_variables_with_dashes => { :default => false, :desc => <<-'EOT' Permit hyphens (`-`) in variable names and issue deprecation warnings about them. This setting **should always be `false`;** setting it to `true` will cause subtle and wide-ranging bugs. It will be removed in a future version. Hyphenated variables caused major problems in the language, but were allowed between Puppet 2.7.3 and 2.7.14. If you used them during this window, we apologize for the inconvenience --- you can temporarily set this to `true` in order to upgrade, and can rename your variables at your leisure. Please revert it to `false` after you have renamed all affected variables. EOT }, :parser => { :default => "current", :desc => <<-'EOT' Selects the parser to use for parsing puppet manifests (in puppet DSL language/'.pp' files). Available choices are `current` (the default) and `future`. The `current` parser means that the released version of the parser should be used. The `future` parser is a "time travel to the future" allowing early exposure to new language features. What these features are will vary from release to release and they may be invididually configurable. Available Since Puppet 3.2. EOT }, :max_errors => { :default => 10, :desc => <<-'EOT' Sets the max number of logged/displayed parser validation errors in case multiple errors have been detected. A value of 0 is the same as a value of 1; a minimum of one error is always raised. The count is per manifest. EOT }, :max_warnings => { :default => 10, :desc => <<-'EOT' Sets the max number of logged/displayed parser validation warnings in case multiple warnings have been detected. A value of 0 blocks logging of warnings. The count is per manifest. EOT }, :max_deprecations => { :default => 10, :desc => <<-'EOT' Sets the max number of logged/displayed parser validation deprecation warnings in case multiple deprecation warnings have been detected. A value of 0 blocks the logging of deprecation warnings. The count is per manifest. EOT }, :strict_variables => { :default => false, :type => :boolean, :desc => <<-'EOT' Makes the parser raise errors when referencing unknown variables. (This does not affect referencing variables that are explicitly set to undef). EOT } ) define_settings(:puppetdoc, :document_all => { :default => false, :type => :boolean, :desc => "Whether to document all resources when using `puppet doc` to generate manifest documentation.", } ) end diff --git a/lib/puppet/indirector/facts/couch.rb b/lib/puppet/indirector/facts/couch.rb index 54980eb14..9cbd0a3dd 100644 --- a/lib/puppet/indirector/facts/couch.rb +++ b/lib/puppet/indirector/facts/couch.rb @@ -1,34 +1,36 @@ require 'puppet/node/facts' require 'puppet/indirector/couch' class Puppet::Node::Facts::Couch < Puppet::Indirector::Couch - desc "Store facts in CouchDB. This should not be used with the inventory service; + desc "DEPRECATED. This terminus will be removed in Puppet 4.0. + + Store facts in CouchDB. This should not be used with the inventory service; it is for more obscure custom integrations. If you are wondering whether you should use it, you shouldn't; use PuppetDB instead." # Return the facts object or nil if there is no document def find(request) doc = super doc ? model.new(doc['_id'], doc['facts']) : nil end private # Facts values are stored to the document's 'facts' attribute. 
Hostname is # stored to 'name' # def hash_from(request) super.merge('facts' => request.instance.values) end # Facts are stored to the 'node' document. def document_type_for(request) 'node' end # The id used to store the object in couchdb. def id_for(request) request.key.to_s end end diff --git a/lib/puppet/module_tool/applications/builder.rb b/lib/puppet/module_tool/applications/builder.rb index 73a3989f3..3d86ad16f 100644 --- a/lib/puppet/module_tool/applications/builder.rb +++ b/lib/puppet/module_tool/applications/builder.rb @@ -1,108 +1,144 @@ require 'fileutils' require 'json' require 'puppet/file_system' +require 'pathspec' module Puppet::ModuleTool module Applications class Builder < Application def initialize(path, options = {}) @path = File.expand_path(path) @pkg_path = File.join(@path, 'pkg') super(options) end def run load_metadata! sanity_check create_directory copy_contents write_json Puppet.notice "Building #{@path} for release" pack relative = Pathname.new(archive_file).relative_path_from(Pathname.new(File.expand_path(Dir.pwd))) # Return the Pathname object representing the path to the release # archive just created. This return value is used by the module_tool # face build action, and displayed to on the console using the to_s # method. # # Example return value: # # # relative end private def archive_file File.join(@pkg_path, "#{metadata.release_name}.tar.gz") end def pack FileUtils.rm archive_file rescue nil tar = Puppet::ModuleTool::Tar.instance Dir.chdir(@pkg_path) do tar.pack(metadata.release_name, archive_file) end end def create_directory FileUtils.mkdir(@pkg_path) rescue nil if File.directory?(build_path) FileUtils.rm_rf(build_path, :secure => true) end FileUtils.mkdir(build_path) end + def ignored_files + if @ignored_files + return @ignored_files + else + pmtignore = File.join(@path, '.pmtignore') + gitignore = File.join(@path, '.gitignore') + + if File.file? pmtignore + @ignored_files = PathSpec.new File.read(pmtignore) + elsif File.file? gitignore + @ignored_files = PathSpec.new File.read(gitignore) + else + @ignored_files = PathSpec.new + end + end + end + def copy_contents - Dir[File.join(@path, '*')].each do |path| - case File.basename(path) - when *Puppet::ModuleTool::ARTIFACTS + Find.find(File.join(@path)) do |path| + if path == @path next + end + + # Needed because pathspec looks for a trailing slash in the path to + # determine if a path is a directory + path = path.to_s + '/' if File.directory? path + + unless ignored_files.match_paths([path], @path).empty? + Find.prune + end + + rel = Pathname.new(path).relative_path_from(Pathname.new(@path)) + case rel.to_s + when *Puppet::ModuleTool::ARTIFACTS + Find.prune + end + + dest = "#{build_path}/#{rel.to_s}" + if File.directory? path + FileUtils.mkdir dest, :mode => File.stat(path).mode else - FileUtils.cp_r path, build_path, :preserve => true + FileUtils.cp path, dest, :preserve => true end end end def sanity_check symlinks = Dir.glob("#{@path}/**/*", File::FNM_DOTMATCH).map { |f| Pathname.new(f) }.select {|p| Puppet::FileSystem.symlink? p} dirpath = Pathname.new @path unless symlinks.empty? symlinks.each do |s| Puppet.warning "Symlinks in modules are unsupported. Please investigate symlink #{s.relative_path_from dirpath}->#{s.realpath.relative_path_from dirpath}." end raise Puppet::ModuleTool::Errors::ModuleToolError, "Found symlinks. Symlinks in modules are not allowed, please remove them." end end def write_json metadata_path = File.join(build_path, 'metadata.json') if metadata.to_hash.include? 
'checksums' Puppet.warning "A 'checksums' field was found in metadata.json. This field will be ignored and can safely be removed." end # TODO: This may necessarily change the order in which the metadata.json # file is packaged from what was written by the user. This is a # regretable, but required for now. File.open(metadata_path, 'w') do |f| f.write(metadata.to_json) end File.open(File.join(build_path, 'checksums.json'), 'w') do |f| f.write(PSON.pretty_generate(Checksums.new(build_path))) end end def build_path @build_path ||= File.join(@pkg_path, metadata.release_name) end end end end diff --git a/lib/puppet/parser/scope.rb b/lib/puppet/parser/scope.rb index 6d8b49edc..c8cfa6060 100644 --- a/lib/puppet/parser/scope.rb +++ b/lib/puppet/parser/scope.rb @@ -1,902 +1,906 @@ # The scope class, which handles storing and retrieving variables and types and # such. require 'forwardable' require 'puppet/parser' require 'puppet/parser/templatewrapper' require 'puppet/resource/type_collection_helper' require 'puppet/util/methodhelper' # This class is part of the internal parser/evaluator/compiler functionality of Puppet. # It is passed between the various classes that participate in evaluation. # None of its methods are API except those that are clearly marked as such. # # @api public class Puppet::Parser::Scope extend Forwardable include Puppet::Util::MethodHelper include Puppet::Resource::TypeCollectionHelper require 'puppet/parser/resource' AST = Puppet::Parser::AST Puppet::Util.logmethods(self) include Puppet::Util::Errors attr_accessor :source, :resource attr_accessor :compiler attr_accessor :parent attr_reader :namespaces # Hash of hashes of default values per type name attr_reader :defaults # Add some alias methods that forward to the compiler, since we reference # them frequently enough to justify the extra method call. def_delegators :compiler, :catalog, :environment # Abstract base class for LocalScope and MatchScope # class Ephemeral attr_reader :parent def initialize(parent = nil) @parent = parent end def is_local_scope? false end def [](name) if @parent @parent[name] end end def include?(name) (@parent and @parent.include?(name)) end def bound?(name) false end def add_entries_to(target = {}) @parent.add_entries_to(target) unless @parent.nil? # do not include match data ($0-$n) target end end class LocalScope < Ephemeral def initialize(parent=nil) super parent @symbols = {} end def [](name) if @symbols.include?(name) @symbols[name] else super end end def is_local_scope? true end def []=(name, value) @symbols[name] = value end def include?(name) bound?(name) || super end def delete(name) @symbols.delete(name) end def bound?(name) @symbols.include?(name) end def add_entries_to(target = {}) super @symbols.each do |k, v| if v == :undef || v.nil? target.delete(k) else target[ k ] = v end end target end end class MatchScope < Ephemeral attr_accessor :match_data def initialize(parent = nil, match_data = nil) super parent @match_data = match_data end def is_local_scope? false end def [](name) if bound?(name) @match_data[name.to_i] else super end end def include?(name) bound?(name) or super end def bound?(name) # A "match variables" scope reports all numeric variables to be bound if the scope has # match_data. Without match data the scope is transparent. # @match_data && name =~ /^\d+$/ end def []=(name, value) # TODO: Bad choice of exception raise Puppet::ParseError, "Numerical variables cannot be changed. 
Attempt to set $#{name}" end def delete(name) # TODO: Bad choice of exception raise Puppet::ParseError, "Numerical variables cannot be deleted: Attempt to delete: $#{name}" end def add_entries_to(target = {}) # do not include match data ($0-$n) super end end # Returns true if the variable of the given name has a non nil value. # TODO: This has vague semantics - does the variable exist or not? # use ['name'] to get nil or value, and if nil check with exist?('name') # this include? is only useful because of checking against the boolean value false. # def include?(name) ! self[name].nil? end # Returns true if the variable of the given name is set to any value (including nil) # def exist?(name) next_scope = inherited_scope || enclosing_scope effective_symtable(true).include?(name) || next_scope && next_scope.exist?(name) end # Returns true if the given name is bound in the current (most nested) scope for assignments. # def bound?(name) # Do not look in ephemeral (match scope), the semantics is to answer if an assignable variable is bound effective_symtable(false).bound?(name) end # Is the value true? This allows us to control the definition of truth # in one place. def self.true?(value) case value when '' false when :undef false else !!value end end # Coerce value to a number, or return `nil` if it isn't one. def self.number?(value) case value when Numeric value when /^-?\d+(:?\.\d+|(:?\.\d+)?e\d+)$/ value.to_f when /^0x[0-9a-f]+$/i value.to_i(16) when /^0[0-7]+$/ value.to_i(8) when /^-?\d+$/ value.to_i else nil end end # Add to our list of namespaces. def add_namespace(ns) return false if @namespaces.include?(ns) if @namespaces == [""] @namespaces = [ns] else @namespaces << ns end end def find_hostclass(name, options = {}) known_resource_types.find_hostclass(namespaces, name, options) end def find_definition(name) known_resource_types.find_definition(namespaces, name) end def find_global_scope() # walk upwards until first found node_scope or top_scope if is_nodescope? || is_topscope? self else next_scope = inherited_scope || enclosing_scope if next_scope.nil? # this happens when testing, and there is only a single test scope and no link to any # other scopes self else next_scope.find_global_scope() end end end # This just delegates directly. def_delegator :compiler, :findresource # Initialize our new scope. Defaults to having no parent. def initialize(compiler, options = {}) if compiler.is_a? Puppet::Parser::Compiler self.compiler = compiler else raise Puppet::DevError, "you must pass a compiler instance to a new scope object" end if n = options.delete(:namespace) @namespaces = [n] else @namespaces = [""] end raise Puppet::DevError, "compiler passed in options" if options.include? :compiler set_options(options) extend_with_functions_module # The symbol table for this scope. This is where we store variables. # @symtable = Ephemeral.new(nil, true) @symtable = LocalScope.new(nil) @ephemeral = [ MatchScope.new(@symtable, nil) ] # All of the defaults set for types. It's a hash of hashes, # with the first key being the type, then the second key being # the parameter. @defaults = Hash.new { |dhash,type| dhash[type] = {} } # The table for storing class singletons. This will only actually # be used by top scopes and node scopes. @class_scopes = {} @enable_immutable_data = Puppet[:immutable_node_data] end # Store the fact that we've evaluated a class, and store a reference to # the scope in which it was evaluated, so that we can look it up later. 
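As a minimal illustration of the `number?` coercion defined above (the literal inputs are invented examples; the return values follow from the regexes shown):

    Puppet::Parser::Scope.number?("42")      # => 42        plain integer
    Puppet::Parser::Scope.number?("-7")      # => -7        signed integer
    Puppet::Parser::Scope.number?("1.5e3")   # => 1500.0    float / exponent form
    Puppet::Parser::Scope.number?("0x1F")    # => 31        hexadecimal
    Puppet::Parser::Scope.number?("0755")    # => 493       octal
    Puppet::Parser::Scope.number?("abc")     # => nil       not a number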
def class_set(name, scope) if parent parent.class_set(name, scope) else @class_scopes[name] = scope end end # Return the scope associated with a class. This is just here so # that subclasses can set their parent scopes to be the scope of # their parent class, and it's also used when looking up qualified # variables. def class_scope(klass) # They might pass in either the class or class name k = klass.respond_to?(:name) ? klass.name : klass @class_scopes[k] || (parent && parent.class_scope(k)) end # Collect all of the defaults set at any higher scopes. # This is a different type of lookup because it's # additive -- it collects all of the defaults, with defaults # in closer scopes overriding those in later scopes. # # The lookupdefaults searches in the the order: # # * inherited # * contained (recursive) # * self # def lookupdefaults(type) values = {} # first collect the values from the parents if parent parent.lookupdefaults(type).each { |var,value| values[var] = value } end # then override them with any current values # this should probably be done differently if @defaults.include?(type) @defaults[type].each { |var,value| values[var] = value } end values end # Look up a defined type. def lookuptype(name) find_definition(name) || find_hostclass(name) end def undef_as(x,v) if v.nil? or v == :undef x else v end end # Lookup a variable within this scope using the Puppet language's # scoping rules. Variables can be qualified using just as in a # manifest. # # @param [String] name the variable name to lookup # # @return Object the value of the variable, or nil if it's not found # # @api public def lookupvar(name, options = {}) unless name.is_a? String raise Puppet::ParseError, "Scope variable name #{name.inspect} is a #{name.class}, not a string" end table = @ephemeral.last if name =~ /^(.*)::(.+)$/ class_name = $1 variable_name = $2 lookup_qualified_variable(class_name, variable_name, options) # TODO: optimize with an assoc instead, this searches through scopes twice for a hit elsif table.include?(name) table[name] else next_scope = inherited_scope || enclosing_scope if next_scope next_scope.lookupvar(name, options) else variable_not_found(name) end end end def variable_not_found(name, reason=nil) if Puppet[:strict_variables] if Puppet[:parser] == 'future' throw :undefined_variable else reason_msg = reason.nil? ? '' : "; #{reason}" raise Puppet::ParseError, "Undefined variable #{name.inspect}#{reason_msg}" end else nil end end # Retrieves the variable value assigned to the name given as an argument. The name must be a String, # and namespace can be qualified with '::'. The value is looked up in this scope, its parent scopes, # or in a specific visible named scope. # # @param varname [String] the name of the variable (may be a qualified name using `(ns'::')*varname` # @param options [Hash] Additional options, not part of api. # @return [Object] the value assigned to the given varname # @see #[]= # @api public # def [](varname, options={}) lookupvar(varname, options) end # The scope of the inherited thing of this scope's resource. This could # either be a node that was inherited or the class. # # @return [Puppet::Parser::Scope] The scope or nil if there is not an inherited scope def inherited_scope if has_inherited_class? qualified_scope(resource.resource_type.parent) else nil end end # The enclosing scope (topscope or nodescope) of this scope. # The enclosing scopes are produced when a class or define is included at # some point. 
The parent scope of the included class or define becomes the # scope in which it was included. The chain of parent scopes is followed # until a node scope or the topscope is found # # @return [Puppet::Parser::Scope] The scope or nil if there is no enclosing scope def enclosing_scope if has_enclosing_scope? if parent.is_topscope? or parent.is_nodescope? parent else parent.enclosing_scope end else nil end end def is_classscope? resource and resource.type == "Class" end def is_nodescope? resource and resource.type == "Node" end def is_topscope? compiler and self == compiler.topscope end def lookup_qualified_variable(class_name, variable_name, position) begin if lookup_as_local_name?(class_name, variable_name) self[variable_name] else qualified_scope(class_name).lookupvar(variable_name, position) end rescue RuntimeError => e unless Puppet[:strict_variables] # Do not issue warning if strict variables are on, as an error will be raised by variable_not_found location = if position[:lineproc] " at #{position[:lineproc].call}" elsif position[:file] && position[:line] " at #{position[:file]}:#{position[:line]}" else "" end warning "Could not look up qualified variable '#{class_name}::#{variable_name}'; #{e.message}#{location}" end variable_not_found("#{class_name}::#{variable_name}", e.message) end end # Handles the special case of looking up fully qualified variable in not yet evaluated top scope # This is ok if the lookup request originated in topscope (this happens when evaluating # bindings; using the top scope to provide the values for facts. # @param class_name [String] the classname part of a variable name, may be special "" # @param variable_name [String] the variable name without the absolute leading '::' # @return [Boolean] true if the given variable name should be looked up directly in this scope # def lookup_as_local_name?(class_name, variable_name) # not a local if name has more than one segment return nil if variable_name =~ /::/ # partial only if the class for "" cannot be found return nil unless class_name == "" && klass = find_hostclass(class_name) && class_scope(klass).nil? is_topscope? end def has_inherited_class? is_classscope? and resource.resource_type.parent end private :has_inherited_class? def has_enclosing_scope? not parent.nil? end private :has_enclosing_scope? def qualified_scope(classname) raise "class #{classname} could not be found" unless klass = find_hostclass(classname) raise "class #{classname} has not been evaluated" unless kscope = class_scope(klass) kscope end private :qualified_scope # Returns a Hash containing all variables and their values, optionally (and # by default) including the values defined in parent. Local values # shadow parent values. Ephemeral scopes for match results ($0 - $n) are not included. # # This is currently a wrapper for to_hash_legacy or to_hash_future. # # @see to_hash_future # # @see to_hash_legacy def to_hash(recursive = true) @parser ||= Puppet[:parser] if @parser == 'future' to_hash_future(recursive) else to_hash_legacy(recursive) end end # Fixed version of to_hash that implements scoping correctly (i.e., with # dynamic scoping disabled #28200 / PUP-1220 # # @see to_hash def to_hash_future(recursive) if recursive and has_enclosing_scope? target = enclosing_scope.to_hash_future(recursive) if !(inherited = inherited_scope).nil? 
target.merge!(inherited.to_hash_future(recursive)) end else target = Hash.new end # add all local scopes @ephemeral.last.add_entries_to(target) target end # The old broken implementation of to_hash that retains the dynamic scoping # semantics # # @see to_hash def to_hash_legacy(recursive = true) if recursive and parent target = parent.to_hash_legacy(recursive) else target = Hash.new end # add all local scopes @ephemeral.last.add_entries_to(target) target end def namespaces @namespaces.dup end # Create a new scope and set these options. def newscope(options = {}) compiler.newscope(self, options) end def parent_module_name return nil unless @parent return nil unless @parent.source @parent.source.module_name end # Set defaults for a type. The typename should already be downcased, # so that the syntax is isolated. We don't do any kind of type-checking # here; instead we let the resource do it when the defaults are used. def define_settings(type, params) table = @defaults[type] # if we got a single param, it'll be in its own array params = [params] unless params.is_a?(Array) params.each { |param| if table.include?(param.name) raise Puppet::ParseError.new("Default already defined for #{type} { #{param.name} }; cannot redefine", param.line, param.file) end table[param.name] = param } end RESERVED_VARIABLE_NAMES = ['trusted', 'facts'].freeze # Set a variable in the current scope. This will override settings # in scopes above, but will not allow variables in the current scope # to be reassigned. # It's preferred that you use self[]= instead of this; only use this # when you need to set options. def setvar(name, value, options = {}) if name =~ /^[0-9]+$/ raise Puppet::ParseError.new("Cannot assign to a numeric match result variable '$#{name}'") # unless options[:ephemeral] end unless name.is_a? String raise Puppet::ParseError, "Scope variable name #{name.inspect} is a #{name.class}, not a string" end # Check for reserved variable names if @enable_immutable_data && !options[:privileged] && RESERVED_VARIABLE_NAMES.include?(name) raise Puppet::ParseError, "Attempt to assign to a reserved variable name: '#{name}'" end table = effective_symtable(options[:ephemeral]) if table.bound?(name) if options[:append] error = Puppet::ParseError.new("Cannot append, variable #{name} is defined in this scope") else error = Puppet::ParseError.new("Cannot reassign variable #{name}") end error.file = options[:file] if options[:file] error.line = options[:line] if options[:line] raise error end if options[:append] # produced result (value) is the resulting appended value, note: the table[]= does not return the value table[name] = (value = append_value(undef_as('', self[name]), value)) else table[name] = value end value end def set_trusted(hash) setvar('trusted', deep_freeze(hash), :privileged => true) end def set_facts(hash) + # Remove _timestamp (it has an illegal datatype). It is not allowed to mutate the given hash + # since it contains the facts. + hash = hash.dup + hash.delete('_timestamp') setvar('facts', deep_freeze(hash), :privileged => true) end # Deeply freezes the given object. The object and its content must be of the types: # Array, Hash, Numeric, Boolean, Symbol, Regexp, NilClass, or String. All other types raises an Error. # (i.e. if they are assignable to Puppet::Pops::Types::Data type). 
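As a small aside on the `set_facts` change above, a hedged sketch of why the hash is duplicated before `_timestamp` is removed (the sample fact hash is invented for illustration):

    facts = { 'osfamily' => 'Debian', '_timestamp' => Time.now }
    scrubbed = facts.dup             # never mutate the caller's fact hash
    scrubbed.delete('_timestamp')    # its Time value is not a legal, freezable data type here
    facts.key?('_timestamp')         # => true  -- the original is left intact
    scrubbed.key?('_timestamp')      # => false -- only the copy is scrubbed and then deep-frozen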
# def deep_freeze(object) case object when Array object.each {|v| deep_freeze(v) } object.freeze when Hash object.each {|k, v| deep_freeze(k); deep_freeze(v) } object.freeze when NilClass, Numeric, TrueClass, FalseClass # do nothing when String object.freeze else raise Puppet::Error, "Unsupported data type: '#{object.class}'" end object end private :deep_freeze # Return the effective "table" for setting variables. # This method returns the first ephemeral "table" that acts as a local scope, or this # scope's symtable. If the parameter `use_ephemeral` is true, the "top most" ephemeral "table" # will be returned (irrespective of it being a match scope or a local scope). # # @param use_ephemeral [Boolean] whether the top most ephemeral (of any kind) should be used or not def effective_symtable(use_ephemeral) s = @ephemeral.last if use_ephemeral return s || @symtable else while s && !s.is_local_scope?() s = s.parent end s || @symtable end end # Sets the variable value of the name given as an argument to the given value. The value is # set in the current scope and may shadow a variable with the same name in a visible outer scope. # It is illegal to re-assign a variable in the same scope. It is illegal to set a variable in some other # scope/namespace than the scope passed to a method. # # @param varname [String] The variable name to which the value is assigned. Must not contain `::` # @param value [String] The value to assign to the given variable name. # @param options [Hash] Additional options, not part of api. # # @api public # def []=(varname, value, options = {}) setvar(varname, value, options = {}) end def append_value(bound_value, new_value) case new_value when Array bound_value + new_value when Hash bound_value.merge(new_value) else if bound_value.is_a?(Hash) raise ArgumentError, "Trying to append to a hash with something which is not a hash is unsupported" end bound_value + new_value end end private :append_value # Return the tags associated with this scope. def_delegator :resource, :tags # Used mainly for logging def to_s "Scope(#{@resource})" end # remove ephemeral scope up to level # TODO: Who uses :all ? Remove ?? # def unset_ephemeral_var(level=:all) if level == :all @ephemeral = [ MatchScope.new(@symtable, nil)] else @ephemeral.pop(@ephemeral.size - level) end end def ephemeral_level @ephemeral.size end # TODO: Who calls this? def new_ephemeral(local_scope = false) if local_scope @ephemeral.push(LocalScope.new(@ephemeral.last)) else @ephemeral.push(MatchScope.new(@ephemeral.last, nil)) end end # Sets match data in the most nested scope (which always is a MatchScope), it clobbers match data already set there # def set_match_data(match_data) @ephemeral.last.match_data = match_data end # Nests a match data scope def new_match_scope(match_data) @ephemeral.push(MatchScope.new(@ephemeral.last, match_data)) end def ephemeral_from(match, file = nil, line = nil) case match when Hash # Create local scope ephemeral and set all values from hash new_ephemeral(true) match.each {|k,v| setvar(k, v, :file => file, :line => line, :ephemeral => true) } # Must always have an inner match data scope (that starts out as transparent) # In 3x slightly wasteful, since a new nested scope is created for a match # (TODO: Fix that problem) new_ephemeral(false) else raise(ArgumentError,"Invalid regex match data. 
Got a #{match.class}") unless match.is_a?(MatchData) # Create a match ephemeral and set values from match data new_match_scope(match) end end def find_resource_type(type) # It still works fine without the type == 'class' short-cut, but it is a lot slower. return nil if ["class", "node"].include? type.to_s.downcase find_builtin_resource_type(type) || find_defined_resource_type(type) end def find_builtin_resource_type(type) Puppet::Type.type(type.to_s.downcase.to_sym) end def find_defined_resource_type(type) known_resource_types.find_definition(namespaces, type.to_s.downcase) end def method_missing(method, *args, &block) method.to_s =~ /^function_(.*)$/ name = $1 super unless name super unless Puppet::Parser::Functions.function(name) # In odd circumstances, this might not end up defined by the previous # method, so we might as well be certain. if respond_to? method send(method, *args) else raise Puppet::DevError, "Function #{name} not defined despite being loaded!" end end def resolve_type_and_titles(type, titles) raise ArgumentError, "titles must be an array" unless titles.is_a?(Array) case type.downcase when "class" # resolve the titles titles = titles.collect do |a_title| hostclass = find_hostclass(a_title) hostclass ? hostclass.name : a_title end when "node" # no-op else # resolve the type resource_type = find_resource_type(type) type = resource_type.name if resource_type end return [type, titles] end # Transforms references to classes to the form suitable for # lookup in the compiler. # # Makes names passed in the names array absolute if they are relative # Names are now made absolute if Puppet[:parser] == 'future', this will # be the default behavior in Puppet 4.0 # # Transforms Class[] and Resource[] type referenes to class name # or raises an error if a Class[] is unspecific, if a Resource is not # a 'class' resource, or if unspecific (no title). # # TODO: Change this for 4.0 to always make names absolute # # @param names [Array] names to (optionally) make absolute # @return [Array] names after transformation # def transform_and_assert_classnames(names) if Puppet[:parser] == 'future' names.map do |name| case name when String name.sub(/^([^:]{1,2})/, '::\1') when Puppet::Resource assert_class_and_title(name.type, name.title) name.title.sub(/^([^:]{1,2})/, '::\1') when Puppet::Pops::Types::PHostClassType raise ArgumentError, "Cannot use an unspecific Class[] Type" unless name.class_name name.class_name.sub(/^([^:]{1,2})/, '::\1') when Puppet::Pops::Types::PResourceType assert_class_and_title(name.type_name, name.title) name.title.sub(/^([^:]{1,2})/, '::\1') end end else names end end private def assert_class_and_title(type_name, title) if type_name.nil? || type_name == '' raise ArgumentError, "Cannot use an unspecific Resource[] where a Resource['class', name] is expected" end unless type_name =~ /^[Cc]lass$/ raise ArgumentError, "Cannot use a Resource[#{type_name}] where a Resource['class', name] is expected" end if title.nil? 
raise ArgumentError, "Cannot use an unspecific Resource['class'] where a Resource['class', name] is expected" end end def extend_with_functions_module root = Puppet.lookup(:root_environment) extend Puppet::Parser::Functions.environment_module(root) extend Puppet::Parser::Functions.environment_module(environment) if environment != root end end diff --git a/lib/puppet/provider/package/pacman.rb b/lib/puppet/provider/package/pacman.rb index b82518c3c..4ca356b16 100644 --- a/lib/puppet/provider/package/pacman.rb +++ b/lib/puppet/provider/package/pacman.rb @@ -1,230 +1,234 @@ require 'puppet/provider/package' require 'set' require 'uri' Puppet::Type.type(:package).provide :pacman, :parent => Puppet::Provider::Package do - desc "Support for the Package Manager Utility (pacman) used in Archlinux." + desc "Support for the Package Manager Utility (pacman) used in Archlinux. + + This provider supports the `install_options` attribute, which allows command-line flags to be passed to pacman. + These options should be specified as a string (e.g. '--flag'), a hash (e.g. {'--flag' => 'value'}), + or an array where each element is either a string or a hash." commands :pacman => "/usr/bin/pacman" # Yaourt is a common AUR helper which, if installed, we can use to query the AUR commands :yaourt => "/usr/bin/yaourt" if Puppet::FileSystem.exist? '/usr/bin/yaourt' confine :operatingsystem => :archlinux defaultfor :operatingsystem => :archlinux has_feature :install_options has_feature :uninstall_options has_feature :upgradeable # If yaourt is installed, we can make use of it def yaourt? return Puppet::FileSystem.exist?('/usr/bin/yaourt') end # Install a package using 'pacman', or 'yaourt' if available. # Installs quietly, without confirmation or progressbar, updates package # list from servers defined in pacman.conf. def install if @resource[:source] install_from_file else install_from_repo end unless self.query raise Puppet::ExecutionFailure.new("Could not find package %s" % self.name) end end def install_from_repo if yaourt? cmd = %w{--noconfirm} cmd += install_options if @resource[:install_options] cmd << "-S" << @resource[:name] yaourt *cmd else cmd = %w{--noconfirm --noprogressbar} cmd += install_options if @resource[:install_options] cmd << "-Sy" << @resource[:name] pacman *cmd end end private :install_from_repo def install_from_file source = @resource[:source] begin source_uri = URI.parse source rescue => detail self.fail Puppet::Error, "Invalid source '#{source}': #{detail}", detail end source = case source_uri.scheme when nil then source when /https?/i then source when /ftp/i then source when /file/i then source_uri.path when /puppet/i fail "puppet:// URL is not supported by pacman" else fail "Source #{source} is not supported by pacman" end pacman "--noconfirm", "--noprogressbar", "-Sy" pacman "--noconfirm", "--noprogressbar", "-U", source end private :install_from_file def self.listcmd [command(:pacman), "-Q"] end # Pacman has a concept of package groups as well. # Package groups have no versions. 
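A hedged sketch of how the `install_from_file` scheme handling above resolves different `source` values (the package paths and URLs are invented; this mirrors, rather than reuses, the provider code):

    require 'uri'
    ['/srv/pkgs/foo-1.0-1-any.pkg.tar.xz',          # no scheme: used as-is
     'http://example.com/foo-1.0-1-any.pkg.tar.xz', # http/https/ftp: used as-is
     'file:///srv/pkgs/foo-1.0-1-any.pkg.tar.xz'    # file: reduced to a local path
    ].each do |source|
      uri = URI.parse(source)
      resolved = case uri.scheme
                 when nil, /https?/i, /ftp/i then source
                 when /file/i then uri.path
                 else 'unsupported'                  # puppet:// and anything else fail
                 end
      puts "#{source} -> #{resolved}"
    end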
def self.listgroupcmd [command(:pacman), "-Qg"] end # Get installed packages (pacman -Q) def self.installedpkgs packages = [] begin execpipe(listcmd()) do |process| # pacman -Q output is 'packagename version-rel' regex = %r{^(\S+)\s(\S+)} fields = [:name, :ensure] hash = {} process.each_line { |line| if match = regex.match(line) fields.zip(match.captures) { |field,value| hash[field] = value } hash[:provider] = self.name packages << new(hash) hash = {} else warning("Failed to match line %s" % line) end } end rescue Puppet::ExecutionFailure return nil end packages end # Get installed groups (pacman -Qg) def self.installedgroups packages = [] begin execpipe(listgroupcmd()) do |process| # pacman -Qg output is 'groupname packagename' # Groups need to be deduplicated groups = Set[] process.each_line { |line| groups.add(line.split[0]) } groups.each { |line| hash = { :name => line, :ensure => "1", # Groups don't have versions, so ensure => latest # will still cause a reinstall. :provider => self.name } packages << new(hash) } end rescue Puppet::ExecutionFailure return nil end packages end # Fetch the list of packages currently installed on the system. def self.instances packages = self.installedpkgs groups = self.installedgroups result = nil if (!packages && !groups) nil elsif (packages && groups) packages.concat(groups) else packages end end # Because Archlinux is a rolling release based distro, installing a package # should always result in the newest release. def update # Install in pacman can be used for update, too self.install end # We rescue the main check from Pacman with a check on the AUR using yaourt, if installed def latest pacman "-Sy" pacman_check = true # Query the main repos first begin if pacman_check output = pacman "-Sp", "--print-format", "%v", @resource[:name] return output.chomp else output = yaourt "-Qma", @resource[:name] output.split("\n").each do |line| return line.split[1].chomp if line =~ /^aur/ end end rescue Puppet::ExecutionFailure if pacman_check and self.yaourt? pacman_check = false # now try the AUR retry else raise end end end # Querys the pacman master list for information about the package. def query begin output = pacman("-Qi", @resource[:name]) if output =~ /Version.*:\s(.+)/ return { :ensure => $1 } end rescue Puppet::ExecutionFailure return { :ensure => :purged, :status => 'missing', :name => @resource[:name], :error => 'ok', } end nil end # Removes a package from the system. def uninstall cmd = %w{--noconfirm --noprogressbar} cmd += uninstall_options if @resource[:uninstall_options] cmd << "-R" << @resource[:name] pacman *cmd end private def install_options join_options(@resource[:install_options]) end def uninstall_options join_options(@resource[:uninstall_options]) end end diff --git a/lib/puppet/type/file/mode.rb b/lib/puppet/type/file/mode.rb index 8a5ce2077..6f104947b 100644 --- a/lib/puppet/type/file/mode.rb +++ b/lib/puppet/type/file/mode.rb @@ -1,163 +1,168 @@ # Manage file modes. This state should support different formats # for specification (e.g., u+rwx, or -0011), but for now only supports # specifying the full mode. module Puppet Puppet::Type.type(:file).newproperty(:mode) do require 'puppet/util/symbolic_file_mode' include Puppet::Util::SymbolicFileMode desc <<-'EOT' The desired permissions mode for the file, in symbolic or numeric - notation. Puppet uses traditional Unix permission schemes and translates + notation. This value should be specified as a quoted string; do not use + un-quoted numbers to represent file modes. 
+ + The `file` type uses traditional Unix permission schemes and translates them to equivalent permissions for systems which represent permissions - differently, including Windows. + differently, including Windows. For detailed ACL controls on Windows, + you can leave `mode` unmanaged and use + [the puppetlabs/acl module.](https://forge.puppetlabs.com/puppetlabs/acl) Numeric modes should use the standard four-digit octal notation of `` (e.g. 0644). Each of the "owner," "group," and "other" digits should be a sum of the permissions for that class of users, where read = 4, write = 2, and execute/search = 1. When setting numeric permissions for directories, Puppet sets the search permission wherever the read permission is set. Symbolic modes should be represented as a string of comma-separated permission clauses, in the form ``: * "Who" should be u (user), g (group), o (other), and/or a (all) * "Op" should be = (set exact permissions), + (add select permissions), or - (remove select permissions) * "Perm" should be one or more of: * r (read) * w (write) * x (execute/search) * t (sticky) * s (setuid/setgid) * X (execute/search if directory or if any one user can execute) * u (user's current permissions) * g (group's current permissions) * o (other's current permissions) Thus, mode `0664` could be represented symbolically as either `a=r,ug+w` or `ug=rw,o=r`. However, symbolic modes are more expressive than numeric modes: a mode only affects the specified bits, so `mode => 'ug+w'` will set the user and group write bits, without affecting any other bits. See the manual page for GNU or BSD `chmod` for more details on numeric and symbolic modes. On Windows, permissions are translated as follows: * Owner and group names are mapped to Windows SIDs * The "other" class of users maps to the "Everyone" SID * The read/write/execute permissions map to the `FILE_GENERIC_READ`, `FILE_GENERIC_WRITE`, and `FILE_GENERIC_EXECUTE` access rights; a file's owner always has the `FULL_CONTROL` right * "Other" users can't have any permissions a file's group lacks, and its group can't have any permissions its owner lacks; that is, 0644 is an acceptable mode, but 0464 is not. EOT validate do |value| if !value.is_a?(String) Puppet.deprecation_warning("Non-string values for the file mode property are deprecated. It must be a string, " \ "either a symbolic mode like 'o+w,a+r' or an octal representation like '0644' or '755'.") end unless value.nil? or valid_symbolic_mode?(value) raise Puppet::Error, "The file mode specification is invalid: #{value.inspect}" end end munge do |value| return nil if value.nil? unless valid_symbolic_mode?(value) raise Puppet::Error, "The file mode specification is invalid: #{value.inspect}" end normalize_symbolic_mode(value) end def desired_mode_from_current(desired, current) current = current.to_i(8) if current.is_a? String is_a_directory = @resource.stat && @resource.stat.directory? symbolic_mode_to_int(desired, current, is_a_directory) end # If we're a directory, we need to be executable for all cases # that are readable. This should probably be selectable, but eh. def dirmask(value) if FileTest.directory?(resource[:path]) and value =~ /^\d+$/ then value = value.to_i(8) value |= 0100 if value & 0400 != 0 value |= 010 if value & 040 != 0 value |= 01 if value & 04 != 0 value = value.to_s(8) end value end # If we're not following links and we're a link, then we just turn # off mode management entirely. 
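As an illustration of the `dirmask` bit arithmetic above (the mode "0640" is an arbitrary example, not from the original file):

    value = "0640".to_i(8)                # => 416
    value |= 0100 if value & 0400 != 0    # owner read adds owner search
    value |= 010  if value & 040  != 0    # group read adds group search
    value |= 01   if value & 04   != 0    # no other read, so no other search
    value.to_s(8)                         # => "750" -- directories gain search wherever they are readable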
def insync?(currentvalue) if stat = @resource.stat and stat.ftype == "link" and @resource[:links] != :follow self.debug "Not managing symlink mode" return true else return super(currentvalue) end end def property_matches?(current, desired) return false unless current current_bits = normalize_symbolic_mode(current) desired_bits = desired_mode_from_current(desired, current).to_s(8) current_bits == desired_bits end # Ideally, dirmask'ing could be done at munge time, but we don't know if 'ensure' # will eventually be a directory or something else. And unfortunately, that logic # depends on the ensure, source, and target properties. So rather than duplicate # that logic, and get it wrong, we do dirmask during retrieve, after 'ensure' has # been synced. def retrieve if @resource.stat @should &&= @should.collect { |s| self.dirmask(s) } end super end # Finally, when we sync the mode out we need to transform it; since we # don't have access to the calculated "desired" value here, or the # "current" value, only the "should" value we need to retrieve again. def sync current = @resource.stat ? @resource.stat.mode : 0644 set(desired_mode_from_current(@should[0], current).to_s(8)) end def change_to_s(old_value, desired) return super if desired =~ /^\d+$/ old_bits = normalize_symbolic_mode(old_value) new_bits = normalize_symbolic_mode(desired_mode_from_current(desired, old_bits)) super(old_bits, new_bits) + " (#{desired})" end def should_to_s(should_value) should_value.rjust(4, "0") end def is_to_s(currentvalue) if currentvalue == :absent # This can occur during audits---if a file is transitioning from # present to absent the mode will have a value of `:absent`. super else currentvalue.rjust(4, "0") end end end end diff --git a/lib/puppet/type/yumrepo.rb b/lib/puppet/type/yumrepo.rb index d9ff11d3b..028717071 100644 --- a/lib/puppet/type/yumrepo.rb +++ b/lib/puppet/type/yumrepo.rb @@ -1,360 +1,363 @@ require 'uri' Puppet::Type.newtype(:yumrepo) do @doc = "The client-side description of a yum repository. Repository configurations are found by parsing `/etc/yum.conf` and the files indicated by the `reposdir` option in that file (see `yum.conf(5)` for details). Most parameters are identical to the ones documented in the `yum.conf(5)` man page. Continuation lines that yum supports (for the `baseurl`, for example) are not supported. This type does not attempt to read or verify the exinstence of files listed in the `include` attribute." # Ensure yumrepos can be removed too. ensurable # Doc string for properties that can be made 'absent' ABSENT_DOC="Set this to `absent` to remove it from the file completely." # False can be false/0/no and True can be true/1/yes in yum. YUM_BOOLEAN=/^(True|False|0|1|No|Yes)$/i YUM_BOOLEAN_DOC="Valid values are: False/0/No or True/1/Yes." VALID_SCHEMES = %w[file http https ftp] newparam(:name, :namevar => true) do desc "The name of the repository. This corresponds to the `repositoryid` parameter in `yum.conf(5)`." end newparam(:target) do desc "The filename to write the yum repository to." defaultto :absent end newproperty(:descr) do desc "A human-readable description of the repository. This corresponds to the name parameter in `yum.conf(5)`. #{ABSENT_DOC}" newvalues(/.*/, :absent) end newproperty(:mirrorlist) do desc "The URL that holds the list of mirrors for this repository. 
#{ABSENT_DOC}" newvalues(/.*/, :absent) validate do |value| next if value.to_s == 'absent' parsed = URI.parse(value) unless VALID_SCHEMES.include?(parsed.scheme) raise "Must be a valid URL" end end end newproperty(:baseurl) do desc "The URL for this repository. #{ABSENT_DOC}" newvalues(/.*/, :absent) validate do |value| next if value.to_s == 'absent' value.split(/\s+/).each do |uri| parsed = URI.parse(uri) unless VALID_SCHEMES.include?(parsed.scheme) raise "Must be a valid URL" end end end end newproperty(:enabled) do desc "Whether this repository is enabled. #{YUM_BOOLEAN_DOC} #{ABSENT_DOC}" newvalues(YUM_BOOLEAN, :absent) end newproperty(:gpgcheck) do desc "Whether to check the GPG signature on packages installed from this repository. #{YUM_BOOLEAN_DOC} #{ABSENT_DOC}" newvalues(YUM_BOOLEAN, :absent) end newproperty(:repo_gpgcheck) do desc "Whether to check the GPG signature on repodata. #{YUM_BOOLEAN_DOC} #{ABSENT_DOC}" newvalues(YUM_BOOLEAN, :absent) end newproperty(:gpgkey) do desc "The URL for the GPG key with which packages from this repository are signed. #{ABSENT_DOC}" newvalues(/.*/, :absent) validate do |value| next if value.to_s == 'absent' value.split(/\s+/).each do |uri| parsed = URI.parse(uri) unless VALID_SCHEMES.include?(parsed.scheme) raise "Must be a valid URL" end end end end newproperty(:mirrorlist_expire) do desc "Time (in seconds) after which the mirrorlist locally cached will expire.\n#{ABSENT_DOC}" newvalues(/^[0-9]+$/, :absent) end newproperty(:include) do desc "The URL of a remote file containing additional yum configuration settings. Puppet does not check for this file's existence or validity. #{ABSENT_DOC}" newvalues(/.*/, :absent) validate do |value| next if value.to_s == 'absent' parsed = URI.parse(value) unless VALID_SCHEMES.include?(parsed.scheme) raise "Must be a valid URL" end end end newproperty(:exclude) do desc "List of shell globs. Matching packages will never be considered in updates or installs for this repo. #{ABSENT_DOC}" newvalues(/.*/, :absent) end newproperty(:gpgcakey) do desc "The URL for the GPG CA key for this repository. #{ABSENT_DOC}" newvalues(/.*/, :absent) validate do |value| next if value.to_s == 'absent' parsed = URI.parse(value) unless VALID_SCHEMES.include?(parsed.scheme) raise "Must be a valid URL" end end end newproperty(:includepkgs) do desc "List of shell globs. If this is set, only packages matching one of the globs will be considered for update or install from this repo. #{ABSENT_DOC}" newvalues(/.*/, :absent) end newproperty(:enablegroups) do desc "Whether yum will allow the use of package groups for this repository. #{YUM_BOOLEAN_DOC} #{ABSENT_DOC}" newvalues(YUM_BOOLEAN, :absent) end newproperty(:failovermethod) do desc "The failover method for this repository; should be either `roundrobin` or `priority`. #{ABSENT_DOC}" newvalues(/^roundrobin|priority$/, :absent) end newproperty(:keepalive) do desc "Whether HTTP/1.1 keepalive should be used with this repository. #{YUM_BOOLEAN_DOC} #{ABSENT_DOC}" newvalues(YUM_BOOLEAN, :absent) end newproperty(:retries) do desc "Set the number of times any attempt to retrieve a file should retry before returning an error. Setting this to `0` makes yum try forever.\n#{ABSENT_DOC}" newvalues(/^[0-9]+$/, :absent) end newproperty(:http_caching) do desc "What to cache from this repository. #{ABSENT_DOC}" newvalues(/^(packages|all|none)$/, :absent) end newproperty(:timeout) do desc "Number of seconds to wait for a connection before timing out. 
#{ABSENT_DOC}" newvalues(/^\d+$/, :absent) end newproperty(:metadata_expire) do desc "Number of seconds after which the metadata will expire. #{ABSENT_DOC}" newvalues(/^([0-9]+[dhm]?|never)$/, :absent) end newproperty(:protect) do desc "Enable or disable protection for this repository. Requires that the `protectbase` plugin is installed and enabled. #{YUM_BOOLEAN_DOC} #{ABSENT_DOC}" newvalues(YUM_BOOLEAN, :absent) end newproperty(:priority) do desc "Priority of this repository from 1-99. Requires that the `priorities` plugin is installed and enabled. #{ABSENT_DOC}" newvalues(/.*/, :absent) validate do |value| next if value.to_s == 'absent' unless (1..99).include?(value.to_i) fail("Must be within range 1-99") end end end newproperty(:throttle) do desc "Enable bandwidth throttling for downloads. This option can be expressed as a absolute data rate in bytes/sec or a percentage `60%`. An SI prefix (k, M or G) may be appended to the data rate values.\n#{ABSENT_DOC}" newvalues(/^\d+[kMG%]?$/, :absent) end newproperty(:bandwidth) do desc "Use to specify the maximum available network bandwidth in bytes/second. Used with the `throttle` option. If `throttle` is a percentage and `bandwidth` is `0` then bandwidth throttling will be disabled. If `throttle` is expressed as a data rate then this option is ignored.\n#{ABSENT_DOC}" newvalues(/^\d+[kMG]?$/, :absent) end newproperty(:cost) do desc "Cost of this repository. #{ABSENT_DOC}" newvalues(/^\d+$/, :absent) end newproperty(:proxy) do - desc "URL to the proxy server for this repository. #{ABSENT_DOC}" + desc "URL of a proxy server that Yum should use when accessing this repository. + This attribute can also be set to `'_none_'`, which will make Yum bypass any + global proxy settings when accessing this repository. + #{ABSENT_DOC}" newvalues(/.*/, :absent) validate do |value| next if value.to_s =~ /^(absent|_none_)$/ parsed = URI.parse(value) unless VALID_SCHEMES.include?(parsed.scheme) raise "Must be a valid URL" end end end newproperty(:proxy_username) do desc "Username for this proxy. #{ABSENT_DOC}" newvalues(/.*/, :absent) end newproperty(:proxy_password) do desc "Password for this proxy. #{ABSENT_DOC}" newvalues(/.*/, :absent) end newproperty(:s3_enabled) do desc "Access the repo via S3. #{YUM_BOOLEAN_DOC} #{ABSENT_DOC}" newvalues(YUM_BOOLEAN, :absent) end newproperty(:sslcacert) do desc "Path to the directory containing the databases of the certificate authorities yum should use to verify SSL certificates. #{ABSENT_DOC}" newvalues(/.*/, :absent) end newproperty(:sslverify) do desc "Should yum verify SSL certificates/hosts at all. #{YUM_BOOLEAN_DOC} #{ABSENT_DOC}" newvalues(YUM_BOOLEAN, :absent) end newproperty(:sslclientcert) do desc "Path to the SSL client certificate yum should use to connect to repos/remote sites. #{ABSENT_DOC}" newvalues(/.*/, :absent) end newproperty(:sslclientkey) do desc "Path to the SSL client key yum should use to connect to repos/remote sites. #{ABSENT_DOC}" newvalues(/.*/, :absent) end newproperty(:metalink) do desc "Metalink for mirrors. #{ABSENT_DOC}" newvalues(/.*/, :absent) validate do |value| next if value.to_s == 'absent' parsed = URI.parse(value) unless VALID_SCHEMES.include?(parsed.scheme) raise "Must be a valid URL" end end end newproperty(:skip_if_unavailable) do desc "Should yum skip this repository if unable to reach it. 
#{YUM_BOOLEAN_DOC} #{ABSENT_DOC}" newvalues(YUM_BOOLEAN, :absent) end end diff --git a/lib/puppet/vendor.rb b/lib/puppet/vendor.rb index fdaf5c053..7c6be4e80 100644 --- a/lib/puppet/vendor.rb +++ b/lib/puppet/vendor.rb @@ -1,55 +1,57 @@ module Puppet # Simple module to manage vendored code. # # To vendor a library: # # * Download its whole git repo or untar into `lib/puppet/vendor/` - # * Create a lib/puppetload_libraryname.rb file to add its libdir into the $:. + # * Create a vendor/puppetload_libraryname.rb file to add its libdir into the $:. # (Look at existing load_xxx files, they should all follow the same pattern). + # * Add a /PUPPET_README.md file describing what the library is for + # and where it comes from. # * To load the vendored lib upfront, add a `require ''`line to # `vendor/require_vendored.rb`. # * To load the vendored lib on demand, add a comment to `vendor/require_vendored.rb` # to make it clear it should not be loaded upfront. # # At runtime, the #load_vendored method should be called. It will ensure # all vendored libraries are added to the global `$:` path, and # will then call execute the up-front loading specified in `vendor/require_vendored.rb`. # # The intention is to not change vendored libraries and to eventually # make adding them in optional so that distros can simply adjust their # packaging to exclude this directory and the various load_xxx.rb scripts # if they wish to install these gems as native packages. # class Vendor class << self # @api private def vendor_dir File.join([File.dirname(File.expand_path(__FILE__)), "vendor"]) end # @api private def load_entry(entry) Puppet.debug("Loading vendored #{$1}") load "#{vendor_dir}/#{entry}" end # @api private def require_libs require 'puppet/vendor/require_vendored' end # Configures the path for all vendored libraries and loads required libraries. # (This is the entry point for loading vendored libraries). # def load_vendored Dir.entries(vendor_dir).each do |entry| if entry.match(/load_(\w+?)\.rb$/) load_entry entry end end require_libs end end end end diff --git a/lib/puppet/vendor/load_pathspec.rb b/lib/puppet/vendor/load_pathspec.rb new file mode 100644 index 000000000..afdbc2279 --- /dev/null +++ b/lib/puppet/vendor/load_pathspec.rb @@ -0,0 +1 @@ +$: << File.join([File.dirname(__FILE__), "pathspec/lib"]) diff --git a/lib/puppet/vendor/pathspec/CHANGELOG.md b/lib/puppet/vendor/pathspec/CHANGELOG.md new file mode 100644 index 000000000..e4fe66d91 --- /dev/null +++ b/lib/puppet/vendor/pathspec/CHANGELOG.md @@ -0,0 +1,2 @@ +0.0.2: Misc. Windows/regex fixes. +0.0.1: Initial version. diff --git a/lib/puppet/vendor/pathspec/LICENSE b/lib/puppet/vendor/pathspec/LICENSE new file mode 100644 index 000000000..5c304d1a4 --- /dev/null +++ b/lib/puppet/vendor/pathspec/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/lib/puppet/vendor/pathspec/PUPPET_README.md b/lib/puppet/vendor/pathspec/PUPPET_README.md new file mode 100644 index 000000000..e7ceddec7 --- /dev/null +++ b/lib/puppet/vendor/pathspec/PUPPET_README.md @@ -0,0 +1,6 @@ +Pathspec-ruby - Gitignore parsing in Ruby +============================================= + +Pathspec-ruby version 0.0.2 + +Copied from https://github.com/highb/pathspec-ruby/releases/tag/0.0.2 diff --git a/lib/puppet/vendor/pathspec/README.md b/lib/puppet/vendor/pathspec/README.md new file mode 100644 index 000000000..ecbd64d3d --- /dev/null +++ b/lib/puppet/vendor/pathspec/README.md @@ -0,0 +1,53 @@ +pathspec-ruby +============= + +Match Path Specifications, such as .gitignore, in Ruby! + +Follows .gitignore syntax defined on [gitscm](http://git-scm.com/docs/gitignore) + +.gitignore functionality ported from [Python pathspec](https://pypi.python.org/pypi/pathspec/0.2.2) by [@cpburnz](https://github.com/cpburnz/python-path-specification) + +[Travis Status](https://travis-ci.org/highb/pathspec-ruby) ![Travis CI Status](https://travis-ci.org/highb/pathspec-ruby.svg?branch=master) + +## Build/Install from Rubygems +```shell +gem install pathspec +``` + +## Usage +```ruby +require 'pathspec' + +# Create a .gitignore-style Pathspec by giving it newline separated gitignore +# lines, an array of gitignore lines, or any other enumable object that will +# give strings matching the .gitignore-style (File, etc.) +gitignore = Pathspec.new File.read('.gitignore', 'r') + +# Our .gitignore in this example contains: +# !**/important.txt +# abc/** + +# true, matches "abc/**" +gitignore.match 'abc/def.rb' + +# false, because it has been negated using the line "!**/important.txt" +gitignore.match 'abc/important.txt' + +# Give a path somewhere in the filesystem, and the Pathspec will return all +# matching files underneath. +# Returns ['/src/repo/abc/', '/src/repo/abc/123'] +gitignore.match_tree '/src/repo' + +# Give an enumerable of paths, and Pathspec will return the ones that match. +# Returns ['/abc/123', '/abc/'] +gitignore.match_paths ['/abc/123', '/abc/important.txt', '/abc/'] +``` + +## Building/Installing from Source +```shell +git clone git@github.com:highb/pathspec-ruby.git +cd pathspec-ruby && bash ./build_from_source.sh +``` + +## Contributing +Pull requests, bug reports, and feature requests welcome! :smile: I've tried to write exhaustive tests but who knows what cases I've missed. diff --git a/lib/puppet/vendor/pathspec/lib/pathspec.rb b/lib/puppet/vendor/pathspec/lib/pathspec.rb new file mode 100644 index 000000000..9d9a92837 --- /dev/null +++ b/lib/puppet/vendor/pathspec/lib/pathspec.rb @@ -0,0 +1,121 @@ +require 'pathspec/gitignorespec' +require 'pathspec/regexspec' +require 'find' +require 'pathname' + +class PathSpec + attr_reader :specs + + def initialize(lines=nil, type=:git) + @specs = [] + + if lines + add(lines, type) + end + + self + end + + # Check if a path matches the pathspecs described + # Returns true if there are matches and none are excluded + # Returns false if there aren't matches or none are included + def match(path) + matches = specs_matching(path.to_s) + !matches.empty? && matches.all? 
{|m| m.inclusive?} + end + + def specs_matching(path) + @specs.select do |spec| + if spec.match(path) + spec + end + end + end + + # Check if any files in a given directory or subdirectories match the specs + # Returns matched paths or nil if no paths matched + def match_tree(root) + rootpath = Pathname.new(root) + matching = [] + + Find.find(root) do |path| + relpath = Pathname.new(path).relative_path_from(rootpath).to_s + relpath += '/' if File.directory? path + if match(relpath) + matching << path + end + end + + matching + end + + def match_path(path, root='/') + rootpath = Pathname.new(drive_letter_to_path(root)) + relpath = Pathname.new(drive_letter_to_path(path)).relative_path_from(rootpath).to_s + relpath = relpath + '/' if path[-1].chr == '/' + + match(relpath) + end + + def match_paths(paths, root='/') + matching = [] + + paths.each do |path| + if match_path(path, root) + matching << path + end + end + + matching + end + + def drive_letter_to_path(path) + path.gsub(/^([a-zA-z]):\//, '/\1/') + end + + # Generate specs from a filename, such as a .gitignore + def self.from_filename(filename, type=:git) + self.from_lines(File.open(filename, 'r')) + end + + def self.from_lines(lines, type=:git) + self.new lines, type + end + + # Generate specs from lines of text + def add(obj, type=:git) + spec_class = spec_type(type) + + if obj.respond_to?(:each_line) + obj.each_line do |l| + spec = spec_class.new(l.rstrip) + + if !spec.regex.nil? && !spec.inclusive?.nil? + @specs << spec + end + end + elsif obj.respond_to?(:each) + obj.each do |l| + add(l, type) + end + else + raise 'Cannot make Pathspec from non-string/non-enumerable object.' + end + + self + end + + def empty? + @specs.empty? + end + + def spec_type(type) + if type == :git + GitIgnoreSpec + elsif type == :regex + RegexSpec + else + raise "Unknown spec type #{type}" + end + end +end diff --git a/lib/puppet/vendor/pathspec/lib/pathspec/gitignorespec.rb b/lib/puppet/vendor/pathspec/lib/pathspec/gitignorespec.rb new file mode 100644 index 000000000..f7a94b359 --- /dev/null +++ b/lib/puppet/vendor/pathspec/lib/pathspec/gitignorespec.rb @@ -0,0 +1,275 @@ +# encoding: utf-8 + +require 'pathspec/regexspec' + +class GitIgnoreSpec < RegexSpec + attr_reader :regex + + def initialize(pattern) + pattern = pattern.strip unless pattern.nil? + + # A pattern starting with a hash ('#') serves as a comment + # (neither includes nor excludes files). Escape the hash with a + # back-slash to match a literal hash (i.e., '\#'). + if pattern.start_with?('#') + @regex = nil + @inclusive = nil + + # A blank pattern is a null-operation (neither includes nor + # excludes files). + elsif pattern.empty? + @regex = nil + @inclusive = nil + + # Patterns containing three or more consecutive stars are invalid and + # will be ignored. + elsif pattern =~ /\*\*\*+/ + @regex = nil + @inclusive = nil + + # We have a valid pattern! + else + # A pattern starting with an exclamation mark ('!') negates the + # pattern (exclude instead of include). Escape the exclamation + # mark with a back-slash to match a literal exclamation mark + # (i.e., '\!'). + if pattern.start_with?('!') + @inclusive = false + # Remove leading exclamation mark. + pattern = pattern[1..-1] + else + @inclusive = true + end + + # Remove leading back-slash escape for escaped hash ('#') or + # exclamation mark ('!'). + if pattern.start_with?('\\') + pattern = pattern[1..-1] + end + + # Split pattern into segments. -1 to allow trailing slashes. 
+ pattern_segs = pattern.split('/', -1) + + # Normalize pattern to make processing easier. + + # A pattern beginning with a slash ('/') will only match paths + # directly on the root directory instead of any descendant + # paths. So, remove empty first segment to make pattern relative + # to root. + if pattern_segs[0].empty? + pattern_segs.shift + else + # A pattern without a beginning slash ('/') will match any + # descendant path. This is equivilent to "**/{pattern}". So, + # prepend with double-asterisks to make pattern relative to + # root. + if pattern_segs.length == 1 && pattern_segs[0] != '**' + pattern_segs.insert(0, '**') + end + end + + # A pattern ending with a slash ('/') will match all descendant + # paths of if it is a directory but not if it is a regular file. + # This is equivilent to "{pattern}/**". So, set last segment to + # double asterisks to include all descendants. + if pattern_segs[-1].empty? + pattern_segs[-1] = '**' + end + + # Handle platforms with backslash separated paths + if File::SEPARATOR == '\\' + path_sep = '\\\\' + else + path_sep = '/' + end + + + # Build regular expression from pattern. + regex = '^' + need_slash = false + regex_end = pattern_segs.size - 1 + pattern_segs.each_index do |i| + seg = pattern_segs[i] + + if seg == '**' + # A pattern consisting solely of double-asterisks ('**') + # will match every path. + if i == 0 && i == regex_end + regex.concat('.+') + + # A normalized pattern beginning with double-asterisks + # ('**') will match any leading path segments. + elsif i == 0 + regex.concat("(?:.+#{path_sep})?") + need_slash = false + + # A normalized pattern ending with double-asterisks ('**') + # will match any trailing path segments. + elsif i == regex_end + regex.concat("#{path_sep}.*") + + # A pattern with inner double-asterisks ('**') will match + # multiple (or zero) inner path segments. + else + regex.concat("(?:#{path_sep}.+)?") + need_slash = true + end + + # Match single path segment. + elsif seg == '*' + if need_slash + regex.concat(path_sep) + end + + regex.concat("[^#{path_sep}]+") + need_slash = true + + else + # Match segment glob pattern. + if need_slash + regex.concat(path_sep) + end + + regex.concat(translate_segment_glob(seg)) + need_slash = true + end + end + + regex.concat('$') + super(regex) + end + end + + def match(path) + super(path) + end + + def translate_segment_glob(pattern) + """ + Translates the glob pattern to a regular expression. This is used in + the constructor to translate a path segment glob pattern to its + corresponding regular expression. + + *pattern* (``str``) is the glob pattern. + + Returns the regular expression (``str``). + """ + # NOTE: This is derived from `fnmatch.translate()` and is similar to + # the POSIX function `fnmatch()` with the `FNM_PATHNAME` flag set. + + escape = false + regex = '' + i = 0 + + while i < pattern.size + # Get next character. + char = pattern[i].chr + i += 1 + + # Escape the character. + if escape + escape = false + regex += Regexp.escape(char) + + # Escape character, escape next character. + elsif char == '\\' + escape = true + + # Multi-character wildcard. Match any string (except slashes), + # including an empty string. + elsif char == '*' + regex += '[^/]*' + + # Single-character wildcard. Match any single character (except + # a slash). + elsif char == '?' + regex += '[^/]' + + # Braket expression wildcard. 
Except for the beginning + # exclamation mark, the whole braket expression can be used + # directly as regex but we have to find where the expression + # ends. + # - "[][!]" matchs ']', '[' and '!'. + # - "[]-]" matchs ']' and '-'. + # - "[!]a-]" matchs any character except ']', 'a' and '-'. + elsif char == '[' + j = i + # Pass brack expression negation. + if j < pattern.size && pattern[j].chr == '!' + j += 1 + end + + # Pass first closing braket if it is at the beginning of the + # expression. + if j < pattern.size && pattern[j].chr == ']' + j += 1 + end + + # Find closing braket. Stop once we reach the end or find it. + while j < pattern.size && pattern[j].chr != ']' + j += 1 + end + + + if j < pattern.size + expr = '[' + + # Braket expression needs to be negated. + if pattern[i].chr == '!' + expr += '^' + i += 1 + + # POSIX declares that the regex braket expression negation + # "[^...]" is undefined in a glob pattern. Python's + # `fnmatch.translate()` escapes the caret ('^') as a + # literal. To maintain consistency with undefined behavior, + # I am escaping the '^' as well. + elsif pattern[i].chr == '^' + expr += '\\^' + i += 1 + end + + # Escape brackets contained within pattern + if pattern[i].chr == ']' && i != j + expr += '\]' + i += 1 + end + + + # Build regex braket expression. Escape slashes so they are + # treated as literal slashes by regex as defined by POSIX. + expr += pattern[i..j].sub('\\', '\\\\') + + # Add regex braket expression to regex result. + regex += expr + + # Found end of braket expression. Increment j to be one past + # the closing braket: + # + # [...] + # ^ ^ + # i j + # + j += 1 + # Set i to one past the closing braket. + i = j + + # Failed to find closing braket, treat opening braket as a + # braket literal instead of as an expression. + else + regex += '\[' + end + + # Regular character, escape it for regex. + else + regex << Regexp.escape(char) + end + end + + regex + end + + def inclusive? + @inclusive + end +end diff --git a/lib/puppet/vendor/pathspec/lib/pathspec/regexspec.rb b/lib/puppet/vendor/pathspec/lib/pathspec/regexspec.rb new file mode 100644 index 000000000..af043f445 --- /dev/null +++ b/lib/puppet/vendor/pathspec/lib/pathspec/regexspec.rb @@ -0,0 +1,17 @@ +require 'pathspec/spec' + +class RegexSpec < Spec + def initialize(regex) + @regex = Regexp.compile regex + + super + end + + def inclusive? + true + end + + def match(path) + @regex.match(path) if @regex + end +end diff --git a/lib/puppet/vendor/pathspec/lib/pathspec/spec.rb b/lib/puppet/vendor/pathspec/lib/pathspec/spec.rb new file mode 100644 index 000000000..756588b1f --- /dev/null +++ b/lib/puppet/vendor/pathspec/lib/pathspec/spec.rb @@ -0,0 +1,14 @@ +class Spec + attr_reader :regex + + def initialize(*_) + end + + def match(files) + raise "Unimplemented" + end + + def inclusive? + true + end +end diff --git a/lib/puppet/vendor/require_vendored.rb b/lib/puppet/vendor/require_vendored.rb index d0bcad6ee..d9f38fabc 100644 --- a/lib/puppet/vendor/require_vendored.rb +++ b/lib/puppet/vendor/require_vendored.rb @@ -1,8 +1,9 @@ # This adds upfront requirements on vendored code found under lib/vendor/x # Add one requirement per vendored package (or a comment if it is loaded on demand). require 'safe_yaml' require 'puppet/vendor/safe_yaml_patches' # The vendored library 'semantic' is loaded on demand. # The vendored library 'rgen' is loaded on demand. +# The vendored library 'pathspec' is loaded on demand. 
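For orientation, the vendored library above is driven through a small API: `PathSpec.new` (or `PathSpec.from_lines` / `PathSpec.from_filename`) builds a list of `GitIgnoreSpec`s from `.gitignore`-style lines, `#match` answers whether a single relative path is ignored, and `#match_paths` filters a list of paths against a root. The sketch below is illustrative only and is not part of this changeset; it assumes `lib/puppet/vendor/pathspec/lib` is on the load path (inside Puppet, `Puppet::Vendor.load_vendored` together with `load_pathspec.rb` above arranges that).

```ruby
# Minimal sketch of the vendored PathSpec API, assumed to be run from a
# Puppet source checkout. The load-path line is an assumption for standalone
# use; Puppet itself relies on Puppet::Vendor.load_vendored instead.
$LOAD_PATH.unshift File.expand_path('lib/puppet/vendor/pathspec/lib')
require 'pathspec'

# Build a spec from .gitignore-style lines; a String, a File, or any
# enumerable of lines is accepted.
ignore = PathSpec.new <<-IGNORE
gitignored.*
gitdirectory/sub/**
!gitimportantfile
IGNORE

ignore.match('gitignored.foo')                # => true  (matched by "gitignored.*")
ignore.match('gitdirectory/gitimportantfile') # => false (negated by "!gitimportantfile")

# Filter absolute paths against the default root '/':
ignore.match_paths(['/gitignored.foo', '/kept.txt']) # => ["/gitignored.foo"]
```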
diff --git a/spec/unit/module_tool/applications/builder_spec.rb b/spec/unit/module_tool/applications/builder_spec.rb index c301afcf2..8982040ea 100644 --- a/spec/unit/module_tool/applications/builder_spec.rb +++ b/spec/unit/module_tool/applications/builder_spec.rb @@ -1,110 +1,378 @@ require 'spec_helper' require 'puppet/file_system' require 'puppet/module_tool/applications' require 'puppet_spec/modules' describe Puppet::ModuleTool::Applications::Builder do include PuppetSpec::Files let(:path) { tmpdir("working_dir") } let(:module_name) { 'mymodule-mytarball' } let(:version) { '0.0.1' } let(:release_name) { "#{module_name}-#{version}" } let(:tarball) { File.join(path, 'pkg', release_name) + ".tar.gz" } let(:builder) { Puppet::ModuleTool::Applications::Builder.new(path) } shared_examples "a packagable module" do def target_exists?(file) File.exist?(File.join(path, "pkg", "#{module_name}-#{version}", file)) end - it "packages the module in a tarball named after the module" do + def build tarrer = mock('tarrer') Puppet::ModuleTool::Tar.expects(:instance).returns(tarrer) Dir.expects(:chdir).with(File.join(path, 'pkg')).yields tarrer.expects(:pack).with(release_name, tarball) builder.run + end + + def create_regular_files + Puppet::FileSystem.touch(File.join(path, '.dotfile')) + Puppet::FileSystem.touch(File.join(path, 'file.foo')) + Puppet::FileSystem.touch(File.join(path, 'REVISION')) + Puppet::FileSystem.touch(File.join(path, '~file')) + Puppet::FileSystem.touch(File.join(path, '#file')) + Puppet::FileSystem.mkpath(File.join(path, 'pkg')) + Puppet::FileSystem.mkpath(File.join(path, 'coverage')) + Puppet::FileSystem.mkpath(File.join(path, 'sub')) + Puppet::FileSystem.touch(File.join(path, 'sub/.dotfile')) + Puppet::FileSystem.touch(File.join(path, 'sub/file.foo')) + Puppet::FileSystem.touch(File.join(path, 'sub/REVISION')) + Puppet::FileSystem.touch(File.join(path, 'sub/~file')) + Puppet::FileSystem.touch(File.join(path, 'sub/#file')) + Puppet::FileSystem.mkpath(File.join(path, 'sub/pkg')) + Puppet::FileSystem.mkpath(File.join(path, 'sub/coverage')) + end + + def create_ignored_files + Puppet::FileSystem.touch(File.join(path, 'gitignored.foo')) + Puppet::FileSystem.mkpath(File.join(path, 'gitdirectory/sub')) + Puppet::FileSystem.touch(File.join(path, 'gitdirectory/gitartifact')) + Puppet::FileSystem.touch(File.join(path, 'gitdirectory/gitimportantfile')) + Puppet::FileSystem.touch(File.join(path, 'gitdirectory/sub/artifact')) + Puppet::FileSystem.touch(File.join(path, 'pmtignored.foo')) + Puppet::FileSystem.mkpath(File.join(path, 'pmtdirectory/sub')) + Puppet::FileSystem.touch(File.join(path, 'pmtdirectory/pmtimportantfile')) + Puppet::FileSystem.touch(File.join(path, 'pmtdirectory/pmtartifact')) + Puppet::FileSystem.touch(File.join(path, 'pmtdirectory/sub/artifact')) + end + + def create_pmtignore_file + Puppet::FileSystem.open(File.join(path, '.pmtignore'), 0600, 'w') do |f| + f << <<-PMTIGNORE +pmtignored.* +pmtdirectory/sub/** +pmtdirectory/pmt* +!pmtimportantfile +PMTIGNORE + end + end + + def create_gitignore_file + Puppet::FileSystem.open(File.join(path, '.gitignore'), 0600, 'w') do |f| + f << <<-GITIGNORE +gitignored.* +gitdirectory/sub/** +gitdirectory/git* +!gitimportantfile +GITIGNORE + end + end + + shared_examples "regular files are present" do + it "has metadata" do + expect(target_exists?('metadata.json')).to eq true + end + + it "has checksums" do + expect(target_exists?('checksums.json')).to eq true + end + + it "copies regular files" do + 
expect(target_exists?('file.foo')).to eq true + end + end + + shared_examples "default artifacts are removed in module dir but not in subdirs" do + it "ignores dotfiles" do + expect(target_exists?('.dotfile')).to eq false + expect(target_exists?('sub/.dotfile')).to eq true + end + + it "does not have .gitignore" do + expect(target_exists?('.gitignore')).to eq false + end + + it "does not have .pmtignore" do + expect(target_exists?('.pmtignore')).to eq false + end + + it "does not have pkg" do + expect(target_exists?('pkg')).to eq false + expect(target_exists?('sub/pkg')).to eq true + end + + it "does not have coverage" do + expect(target_exists?('coverage')).to eq false + expect(target_exists?('sub/coverage')).to eq true + end + + it "does not have REVISION" do + expect(target_exists?('REVISION')).to eq false + expect(target_exists?('sub/REVISION')).to eq true + end + + it "does not have ~files" do + expect(target_exists?('~file')).to eq false + expect(target_exists?('sub/~file')).to eq true + end + + it "does not have #files" do + expect(target_exists?('#file')).to eq false + expect(target_exists?('sub/#file')).to eq true + end + end + + shared_examples "gitignored files are present" do + it "leaves regular files" do + expect(target_exists?('gitignored.foo')).to eq true + end + + it "leaves directories" do + expect(target_exists?('gitdirectory')).to eq true + end + + it "leaves files in directories" do + expect(target_exists?('gitdirectory/gitartifact')).to eq true + end + + it "leaves exceptional files" do + expect(target_exists?('gitdirectory/gitimportantfile')).to eq true + end + + it "leaves subdirectories" do + expect(target_exists?('gitdirectory/sub')).to eq true + end + + it "leaves files in subdirectories" do + expect(target_exists?('gitdirectory/sub/artifact')).to eq true + end + end + + shared_examples "gitignored files are not present" do + it "ignores regular files" do + expect(target_exists?('gitignored.foo')).to eq false + end + + it "ignores directories" do + expect(target_exists?('gitdirectory')).to eq true + end + + it "ignores files in directories" do + expect(target_exists?('gitdirectory/gitartifact')).to eq false + end + + it "copies exceptional files" do + expect(target_exists?('gitdirectory/gitimportantfile')).to eq true + end + + it "ignores subdirectories" do + expect(target_exists?('gitdirectory/sub')).to eq false + end + + it "ignores files in subdirectories" do + expect(target_exists?('gitdirectory/sub/artifact')).to eq false + end + end + + shared_examples "pmtignored files are present" do + it "leaves regular files" do + expect(target_exists?('pmtignored.foo')).to eq true + end + + it "leaves directories" do + expect(target_exists?('pmtdirectory')).to eq true + end + + it "ignores files in directories" do + expect(target_exists?('pmtdirectory/pmtartifact')).to eq true + end + + it "leaves exceptional files" do + expect(target_exists?('pmtdirectory/pmtimportantfile')).to eq true + end + + it "leaves subdirectories" do + expect(target_exists?('pmtdirectory/sub')).to eq true + end + + it "leaves files in subdirectories" do + expect(target_exists?('pmtdirectory/sub/artifact')).to eq true + end + end + + shared_examples "pmtignored files are not present" do + it "ignores regular files" do + expect(target_exists?('pmtignored.foo')).to eq false + end + + it "ignores directories" do + expect(target_exists?('pmtdirectory')).to eq true + end + + it "copies exceptional files" do + expect(target_exists?('pmtdirectory/pmtimportantfile')).to eq true + end + + it "ignores 
files in directories" do + expect(target_exists?('pmtdirectory/pmtartifact')).to eq false + end + + it "ignores subdirectories" do + expect(target_exists?('pmtdirectory/sub')).to eq false + end + + it "ignores files in subdirectories" do + expect(target_exists?('pmtdirectory/sub/artifact')).to eq false + end + end + + context "with no ignore files" do + before :each do + create_regular_files + create_ignored_files + + build + end + + it_behaves_like "regular files are present" + it_behaves_like "default artifacts are removed in module dir but not in subdirs" + it_behaves_like "pmtignored files are present" + it_behaves_like "gitignored files are present" + end + + context "with .gitignore file" do + before :each do + create_regular_files + create_ignored_files + create_gitignore_file + + build + end + + it_behaves_like "regular files are present" + it_behaves_like "default artifacts are removed in module dir but not in subdirs" + it_behaves_like "pmtignored files are present" + it_behaves_like "gitignored files are not present" + end + + context "with .pmtignore file" do + before :each do + create_regular_files + create_ignored_files + create_pmtignore_file + + build + end + + it_behaves_like "regular files are present" + it_behaves_like "default artifacts are removed in module dir but not in subdirs" + it_behaves_like "gitignored files are present" + it_behaves_like "pmtignored files are not present" + end + + context "with .pmtignore and .gitignore file" do + before :each do + create_regular_files + create_ignored_files + create_pmtignore_file + create_gitignore_file + + build + end - expect(target_exists?('checksums.json')).to be true - expect(target_exists?('metadata.json')).to be true + it_behaves_like "regular files are present" + it_behaves_like "default artifacts are removed in module dir but not in subdirs" + it_behaves_like "gitignored files are present" + it_behaves_like "pmtignored files are not present" end end context 'with metadata.json' do before :each do File.open(File.join(path, 'metadata.json'), 'w') do |f| f.puts({ "name" => "#{module_name}", "version" => "#{version}", "source" => "http://github.com/testing/#{module_name}", "author" => "testing", "license" => "Apache License Version 2.0", "summary" => "Puppet testing module", "description" => "This module can be used for basic testing", "project_page" => "http://github.com/testing/#{module_name}" }.to_json) end end it_behaves_like "a packagable module" it "does not package with a symlink", :if => Puppet.features.manages_symlinks? do FileUtils.touch(File.join(path, 'tempfile')) Puppet::FileSystem.symlink(File.join(path, 'tempfile'), File.join(path, 'tempfile2')) expect { builder.run }.to raise_error Puppet::ModuleTool::Errors::ModuleToolError, /symlinks/i end it "does not package with a symlink in a subdir", :if => Puppet.features.manages_symlinks? 
do FileUtils.mkdir(File.join(path, 'manifests')) FileUtils.touch(File.join(path, 'manifests/tempfile.pp')) Puppet::FileSystem.symlink(File.join(path, 'manifests/tempfile.pp'), File.join(path, 'manifests/tempfile2.pp')) expect { builder.run }.to raise_error Puppet::ModuleTool::Errors::ModuleToolError, /symlinks/i end end context 'with metadata.json containing checksums' do before :each do File.open(File.join(path, 'metadata.json'), 'w') do |f| f.puts({ "name" => "#{module_name}", "version" => "#{version}", "source" => "http://github.com/testing/#{module_name}", "author" => "testing", "license" => "Apache License Version 2.0", "summary" => "Puppet testing module", "description" => "This module can be used for basic testing", "project_page" => "http://github.com/testing/#{module_name}", "checksums" => {"README.md" => "deadbeef"} }.to_json) end end it_behaves_like "a packagable module" end context 'with Modulefile' do before :each do File.open(File.join(path, 'Modulefile'), 'w') do |f| f.write <<-MODULEFILE name '#{module_name}' version '#{version}' source 'http://github.com/testing/#{module_name}' author 'testing' license 'Apache License Version 2.0' summary 'Puppet testing module' description 'This module can be used for basic testing' project_page 'http://github.com/testing/#{module_name}' MODULEFILE end end it_behaves_like "a packagable module" end end
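The new builder specs above exercise this matching end to end: whatever a module's `.pmtignore` (or `.gitignore`) matches is absent from the `pkg/` staging directory, and `!` negations re-include files, presumably via the vendored PathSpec (the Builder change itself is outside this excerpt). As a rough cross-check, not part of the spec and under the same standalone load-path assumption as the earlier sketch, the `.pmtignore` fixture written by `create_pmtignore_file` maps onto PathSpec like this:

```ruby
# Rough cross-check of the .pmtignore fixture against the vendored PathSpec.
# The load-path line is an assumption for running this outside Puppet.
$LOAD_PATH.unshift File.expand_path('lib/puppet/vendor/pathspec/lib')
require 'pathspec'

pmtignore = PathSpec.new <<-PMTIGNORE
pmtignored.*
pmtdirectory/sub/**
pmtdirectory/pmt*
!pmtimportantfile
PMTIGNORE

pmtignore.match('pmtignored.foo')                # => true  -> excluded from pkg/
pmtignore.match('pmtdirectory/sub/artifact')     # => true  -> excluded from pkg/
pmtignore.match('pmtdirectory/pmtimportantfile') # => false -> packaged ("!pmtimportantfile" wins)
```

These three results line up with the shared examples above: the first two files are expected to be missing from the built package, while the exceptional file is expected to be copied.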