diff --git a/Rakefile b/Rakefile index a28b868fd..f242afa2b 100644 --- a/Rakefile +++ b/Rakefile @@ -1,66 +1,68 @@ # Rakefile for Puppet -*- ruby -*- # We need access to the Puppet.version method $LOAD_PATH.unshift(File.expand_path("lib")) require 'puppet/version' $LOAD_PATH << File.join(File.dirname(__FILE__), 'tasks') begin require 'rubygems' require 'rubygems/package_task' + require 'rspec' + require 'rspec/core/rake_task' rescue LoadError # Users of older versions of Rake (0.8.7 for example) will not necessarily # have rubygems installed, or the newer rubygems package_task for that # matter. require 'rake/packagetask' require 'rake/gempackagetask' end require 'rake' -require 'rspec' -require "rspec/core/rake_task" Dir['tasks/**/*.rake'].each { |t| load t } Dir['ext/packaging/tasks/**/*'].sort.each { |t| load t } build_defs_file = 'ext/build_defaults.yaml' if File.exist?(build_defs_file) begin require 'yaml' @build_defaults ||= YAML.load_file(build_defs_file) rescue Exception => e STDERR.puts "Unable to load yaml from #{build_defs_file}:" STDERR.puts e end @packaging_url = @build_defaults['packaging_url'] @packaging_repo = @build_defaults['packaging_repo'] raise "Could not find packaging url in #{build_defs_file}" if @packaging_url.nil? raise "Could not find packaging repo in #{build_defs_file}" if @packaging_repo.nil? namespace :package do desc "Bootstrap packaging automation, e.g. clone into packaging repo" task :bootstrap do if File.exist?("ext/#{@packaging_repo}") puts "It looks like you already have ext/#{@packaging_repo}. If you don't like it, blow it away with package:implode." else cd 'ext' do %x{git clone #{@packaging_url}} end end end desc "Remove all cloned packaging automation" task :implode do rm_rf "ext/#{@packaging_repo}" end end end task :default do sh %{rake -T} end -RSpec::Core::RakeTask.new do |t| - t.pattern ='spec/{unit,integration}/**/*.rb' - t.fail_on_error = true +if defined?(RSpec::Core::RakeTask) + RSpec::Core::RakeTask.new do |t| + t.pattern ='spec/{unit,integration}/**/*.rb' + t.fail_on_error = true + end end diff --git a/lib/puppet/defaults.rb b/lib/puppet/defaults.rb index b6e240b22..6e40ff341 100644 --- a/lib/puppet/defaults.rb +++ b/lib/puppet/defaults.rb @@ -1,1499 +1,1512 @@ # The majority of Puppet's configuration settings are set in this file. module Puppet ############################################################################################ # NOTE: For information about the available values for the ":type" property of settings, # see the docs for Settings.define_settings ############################################################################################ define_settings(:main, :confdir => { :default => nil, :type => :directory, :desc => "The main Puppet configuration directory. The default for this setting is calculated based on the user. If the process\n" + "is running as root or the user that Puppet is supposed to run as, it defaults to a system directory, but if it's running as any other user,\n" + "it defaults to being in the user's home directory.", }, :vardir => { :default => nil, :type => :directory, :desc => "Where Puppet stores dynamic and growing data. The default for this setting is calculated specially, like `confdir`_.", }, ### NOTE: this setting is usually being set to a symbol value. We don't officially have a ### setting type for that yet, but we might want to consider creating one. :name => { :default => nil, :desc => "The name of the application, if we are running as one. 
The\n" + "default is essentially $0 without the path or `.rb`.", } ) define_settings(:main, :logdir => { :default => nil, :type => :directory, :mode => 0750, :owner => "service", :group => "service", :desc => "The directory in which to store log files", } ) define_settings(:main, :trace => { :default => false, :type => :boolean, :desc => "Whether to print stack traces on some errors", }, :autoflush => { :default => true, :type => :boolean, :desc => "Whether log files should always flush to disk.", :hook => proc { |value| Log.autoflush = value } }, :syslogfacility => { :default => "daemon", :desc => "What syslog facility to use when logging to\n" + "syslog. Syslog has a fixed list of valid facilities, and you must\n" + "choose one of those; you cannot just make one up." }, :statedir => { :default => "$vardir/state", :type => :directory, :mode => 01755, :desc => "The directory where Puppet state is stored. Generally, this directory can be removed without causing harm (although it might result in spurious service restarts)." }, :rundir => { :default => nil, :type => :directory, :mode => 01777, :desc => "Where Puppet PID files are kept." }, :genconfig => { :default => false, :type => :boolean, :desc => "Whether to just print a configuration to stdout and exit. Only makes\n" + "sense when used interactively. Takes into account arguments specified\n" + "on the CLI.", }, :genmanifest => { :default => false, :type => :boolean, :desc => "Whether to just print a manifest to stdout and exit. Only makes\n" + "sense when used interactively. Takes into account arguments specified\n" + "on the CLI.", }, :configprint => { :default => "", :desc => "Print the value of a specific configuration setting. If the name of a\n" + "setting is provided for this, then the value is printed and puppet\n" + "exits. Comma-separate multiple values. For a list of all values,\n" + "specify 'all'.", }, :color => { :default => "ansi", :type => :string, :desc => "Whether to use colors when logging to the console. Valid values are\n" + "`ansi` (equivalent to `true`), `html`, and `false`, which produces no color.\n" + "Defaults to false on Windows, as its console does not support ansi colors.", }, :mkusers => { :default => false, :type => :boolean, :desc => "Whether to create the necessary user and group that puppet agent will run as.", }, :manage_internal_file_permissions => { :default => true, :type => :boolean, :desc => "Whether Puppet should manage the owner, group, and mode of files it uses internally", }, :onetime => { :default => false, :type => :boolean, :desc => "Run the configuration once, rather than as a long-running\n" + "daemon. This is useful for interactively running puppetd.", :short => 'o', }, :path => { :default => "none", :desc => "The shell search path. Defaults to whatever is inherited\n" + "from the parent process.", :call_hook => :on_define_and_write, :hook => proc do |value| ENV["PATH"] = "" if ENV["PATH"].nil? ENV["PATH"] = value unless value == "none" paths = ENV["PATH"].split(File::PATH_SEPARATOR) Puppet::Util::Platform.default_paths.each do |path| ENV["PATH"] += File::PATH_SEPARATOR + path unless paths.include?(path) end value end }, :libdir => { :type => :directory, :default => "$vardir/lib", :desc => "An extra search path for Puppet. This is only useful\n" + "for those files that Puppet will load on demand, and is only\n" + "guaranteed to work for those cases. 
In fact, the autoload\n" + "mechanism is responsible for making sure this directory\n" + "is in Ruby's search path\n", :call_hook => :on_initialize_and_write, :hook => proc do |value| $LOAD_PATH.delete(@oldlibdir) if defined?(@oldlibdir) and $LOAD_PATH.include?(@oldlibdir) @oldlibdir = value $LOAD_PATH << value end }, :ignoreimport => { :default => false, :type => :boolean, :desc => "If true, allows the parser to continue without requiring\n" + "all files referenced with `import` statements to exist. This setting was primarily\n" + "designed for use with commit hooks for parse-checking.", }, :authconfig => { :default => "$confdir/namespaceauth.conf", :desc => "The configuration file that defines the rights to the different\n" + "namespaces and methods. This can be used as a coarse-grained\n" + "authorization system for both `puppet agent` and `puppet master`.", }, :environment => { :default => "production", :desc => "The environment Puppet is running in. For clients\n" + "(e.g., `puppet agent`) this determines the environment itself, which\n" + "is used to find modules and much more. For servers (i.e., `puppet master`)\n" + "this provides the default environment for nodes we know nothing about." }, :diff_args => { :default => "-u", :desc => "Which arguments to pass to the diff command when printing differences between\n" + "files. The command to use can be chosen with the `diff` setting.", }, :diff => { :default => (Puppet.features.microsoft_windows? ? "" : "diff"), :desc => "Which diff command to use when printing differences between files. This setting\n" + "has no default value on Windows, as standard `diff` is not available, but Puppet can use many\n" + "third-party diff tools.", }, :show_diff => { :type => :boolean, :default => false, :desc => "Whether to log and report a contextual diff when files are being replaced. This causes\n" + "partial file contents to pass through Puppet's normal logging and reporting system, so this setting\n" + "should be used with caution if you are sending Puppet's reports to an insecure destination.\n" + "This feature currently requires the `diff/lcs` Ruby library.", }, :daemonize => { :type => :boolean, :default => (Puppet.features.microsoft_windows? ? false : true), :desc => "Whether to send the process into the background. This defaults to true on POSIX systems, and to false on Windows (where Puppet currently cannot daemonize).", :short => "D", :hook => proc do |value| if value and Puppet.features.microsoft_windows? raise "Cannot daemonize on Windows" end end }, :maximum_uid => { :default => 4294967290, :desc => "The maximum allowed UID. Some platforms use negative UIDs\n" + "but then ship with tools that do not know how to handle signed ints, so the UIDs show up as\n" + "huge numbers that can then not be fed back into the system. This is a hackish way to fail in a\n" + "slightly more useful way when that happens.", }, :route_file => { :default => "$confdir/routes.yaml", :desc => "The YAML file containing indirector route configuration.", }, :node_terminus => { :type => :terminus, :default => "plain", :desc => "Where to find information about nodes.", }, :data_binding_terminus => { :type => :terminus, :default => "hiera", :desc => "Where to retrive information about data.", }, :hiera_config => { :default => "$confdir/hiera.yaml", :desc => "The hiera configuration file", :type => :file, }, :catalog_terminus => { :type => :terminus, :default => "compiler", :desc => "Where to get node catalogs. 
This is useful to change if, for instance, you'd like to pre-compile catalogs and store them in memcached or some other easily-accessed store.", }, :catalog_cache_terminus => { :type => :terminus, :default => nil, :desc => "How to store cached catalogs. Valid values are 'json' and 'yaml'. The agent application defaults to 'json'." }, :facts_terminus => { :default => 'facter', :desc => "The node facts terminus.", :call_hook => :on_initialize_and_write, :hook => proc do |value| require 'puppet/node/facts' # Cache to YAML if we're uploading facts away if %w[rest inventory_service].include? value.to_s Puppet.info "configuring the YAML fact cache because a remote terminus is active" Puppet::Node::Facts.indirection.cache_class = :yaml end end }, :inventory_terminus => { :type => :terminus, :default => "$facts_terminus", :desc => "Should usually be the same as the facts terminus", }, :default_file_terminus => { :type => :terminus, :default => "rest", :desc => "The default source for files if no server is given in a uri, e.g. puppet:///file. The default of `rest` causes the file to be retrieved using the `server` setting. When running `apply` the default is `file_server`, causing requests to be filled locally." }, :httplog => { :default => "$logdir/http.log", :type => :file, :owner => "root", :mode => 0640, :desc => "Where the puppet agent web server logs.", }, :http_proxy_host => { :default => "none", :desc => "The HTTP proxy host to use for outgoing connections. Note: You may need to use a FQDN for the server hostname when using a proxy.", }, :http_proxy_port => { :default => 3128, :desc => "The HTTP proxy port to use for outgoing connections", }, :filetimeout => { :default => "15s", :type => :duration, :desc => "The minimum time to wait between checking for updates in configuration files. This timeout determines how quickly Puppet checks whether a file (such as manifests or templates) has changed on disk. Can be specified as a duration.", }, :queue_type => { :default => "stomp", :desc => "Which type of queue to use for asynchronous processing.", }, :queue_type => { :default => "stomp", :desc => "Which type of queue to use for asynchronous processing.", }, :queue_source => { :default => "stomp://localhost:61613/", :desc => "Which type of queue to use for asynchronous processing. If your stomp server requires authentication, you can include it in the URI as long as your stomp client library is at least 1.1.1", }, :async_storeconfigs => { :default => false, :type => :boolean, :desc => "Whether to use a queueing system to provide asynchronous database integration. Requires that `puppet queue` be running.", :hook => proc do |value| if value # This reconfigures the terminii for Node, Facts, and Catalog Puppet.settings[:storeconfigs] = true # But then we modify the configuration Puppet::Resource::Catalog.indirection.cache_class = :queue Puppet.settings[:catalog_cache_terminus] = :queue else raise "Cannot disable asynchronous storeconfigs in a running process" end end }, :thin_storeconfigs => { :default => false, :type => :boolean, :desc => "Boolean; whether Puppet should store only facts and exported resources in the storeconfigs database. This will improve the performance of exported resources with the older `active_record` backend, but will disable external tools that search the storeconfigs database. 
Thinning catalogs is generally unnecessary when using PuppetDB to store catalogs.", :hook => proc do |value| Puppet.settings[:storeconfigs] = true if value end }, :config_version => { :default => "", :desc => "How to determine the configuration version. By default, it will be the time that the configuration is parsed, but you can provide a shell script to override how the version is determined. The output of this script will be added to every log message in the reports, allowing you to correlate changes on your hosts to the source version on the server.", }, :zlib => { :default => true, :type => :boolean, :desc => "Boolean; whether to use the zlib library", }, :prerun_command => { :default => "", :desc => "A command to run before every agent run. If this command returns a non-zero return code, the entire Puppet run will fail.", }, :postrun_command => { :default => "", :desc => "A command to run after every agent run. If this command returns a non-zero return code, the entire Puppet run will be considered to have failed, even though it might have performed work during the normal run.", }, :freeze_main => { :default => false, :type => :boolean, :desc => "Freezes the 'main' class, disallowing any code to be added to it. This\n" + "essentially means that you can't have any code outside of a node, class, or definition other\n" + "than in the site manifest.", } ) Puppet.define_settings(:module_tool, :module_repository => { :default => 'https://forge.puppetlabs.com', :desc => "The module repository", }, :module_working_dir => { :default => '$vardir/puppet-module', :desc => "The directory into which module tool data is stored", } ) Puppet.define_settings( :main, # We have to downcase the fqdn, because the current ssl stuff (as oppsed to in master) doesn't have good facilities for # manipulating naming. :certname => { :default => Puppet::Settings.default_certname.downcase, :desc => "The name to use when handling certificates. Defaults to the fully qualified domain name.", :call_hook => :on_define_and_write, # Call our hook with the default value, so we're always downcased :hook => proc { |value| raise(ArgumentError, "Certificate names must be lower case; see #1168") unless value == value.downcase }}, :certdnsnames => { :default => '', :hook => proc do |value| unless value.nil? or value == '' then Puppet.warning < < { :default => '', :desc => < { :default => "$ssldir/certs", :type => :directory, :owner => "service", :desc => "The certificate directory." }, :ssldir => { :default => "$confdir/ssl", :type => :directory, :mode => 0771, :owner => "service", :desc => "Where SSL certificates are kept." }, :publickeydir => { :default => "$ssldir/public_keys", :type => :directory, :owner => "service", :desc => "The public key directory." }, :requestdir => { :default => "$ssldir/certificate_requests", :type => :directory, :owner => "service", :desc => "Where host certificate requests are stored." }, :privatekeydir => { :default => "$ssldir/private_keys", :type => :directory, :mode => 0750, :owner => "service", :desc => "The private key directory." }, :privatedir => { :default => "$ssldir/private", :type => :directory, :mode => 0750, :owner => "service", :desc => "Where the client stores private certificate information." }, :passfile => { :default => "$privatedir/password", :type => :file, :mode => 0640, :owner => "service", :desc => "Where puppet agent stores the password for its private key. Generally unused." 
}, :hostcsr => { :default => "$ssldir/csr_$certname.pem", :type => :file, :mode => 0644, :owner => "service", :desc => "Where individual hosts store and look for their certificate requests." }, :hostcert => { :default => "$certdir/$certname.pem", :type => :file, :mode => 0644, :owner => "service", :desc => "Where individual hosts store and look for their certificates." }, :hostprivkey => { :default => "$privatekeydir/$certname.pem", :type => :file, :mode => 0600, :owner => "service", :desc => "Where individual hosts store and look for their private key." }, :hostpubkey => { :default => "$publickeydir/$certname.pem", :type => :file, :mode => 0644, :owner => "service", :desc => "Where individual hosts store and look for their public key." }, :localcacert => { :default => "$certdir/ca.pem", :type => :file, :mode => 0644, :owner => "service", :desc => "Where each client stores the CA certificate." }, ## JJM - The ssl_client_ca_chain setting is commented out because it is # intended for (#3143) and is not expected to be used until CA chaining is # supported. # :ssl_client_ca_chain => { # :type => :file, # :mode => 0644, # :owner => "service", # :desc => "The list of CA certificate to complete the chain of trust to CA certificates \n" << # "listed in the ssl_client_ca_auth file." # }, :ssl_client_ca_auth => { :type => :file, :mode => 0644, :owner => "service", :desc => "Certificate authorities who issue server certificates. SSL servers will not be \n" << "considered authentic unless they posses a certificate issued by an authority \n" << "listed in this file. If this setting has no value then the Puppet master's CA \n" << "certificate (localcacert) will be used." }, ## JJM - The ssl_server_ca_chain setting is commented out because it is # intended for (#3143) and is not expected to be used until CA chaining is # supported. # :ssl_server_ca_chain => { # :type => :file, # :mode => 0644, # :owner => "service", # :desc => "The list of CA certificate to complete the chain of trust to CA certificates \n" << # "listed in the ssl_server_ca_auth file." # }, :ssl_server_ca_auth => { :type => :file, :mode => 0644, :owner => "service", :desc => "Certificate authorities who issue client certificates. SSL clients will not be \n" << "considered authentic unless they posses a certificate issued by an authority \n" << "listed in this file. If this setting has no value then the Puppet master's CA \n" << "certificate (localcacert) will be used." }, :hostcrl => { :default => "$ssldir/crl.pem", :type => :file, :mode => 0644, :owner => "service", :desc => "Where the host's certificate revocation list can be found. This is distinct from the certificate authority's CRL." }, :certificate_revocation => { :default => true, :type => :boolean, :desc => "Whether certificate revocation should be supported by downloading a Certificate Revocation List (CRL) to all clients. If enabled, CA chaining will almost definitely not work.", }, :certificate_expire_warning => { :default => "60d", :type => :duration, :desc => "The window of time leading up to a certificate's expiration that a notification will be logged. This applies to CA, master, and agent certificates. Can be specified as a duration." } ) define_settings( :ca, :ca_name => { :default => "Puppet CA: $certname", :desc => "The name to use the Certificate Authority certificate.", }, :cadir => { :default => "$ssldir/ca", :type => :directory, :owner => "service", :group => "service", :mode => 0770, :desc => "The root directory for the certificate authority." 
}, :cacert => { :default => "$cadir/ca_crt.pem", :type => :file, :owner => "service", :group => "service", :mode => 0660, :desc => "The CA certificate." }, :cakey => { :default => "$cadir/ca_key.pem", :type => :file, :owner => "service", :group => "service", :mode => 0660, :desc => "The CA private key." }, :capub => { :default => "$cadir/ca_pub.pem", :type => :file, :owner => "service", :group => "service", :desc => "The CA public key." }, :cacrl => { :default => "$cadir/ca_crl.pem", :type => :file, :owner => "service", :group => "service", :mode => 0664, :desc => "The certificate revocation list (CRL) for the CA. Will be used if present but otherwise ignored.", }, :caprivatedir => { :default => "$cadir/private", :type => :directory, :owner => "service", :group => "service", :mode => 0770, :desc => "Where the CA stores private certificate information." }, :csrdir => { :default => "$cadir/requests", :type => :directory, :owner => "service", :group => "service", :desc => "Where the CA stores certificate requests" }, :signeddir => { :default => "$cadir/signed", :type => :directory, :owner => "service", :group => "service", :mode => 0770, :desc => "Where the CA stores signed certificates." }, :capass => { :default => "$caprivatedir/ca.pass", :type => :file, :owner => "service", :group => "service", :mode => 0660, :desc => "Where the CA stores the password for the private key" }, :serial => { :default => "$cadir/serial", :type => :file, :owner => "service", :group => "service", :mode => 0644, :desc => "Where the serial number for certificates is stored." }, :autosign => { :default => "$confdir/autosign.conf", :type => :file, :mode => 0644, :desc => "Whether to enable autosign. Valid values are true (which autosigns any key request, and is a very bad idea), false (which never autosigns any key request), and the path to a file, which uses that configuration file to determine which keys to sign."}, :allow_duplicate_certs => { :default => false, :type => :boolean, :desc => "Whether to allow a new certificate request to overwrite an existing certificate.", }, :ca_ttl => { :default => "5y", :type => :duration, :desc => "The default TTL for new certificates. Can be specified as a duration." }, :ca_md => { :default => "md5", :desc => "The type of hash used in certificates.", }, :req_bits => { :default => 4096, :desc => "The bit length of the certificates.", }, :keylength => { :default => 4096, :desc => "The bit length of keys.", }, :cert_inventory => { :default => "$cadir/inventory.txt", :type => :file, :mode => 0644, :owner => "service", :group => "service", :desc => "A Complete listing of all certificates" } ) # Define the config default. define_settings(:application, :config_file_name => { :type => :string, :default => Puppet::Settings.default_config_file_name, :desc => "The name of the puppet config file.", }, :config => { :type => :file, :default => "$confdir/${config_file_name}", :desc => "The configuration file for the current puppet application", }, :pidfile => { :type => :file, :default => "$rundir/${run_mode}.pid", :desc => "The file containing the PID of a running process. 
" << "This file is intended to be used by service management " << "frameworks and monitoring systems to determine if a " << "puppet process is still in the process table.", }, :bindaddress => { :default => "0.0.0.0", :desc => "The address a listening server should bind to.", } ) define_settings(:master, :user => { :default => "puppet", :desc => "The user puppet master should run as.", }, :group => { :default => "puppet", :desc => "The group puppet master should run as.", }, :manifestdir => { :default => "$confdir/manifests", :type => :directory, :desc => "Where puppet master looks for its manifests.", }, :manifest => { :default => "$manifestdir/site.pp", :type => :file, :desc => "The entry-point manifest for puppet master.", }, :code => { :default => "", :desc => "Code to parse directly. This is essentially only used by `puppet`, and should only be set if you're writing your own Puppet executable", }, :masterlog => { :default => "$logdir/puppetmaster.log", :type => :file, :owner => "service", :group => "service", :mode => 0660, :desc => "Where puppet master logs. This is generally not used, since syslog is the default log destination." }, :masterhttplog => { :default => "$logdir/masterhttp.log", :type => :file, :owner => "service", :group => "service", :mode => 0660, :create => true, :desc => "Where the puppet master web server logs." }, :masterport => { :default => 8140, :desc => "Which port puppet master listens on.", }, :node_name => { :default => "cert", :desc => "How the puppet master determines the client's identity and sets the 'hostname', 'fqdn' and 'domain' facts for use in the manifest, in particular for determining which 'node' statement applies to the client. Possible values are 'cert' (use the subject's CN in the client's certificate) and 'facter' (use the hostname that the client reported in its facts)", }, :bucketdir => { :default => "$vardir/bucket", :type => :directory, :mode => 0750, :owner => "service", :group => "service", :desc => "Where FileBucket files are stored." }, :rest_authconfig => { :default => "$confdir/auth.conf", :type => :file, :desc => "The configuration file that defines the rights to the different rest indirections. This can be used as a fine-grained authorization system for `puppet master`.", }, :ca => { :default => true, :type => :boolean, :desc => "Whether the master should function as a certificate authority.", }, :modulepath => { :default => "$confdir/modules#{File::PATH_SEPARATOR}/usr/share/puppet/modules", :type => :path, :desc => "The search path for modules, as a list of directories separated by the system path separator character. " + "(The POSIX path separator is ':', and the Windows path separator is ';'.)", }, :ssl_client_header => { :default => "HTTP_X_CLIENT_DN", :desc => "The header containing an authenticated client's SSL DN. This header must be set by the proxy to the authenticated client's SSL DN (e.g., `/CN=puppet.puppetlabs.com`).", }, :ssl_client_verify_header => { :default => "HTTP_X_CLIENT_VERIFY", :desc => "The header containing the status message of the client verification. This header must be set by the proxy to 'SUCCESS' if the client successfully authenticated, and anything else otherwise.", }, # To make sure this directory is created before we try to use it on the server, we need # it to be in the server section (#1138). 
:yamldir => { :default => "$vardir/yaml", :type => :directory, :owner => "service", :group => "service", :mode => "750", :desc => "The directory in which YAML data is stored, usually in a subdirectory."}, :server_datadir => { :default => "$vardir/server_data", :type => :directory, :owner => "service", :group => "service", :mode => "750", :desc => "The directory in which serialized data is stored, usually in a subdirectory."}, :reports => { :default => "store", :desc => "The list of reports to generate. All reports are looked for in `puppet/reports/name.rb`, and multiple report names should be comma-separated (whitespace is okay).", }, :reportdir => { :default => "$vardir/reports", :type => :directory, :mode => 0750, :owner => "service", :group => "service", :desc => "The directory in which to store reports received from the client. Each client gets a separate subdirectory."}, :reporturl => { :default => "http://localhost:3000/reports/upload", :desc => "The URL used by the http reports processor to send reports", }, :fileserverconfig => { :default => "$confdir/fileserver.conf", :type => :file, :desc => "Where the fileserver configuration is stored.", }, :strict_hostname_checking => { :default => false, :desc => "Whether to only search for the complete hostname as it is in the certificate when searching for node information in the catalogs.", } ) define_settings(:metrics, :rrddir => { :type => :directory, :default => "$vardir/rrd", :mode => 0750, :owner => "service", :group => "service", :desc => "The directory where RRD database files are stored. Directories for each reporting host will be created under this directory." }, :rrdinterval => { :default => "$runinterval", :type => :duration, :desc => "How often RRD should expect data. This should match how often the hosts report back to the server. Can be specified as a duration.", } ) define_settings(:device, :devicedir => { :default => "$vardir/devices", :type => :directory, :mode => "750", :desc => "The root directory of devices' $vardir", }, :deviceconfig => { :default => "$confdir/device.conf", :desc => "Path to the device config file for puppet device", } ) define_settings(:agent, :node_name_value => { :default => "$certname", :desc => "The explicit value used for the node name for all requests the agent makes to the master. WARNING: This setting is mutually exclusive with node_name_fact. Changing this setting also requires changes to the default auth.conf configuration on the Puppet Master. Please see http://links.puppetlabs.com/node_name_value for more information." }, :node_name_fact => { :default => "", :desc => "The fact name used to determine the node name used for all requests the agent makes to the master. WARNING: This setting is mutually exclusive with node_name_value. Changing this setting also requires changes to the default auth.conf configuration on the Puppet Master. Please see http://links.puppetlabs.com/node_name_fact for more information.", :hook => proc do |value| if !value.empty? and Puppet[:node_name_value] != Puppet[:certname] raise "Cannot specify both the node_name_value and node_name_fact settings" end end }, :localconfig => { :default => "$statedir/localconfig", :type => :file, :owner => "root", :mode => 0660, :desc => "Where puppet agent caches the local configuration. 
An extension indicating the cache format is added automatically."}, :statefile => { :default => "$statedir/state.yaml", :type => :file, :mode => 0660, :desc => "Where puppet agent and puppet master store state associated with the running configuration. In the case of puppet master, this file reflects the state discovered through interacting with clients." }, :clientyamldir => { :default => "$vardir/client_yaml", :type => :directory, :mode => "750", :desc => "The directory in which client-side YAML data is stored." }, :client_datadir => { :default => "$vardir/client_data", :type => :directory, :mode => "750", :desc => "The directory in which serialized data is stored on the client." }, :classfile => { :default => "$statedir/classes.txt", :type => :file, :owner => "root", :mode => 0640, :desc => "The file in which puppet agent stores a list of the classes associated with the retrieved configuration. Can be loaded in the separate `puppet` executable using the `--loadclasses` option."}, :resourcefile => { :default => "$statedir/resources.txt", :type => :file, :owner => "root", :mode => 0640, :desc => "The file in which puppet agent stores a list of the resources associated with the retrieved configuration." }, :puppetdlog => { :default => "$logdir/puppetd.log", :type => :file, :owner => "root", :mode => 0640, :desc => "The log file for puppet agent. This is generally not used." }, :server => { :default => "puppet", :desc => "The server to which the puppet agent should connect" }, :use_srv_records => { :default => false, :type => :boolean, :desc => "Whether the server will search for SRV records in DNS for the current domain.", }, :srv_domain => { :default => "#{Puppet::Settings.domain_fact}", :desc => "The domain which will be queried to find the SRV records of servers to use.", }, :ignoreschedules => { :default => false, :type => :boolean, :desc => "Boolean; whether puppet agent should ignore schedules. This is useful for initial puppet agent runs.", }, :puppetport => { :default => 8139, :desc => "Which port puppet agent listens on.", }, :noop => { :default => false, :type => :boolean, :desc => "Whether puppet agent should be run in noop mode.", }, :runinterval => { :default => "30m", :type => :duration, :desc => "How often puppet agent applies the client configuration; in seconds. Note that a runinterval of 0 means \"run continuously\" rather than \"never run.\" If you want puppet agent to never run, you should start it with the `--no-client` option. Can be specified as a duration.", }, :listen => { :default => false, :type => :boolean, :desc => "Whether puppet agent should listen for connections. If this is true, then puppet agent will accept incoming REST API requests, subject to the default ACLs and the ACLs set in the `rest_authconfig` file. Puppet agent can respond usefully to requests on the `run`, `facts`, `certificate`, and `resource` endpoints.", }, :ca_server => { :default => "$server", :desc => "The server to use for certificate authority requests. It's a separate server because it cannot and does not need to horizontally scale.", }, :ca_port => { :default => "$masterport", :desc => "The port to use for the certificate authority.", }, :catalog_format => { :default => "", :desc => "(Deprecated for 'preferred_serialization_format') What format to use to dump the catalog. Only supports 'marshal' and 'yaml'. 
Only matters on the client, since it asks the server for a specific format.", :hook => proc { |value| if value Puppet.deprecation_warning "Setting 'catalog_format' is deprecated; use 'preferred_serialization_format' instead." Puppet.settings[:preferred_serialization_format] = value end } }, :preferred_serialization_format => { :default => "pson", :desc => "The preferred means of serializing ruby instances for passing over the wire. This won't guarantee that all instances will be serialized using this method, since not all classes can be guaranteed to support this format, but it will be used for all classes that support it.", }, :agent_catalog_run_lockfile => { :default => "$statedir/agent_catalog_run.lock", :type => :string, # (#2888) Ensure this file is not added to the settings catalog. :desc => "A lock file to indicate that a puppet agent catalog run is currently in progress. " + "The file contains the pid of the process that holds the lock on the catalog run.", }, :agent_disabled_lockfile => { :default => "$statedir/agent_disabled.lock", :type => :file, :desc => "A lock file to indicate that puppet agent runs have been administratively disabled. File contains a JSON object with state information.", }, :usecacheonfailure => { :default => true, :type => :boolean, :desc => "Whether to use the cached configuration when the remote configuration will not compile. This option is useful for testing new configurations, where you want to fix the broken configuration rather than reverting to a known-good one.", }, :use_cached_catalog => { :default => false, :type => :boolean, :desc => "Whether to only use the cached catalog rather than compiling a new catalog on every run. Puppet can be run with this enabled by default and then selectively disabled when a recompile is desired.", }, :ignorecache => { :default => false, :type => :boolean, :desc => "Ignore cache and always recompile the configuration. This is useful for testing new configurations, where the local cache may in fact be stale even if the timestamps are up to date - if the facts change or if the server changes.", }, :dynamicfacts => { :default => "memorysize,memoryfree,swapsize,swapfree", :desc => "Facts that are dynamic; these facts will be ignored when deciding whether changed facts should result in a recompile. Multiple facts should be comma-separated.", }, :splaylimit => { :default => "$runinterval", :type => :duration, :desc => "The maximum time to delay before runs. Defaults to being the same as the run interval. Can be specified as a duration.", }, :splay => { :default => false, :type => :boolean, :desc => "Whether to sleep for a pseudo-random (but consistent) amount of time before a run.", }, :clientbucketdir => { :default => "$vardir/clientbucket", :type => :directory, :mode => 0750, :desc => "Where FileBucket files are stored locally." }, :configtimeout => { :default => "2m", :type => :duration, :desc => "How long the client should wait for the configuration to be retrieved before considering it a failure. This can help reduce flapping if too many clients contact the server at one time. 
Can be specified as a duration.", }, :report_server => { :default => "$server", :desc => "The server to send transaction reports to.", }, :report_port => { :default => "$masterport", :desc => "The port to communicate with the report_server.", }, :inventory_server => { :default => "$server", :desc => "The server to send facts to.", }, :inventory_port => { :default => "$masterport", :desc => "The port to communicate with the inventory_server.", }, :report => { :default => true, :type => :boolean, :desc => "Whether to send reports after every transaction.", }, :lastrunfile => { :default => "$statedir/last_run_summary.yaml", :type => :file, :mode => 0644, :desc => "Where puppet agent stores the last run report summary in yaml format." }, :lastrunreport => { :default => "$statedir/last_run_report.yaml", :type => :file, :mode => 0640, :desc => "Where puppet agent stores the last run report in yaml format." }, :graph => { :default => false, :type => :boolean, :desc => "Whether to create dot graph files for the different configuration graphs. These dot files can be interpreted by tools like OmniGraffle or dot (which is part of ImageMagick).", }, :graphdir => { :default => "$statedir/graphs", :type => :directory, :desc => "Where to store dot-outputted graphs.", }, :http_compression => { :default => false, :type => :boolean, :desc => "Allow http compression in REST communication with the master. This setting might improve performance for agent -> master communications over slow WANs. Your puppet master needs to support compression (usually by activating some settings in a reverse-proxy in front of the puppet master, which rules out webrick). It is harmless to activate this settings if your master doesn't support compression, but if it supports it, this setting might reduce performance on high-speed LANs.", }, :waitforcert => { :default => "2m", :type => :duration, :desc => "The time interval 'puppet agent' should connect to the server and ask it to sign a certificate request. This is useful for the initial setup of a puppet client. You can turn off waiting for certificates by specifying a time of 0. Can be specified as a duration.", } ) define_settings(:inspect, :archive_files => { :type => :boolean, :default => false, :desc => "During an inspect run, whether to archive files whose contents are audited to a file bucket.", }, :archive_file_server => { :default => "$server", :desc => "During an inspect run, the file bucket server to archive files to if archive_files is set.", } ) # Plugin information. define_settings( :main, :plugindest => { :type => :directory, :default => "$libdir", :desc => "Where Puppet should store plugins that it pulls down from the central server.", }, :pluginsource => { :default => "puppet://$server/plugins", :desc => "From where to retrieve plugins. The standard Puppet `file` type is used for retrieval, so anything that is a valid file source can be used here.", }, :pluginsync => { :default => true, :type => :boolean, :desc => "Whether plugins should be synced with the central server.", }, :pluginsignore => { :default => ".svn CVS .git", :desc => "What files to ignore when pulling down plugins.", } ) # Central fact information. define_settings( :main, :factpath => { :type => :path, :default => "$vardir/lib/facter#{File::PATH_SEPARATOR}$vardir/facts", :desc => "Where Puppet should look for facts. Multiple directories should be separated by the system path separator character. 
(The POSIX path separator is ':', and the Windows path separator is ';'.)", :call_hook => :on_initialize_and_write, # Call our hook with the default value, so we always get the value added to facter. :hook => proc { |value| Facter.search(value) if Facter.respond_to?(:search) }} ) define_settings( :tagmail, :tagmap => { :default => "$confdir/tagmail.conf", :desc => "The mapping between reporting tags and email addresses.", }, :sendmail => { :default => which('sendmail') || '', :desc => "Where to find the sendmail binary with which to send email.", }, :reportfrom => { :default => "report@" + [Facter["hostname"].value,Facter["domain"].value].join("."), :desc => "The 'from' email address for the reports.", }, :smtpserver => { :default => "none", :desc => "The server through which to send email reports.", } ) define_settings( :rails, :dblocation => { :default => "$statedir/clientconfigs.sqlite3", :type => :file, :mode => 0660, :owner => "service", :group => "service", :desc => "The database cache for client configurations. Used for querying within the language." }, :dbadapter => { :default => "sqlite3", :desc => "The type of database to use.", }, :dbmigrate => { :default => false, :type => :boolean, :desc => "Whether to automatically migrate the database.", }, :dbname => { :default => "puppet", :desc => "The name of the database to use.", }, :dbserver => { :default => "localhost", :desc => "The database server for caching. Only used when networked databases are used.", }, :dbport => { :default => "", :desc => "The database password for caching. Only used when networked databases are used.", }, :dbuser => { :default => "puppet", :desc => "The database user for caching. Only used when networked databases are used.", }, :dbpassword => { :default => "puppet", :desc => "The database password for caching. Only used when networked databases are used.", }, :dbconnections => { :default => '', :desc => "The number of database connections for networked databases. Will be ignored unless the value is a positive integer.", }, :dbsocket => { :default => "", :desc => "The database socket location. Only used when networked databases are used. Will be ignored if the value is an empty string.", }, :railslog => { :default => "$logdir/rails.log", :type => :file, :mode => 0600, :owner => "service", :group => "service", :desc => "Where Rails-specific logs are sent" }, :rails_loglevel => { :default => "info", :desc => "The log level for Rails connections. The value must be a valid log level within Rails. Production environments normally use `info` and other environments normally use `debug`.", } ) define_settings( :couchdb, :couchdb_url => { :default => "http://127.0.0.1:5984/puppet", :desc => "The url where the puppet couchdb database will be created", } ) define_settings( :transaction, :tags => { :default => "", :desc => "Tags to use to find resources. If this is set, then only resources tagged with the specified tags will be applied. Values must be comma-separated.", }, :evaltrace => { :default => false, :type => :boolean, :desc => "Whether each resource should log when it is being evaluated. This allows you to interactively see exactly what is being done.", }, :summarize => { :default => false, :type => :boolean, :desc => "Whether to print a transaction summary.", } ) define_settings( :main, :external_nodes => { :default => "none", :desc => "An external command that can produce node information. 
The command's output must be a YAML dump of a hash, and that hash must have a `classes` key and/or a `parameters` key, where `classes` is an array or hash and `parameters` is a hash. For unknown nodes, the command should exit with a non-zero exit code. This command makes it straightforward to store your node mapping information in other data sources like databases.", } ) define_settings( :ldap, :ldapssl => { :default => false, :type => :boolean, :desc => "Whether SSL should be used when searching for nodes. Defaults to false because SSL usually requires certificates to be set up on the client side.", }, :ldaptls => { :default => false, :type => :boolean, :desc => "Whether TLS should be used when searching for nodes. Defaults to false because TLS usually requires certificates to be set up on the client side.", }, :ldapserver => { :default => "ldap", :desc => "The LDAP server. Only used if `node_terminus` is set to `ldap`.", }, :ldapport => { :default => 389, :desc => "The LDAP port. Only used if `node_terminus` is set to `ldap`.", }, :ldapstring => { :default => "(&(objectclass=puppetClient)(cn=%s))", :desc => "The search string used to find an LDAP node.", }, :ldapclassattrs => { :default => "puppetclass", :desc => "The LDAP attributes to use to define Puppet classes. Values should be comma-separated.", }, :ldapstackedattrs => { :default => "puppetvar", :desc => "The LDAP attributes that should be stacked to arrays by adding the values in all hierarchy elements of the tree. Values should be comma-separated.", }, :ldapattrs => { :default => "all", :desc => "The LDAP attributes to include when querying LDAP for nodes. All returned attributes are set as variables in the top-level scope. Multiple values should be comma-separated. The value 'all' returns all attributes.", }, :ldapparentattr => { :default => "parentnode", :desc => "The attribute to use to define the parent node.", }, :ldapuser => { :default => "", :desc => "The user to use to connect to LDAP. Must be specified as a full DN.", }, :ldappassword => { :default => "", :desc => "The password to use to connect to LDAP.", }, :ldapbase => { :default => "", :desc => "The search base for LDAP searches. It's impossible to provide a meaningful default here, although the LDAP libraries might have one already set. Generally, it should be the 'ou=Hosts' branch under your main directory.", } ) define_settings(:master, :storeconfigs => { :default => false, :type => :boolean, :desc => "Whether to store each client's configuration, including catalogs, facts, and related data. This also enables the import and export of resources in the Puppet language - a mechanism for exchange resources between nodes. By default this uses ActiveRecord and an SQL database to store and query the data; this, in turn, will depend on Rails being available. You can adjust the backend using the storeconfigs_backend setting.", # Call our hook with the default value, so we always get the libdir set. :call_hook => :on_initialize_and_write, :hook => proc do |value| require 'puppet/node' require 'puppet/node/facts' if value if not Puppet.settings[:async_storeconfigs] Puppet::Resource::Catalog.indirection.cache_class = :store_configs Puppet.settings[:catalog_cache_terminus] = :store_configs end Puppet::Node::Facts.indirection.cache_class = :store_configs Puppet::Resource.indirection.terminus_class = :store_configs end end }, :storeconfigs_backend => { :type => :terminus, :default => "active_record", :desc => "Configure the backend terminus used for StoreConfigs. 
By default, this uses the ActiveRecord store, which directly talks to the database from within the Puppet Master process." } ) - # This doesn't actually work right now. - define_settings(:parser, :templatedir => { :default => "$vardir/templates", :type => :directory, :desc => "Where Puppet looks for template files. Can be a list of colon-separated directories.", + }, + + :allow_variables_with_dashes => { + :default => false, + :desc => <<-'EOT' +Permit hyphens (`-`) in variable names and issue deprecation warnings about +them. This setting **should always be `false`;** setting it to `true` +will cause subtle and wide-ranging bugs. It will be removed in a future version. + +Hyphenated variables caused major problems in the language, but were allowed +between Puppet 2.7.3 and 2.7.14. If you used them during this window, we +apologize for the inconvenience --- you can temporarily set this to `true` +in order to upgrade, and can rename your variables at your leisure. Please +revert it to `false` after you have renamed all affected variables. +EOT } ) define_settings(:puppetdoc, :document_all => { :default => false, :type => :boolean, :desc => "Document all resources", } ) end diff --git a/lib/puppet/parser/lexer.rb b/lib/puppet/parser/lexer.rb index 0934a59e7..302f9eed5 100644 --- a/lib/puppet/parser/lexer.rb +++ b/lib/puppet/parser/lexer.rb @@ -1,550 +1,587 @@ # the scanner/lexer require 'forwardable' require 'strscan' require 'puppet' require 'puppet/util/methodhelper' module Puppet class LexError < RuntimeError; end end module Puppet::Parser; end class Puppet::Parser::Lexer extend Forwardable attr_reader :last, :file, :lexing_context, :token_queue attr_accessor :line, :indefine alias :indefine? :indefine def lex_error msg raise Puppet::LexError.new(msg) end class Token include Puppet::Util::MethodHelper attr_accessor :regex, :name, :string, :skip, :incr_line, :skip_text, :accumulate alias skip? skip alias accumulate? accumulate def initialize(string_or_regex, name, options = {}) if string_or_regex.is_a?(String) @name, @string = name, string_or_regex @regex = Regexp.new(Regexp.escape(string_or_regex)) else @name, @regex = name, string_or_regex end set_options(options) end def to_s string or @name.to_s end def acceptable?(context={}) # By default tokens are aceeptable in any context true end end # Maintain a list of tokens. class TokenList extend Forwardable attr_reader :regex_tokens, :string_tokens def_delegator :@tokens, :[] # Create a new token. def add_token(name, regex, options = {}, &block) raise(ArgumentError, "Token #{name} already exists") if @tokens.include?(name) token = Token.new(regex, name, options) @tokens[token.name] = token if token.string @string_tokens << token @tokens_by_string[token.string] = token else @regex_tokens << token end token.meta_def(:convert, &block) if block_given? token end def initialize @tokens = {} @regex_tokens = [] @string_tokens = [] @tokens_by_string = {} end # Look up a token by its value, rather than name. def lookup(string) @tokens_by_string[string] end # Define more tokens. def add_tokens(hash) hash.each do |regex, name| add_token(name, regex) end end # Sort our tokens by length, so we know once we match, we're done. # This helps us avoid the O(n^2) nature of token matching. def sort_tokens @string_tokens.sort! { |a, b| b.string.length <=> a.string.length } end + + # Yield each token name and value in turn. 
+ def each + @tokens.each {|name, value| yield name, value } + end end TOKENS = TokenList.new TOKENS.add_tokens( '[' => :LBRACK, ']' => :RBRACK, '{' => :LBRACE, '}' => :RBRACE, '(' => :LPAREN, ')' => :RPAREN, '=' => :EQUALS, '+=' => :APPENDS, '==' => :ISEQUAL, '>=' => :GREATEREQUAL, '>' => :GREATERTHAN, '<' => :LESSTHAN, '<=' => :LESSEQUAL, '!=' => :NOTEQUAL, '!' => :NOT, ',' => :COMMA, '.' => :DOT, ':' => :COLON, '@' => :AT, '<<|' => :LLCOLLECT, '|>>' => :RRCOLLECT, '->' => :IN_EDGE, '<-' => :OUT_EDGE, '~>' => :IN_EDGE_SUB, '<~' => :OUT_EDGE_SUB, '<|' => :LCOLLECT, '|>' => :RCOLLECT, ';' => :SEMIC, '?' => :QMARK, '\\' => :BACKSLASH, '=>' => :FARROW, '+>' => :PARROW, '+' => :PLUS, '-' => :MINUS, '/' => :DIV, '*' => :TIMES, '<<' => :LSHIFT, '>>' => :RSHIFT, '=~' => :MATCH, '!~' => :NOMATCH, %r{((::){0,1}[A-Z][-\w]*)+} => :CLASSREF, "" => :STRING, "" => :DQPRE, "" => :DQMID, "" => :DQPOST, "" => :BOOLEAN ) # Numbers are treated separately from names, so that they may contain dots. TOKENS.add_token :NUMBER, %r{\b(?:0[xX][0-9A-Fa-f]+|0?\d+(?:\.\d+)?(?:[eE]-?\d+)?)\b} do |lexer, value| [TOKENS[:NAME], value] end #:stopdoc: # Issue #4161 def (TOKENS[:NUMBER]).acceptable?(context={}) ![:DQPRE,:DQMID].include? context[:after] end #:startdoc: TOKENS.add_token :NAME, %r{((::)?[a-z0-9][-\w]*)(::[a-z0-9][-\w]*)*} do |lexer, value| string_token = self # we're looking for keywords here if tmp = KEYWORDS.lookup(value) string_token = tmp if [:TRUE, :FALSE].include?(string_token.name) value = eval(value) string_token = TOKENS[:BOOLEAN] end end [string_token, value] end [:NAME,:CLASSNAME,:CLASSREF].each { |name_token| #:stopdoc: # Issue #4161 def (TOKENS[name_token]).acceptable?(context={}) ![:DQPRE,:DQMID].include? context[:after] end #:startdoc: } TOKENS.add_token :COMMENT, %r{#.*}, :accumulate => true, :skip => true do |lexer,value| value.sub!(/# ?/,'') [self, value] end TOKENS.add_token :MLCOMMENT, %r{/\*(.*?)\*/}m, :accumulate => true, :skip => true do |lexer, value| lexer.line += value.count("\n") value.sub!(/^\/\* ?/,'') value.sub!(/ ?\*\/$/,'') [self,value] end TOKENS.add_token :REGEX, %r{/[^/\n]*/} do |lexer, value| # Make sure we haven't matched an escaped / while value[-2..-2] == '\\' other = lexer.scan_until(%r{/}) value += other end regex = value.sub(%r{\A/}, "").sub(%r{/\Z}, '').gsub("\\/", "/") [self, Regexp.new(regex)] end #:stopdoc: # Issue #4161 def (TOKENS[:REGEX]).acceptable?(context={}) [:NODE,:LBRACE,:RBRACE,:MATCH,:NOMATCH,:COMMA].include? 
context[:after] end #:startdoc: TOKENS.add_token :RETURN, "\n", :skip => true, :incr_line => true, :skip_text => true TOKENS.add_token :SQUOTE, "'" do |lexer, value| [TOKENS[:STRING], lexer.slurpstring(value,["'"],:ignore_invalid_escapes).first ] end DQ_initial_token_types = {'$' => :DQPRE,'"' => :STRING} DQ_continuation_token_types = {'$' => :DQMID,'"' => :DQPOST} TOKENS.add_token :DQUOTE, /"/ do |lexer, value| lexer.tokenize_interpolated_string(DQ_initial_token_types) end TOKENS.add_token :DQCONT, /\}/ do |lexer, value| lexer.tokenize_interpolated_string(DQ_continuation_token_types) end #:stopdoc: # Issue #4161 def (TOKENS[:DQCONT]).acceptable?(context={}) context[:string_interpolation_depth] > 0 end #:startdoc: + TOKENS.add_token :DOLLAR_VAR_WITH_DASH, %r{\$(?:::)?(?:[-\w]+::)*[-\w]+} do |lexer, value| + lexer.warn_if_variable_has_hyphen(value) + + [TOKENS[:VARIABLE], value[1..-1]] + end + def (TOKENS[:DOLLAR_VAR_WITH_DASH]).acceptable?(context = {}) + Puppet[:allow_variables_with_dashes] + end + TOKENS.add_token :DOLLAR_VAR, %r{\$(::)?(\w+::)*\w+} do |lexer, value| [TOKENS[:VARIABLE],value[1..-1]] end + TOKENS.add_token :VARIABLE_WITH_DASH, %r{(?:::)?(?:[-\w]+::)*[-\w]+} do |lexer, value| + lexer.warn_if_variable_has_hyphen(value) + + [TOKENS[:VARIABLE], value] + end + #:stopdoc: # Issue #4161 + def (TOKENS[:VARIABLE_WITH_DASH]).acceptable?(context={}) + Puppet[:allow_variables_with_dashes] and TOKENS[:VARIABLE].acceptable?(context) + end + #:startdoc: + TOKENS.add_token :VARIABLE, %r{(::)?(\w+::)*\w+} #:stopdoc: # Issue #4161 def (TOKENS[:VARIABLE]).acceptable?(context={}) [:DQPRE,:DQMID].include? context[:after] end #:startdoc: TOKENS.sort_tokens @@pairs = { "{" => "}", "(" => ")", "[" => "]", "<|" => "|>", "<<|" => "|>>" } KEYWORDS = TokenList.new KEYWORDS.add_tokens( "case" => :CASE, "class" => :CLASS, "default" => :DEFAULT, "define" => :DEFINE, "import" => :IMPORT, "if" => :IF, "elsif" => :ELSIF, "else" => :ELSE, "inherits" => :INHERITS, "node" => :NODE, "and" => :AND, "or" => :OR, "undef" => :UNDEF, "false" => :FALSE, "true" => :TRUE, "in" => :IN, "unless" => :UNLESS ) def clear initvars end def expected return nil if @expected.empty? name = @expected[-1] TOKENS.lookup(name) or lex_error "Could not find expected token #{name}" end # scan the whole file # basically just used for testing def fullscan array = [] self.scan { |token, str| # Ignore any definition nesting problems @indefine = false array.push([token,str]) } array end def file=(file) @file = file @line = 1 contents = File.exists?(file) ? File.read(file) : "" @scanner = StringScanner.new(contents) end def_delegator :@token_queue, :shift, :shift_token def find_string_token # We know our longest string token is three chars, so try each size in turn # until we either match or run out of chars. This way our worst-case is three # tries, where it is otherwise the number of string token we have. Also, # the lookups are optimized hash lookups, instead of regex scans. # s = @scanner.peek(3) token = TOKENS.lookup(s[0,3]) || TOKENS.lookup(s[0,2]) || TOKENS.lookup(s[0,1]) [ token, token && @scanner.scan(token.regex) ] end # Find the next token that matches a regex. We look for these first. def find_regex_token best_token = nil best_length = 0 # I tried optimizing based on the first char, but it had # a slightly negative affect and was a good bit more complicated. 
TOKENS.regex_tokens.each do |token| if length = @scanner.match?(token.regex) and token.acceptable?(lexing_context) # We've found a longer match if length > best_length best_length = length best_token = token end end end return best_token, @scanner.scan(best_token.regex) if best_token end # Find the next token, returning the string and the token. def find_token shift_token || find_regex_token || find_string_token end def initialize initvars end def initvars @line = 1 @previous_token = nil @scanner = nil @file = nil # AAARRGGGG! okay, regexes in ruby are bloody annoying # no one else has "\n" =~ /\s/ @skip = %r{[ \t\r]+} @namestack = [] @token_queue = [] @indefine = false @expected = [] @commentstack = [ ['', @line] ] @lexing_context = { :after => nil, :start_of_line => true, :string_interpolation_depth => 0 } end # Make any necessary changes to the token and/or value. def munge_token(token, value) @line += 1 if token.incr_line skip if token.skip_text return if token.skip and not token.accumulate? token, value = token.convert(self, value) if token.respond_to?(:convert) return unless token if token.accumulate? comment = @commentstack.pop comment[0] << value + "\n" @commentstack.push(comment) end return if token.skip return token, { :value => value, :line => @line } end # Handling the namespace stack def_delegator :@namestack, :pop, :namepop # This value might have :: in it, but we don't care -- it'll be handled # normally when joining, and when popping we want to pop this full value, # however long the namespace is. def_delegator :@namestack, :<<, :namestack # Collect the current namespace. def namespace @namestack.join("::") end def_delegator :@scanner, :rest # this is the heart of the lexer def scan #Puppet.debug("entering scan") lex_error "Invalid or empty string" unless @scanner # Skip any initial whitespace. skip until token_queue.empty? and @scanner.eos? do yielded = false matched_token, value = find_token # error out if we didn't match anything at all lex_error "Could not match #{@scanner.rest[/^(\S+|\s+|.*)/]}" unless matched_token newline = matched_token.name == :RETURN # this matches a blank line; eat the previously accumulated comments getcomment if lexing_context[:start_of_line] and newline lexing_context[:start_of_line] = newline final_token, token_value = munge_token(matched_token, value) unless final_token skip next end lexing_context[:after] = final_token.name unless newline lexing_context[:string_interpolation_depth] += 1 if final_token.name == :DQPRE lexing_context[:string_interpolation_depth] -= 1 if final_token.name == :DQPOST value = token_value[:value] if match = @@pairs[value] and final_token.name != :DQUOTE and final_token.name != :SQUOTE @expected << match elsif exp = @expected[-1] and exp == value and final_token.name != :DQUOTE and final_token.name != :SQUOTE @expected.pop end if final_token.name == :LBRACE or final_token.name == :LPAREN commentpush end if final_token.name == :RPAREN commentpop end yield [final_token.name, token_value] if @previous_token namestack(value) if @previous_token.name == :CLASS and value != '{' if @previous_token.name == :DEFINE if indefine? msg = "Cannot nest definition #{value} inside #{@indefine}" self.indefine = false raise Puppet::ParseError, msg end @indefine = value end end @previous_token = final_token skip end @scanner = nil # This indicates that we're done parsing. yield [false,false] end # Skip any skipchars in our remaining string. 
def skip @scanner.skip(@skip) end # Provide some limited access to the scanner, for those # tokens that need it. def_delegator :@scanner, :scan_until # we've encountered the start of a string... # slurp in the rest of the string and return it def slurpstring(terminators,escapes=%w{ \\ $ ' " r n t s }+["\n"],ignore_invalid_escapes=false) # we search for the next quote that isn't preceded by a # backslash; the caret is there to match empty strings str = @scanner.scan_until(/([^\\]|^|[^\\])([\\]{2})*[#{terminators}]/) or lex_error "Unclosed quote after '#{last}' in '#{rest}'" @line += str.count("\n") # literal carriage returns add to the line count. str.gsub!(/\\(.)/m) { ch = $1 if escapes.include? ch case ch when 'r'; "\r" when 'n'; "\n" when 't'; "\t" when 's'; " " when "\n"; '' else ch end else Puppet.warning "Unrecognised escape sequence '\\#{ch}'#{file && " in file #{file}"}#{line && " at line #{line}"}" unless ignore_invalid_escapes "\\#{ch}" end } [ str[0..-2],str[-1,1] ] end def tokenize_interpolated_string(token_type,preamble='') value,terminator = slurpstring('"$') token_queue << [TOKENS[token_type[terminator]],preamble+value] + variable_regex = if Puppet[:allow_variables_with_dashes] + TOKENS[:VARIABLE_WITH_DASH].regex + else + TOKENS[:VARIABLE].regex + end if terminator != '$' or @scanner.scan(/\{/) token_queue.shift - elsif var_name = @scanner.scan(TOKENS[:VARIABLE].regex) + elsif var_name = @scanner.scan(variable_regex) + warn_if_variable_has_hyphen(var_name) token_queue << [TOKENS[:VARIABLE],var_name] tokenize_interpolated_string(DQ_continuation_token_types) else tokenize_interpolated_string(token_type,token_queue.pop.last + terminator) end end # just parse a string, not a whole file def string=(string) @scanner = StringScanner.new(string) end # returns the content of the currently accumulated content cache def commentpop @commentstack.pop[0] end def getcomment(line = nil) comment = @commentstack.last if line.nil? or comment[1] <= line @commentstack.pop @commentstack.push(['', @line]) return comment[0] end '' end def commentpush @commentstack.push(['', @line]) end + + def warn_if_variable_has_hyphen(var_name) + if var_name.include?('-') + Puppet.deprecation_warning("Using `-` in variable names is deprecated at #{file || ''}:#{line}. See http://links.puppetlabs.com/puppet-hyphenated-variable-deprecation") + end + end end diff --git a/lib/puppet/provider/augeas/augeas.rb b/lib/puppet/provider/augeas/augeas.rb index 3b7120df8..a301c8cd7 100644 --- a/lib/puppet/provider/augeas/augeas.rb +++ b/lib/puppet/provider/augeas/augeas.rb @@ -1,478 +1,478 @@ # # Copyright 2011 Bryan Kearney # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require 'augeas' if Puppet.features.augeas? require 'strscan' require 'puppet/util' require 'puppet/util/diff' require 'puppet/util/package' Puppet::Type.type(:augeas).provide(:augeas) do include Puppet::Util include Puppet::Util::Diff include Puppet::Util::Package - confine :true => Puppet.features.augeas? 
+ confine :feature => :augeas has_features :parse_commands, :need_to_run?,:execute_changes SAVE_NOOP = "noop" SAVE_OVERWRITE = "overwrite" SAVE_NEWFILE = "newfile" SAVE_BACKUP = "backup" COMMANDS = { "set" => [ :path, :string ], "setm" => [ :path, :string, :string ], "rm" => [ :path ], "clear" => [ :path ], "mv" => [ :path, :path ], "insert" => [ :string, :string, :path ], "get" => [ :path, :comparator, :string ], "defvar" => [ :string, :path ], "defnode" => [ :string, :path, :string ], "match" => [ :path, :glob ], "size" => [:comparator, :int], "include" => [:string], "not_include" => [:string], "==" => [:glob], "!=" => [:glob] } COMMANDS["ins"] = COMMANDS["insert"] COMMANDS["remove"] = COMMANDS["rm"] COMMANDS["move"] = COMMANDS["mv"] attr_accessor :aug # Extracts an 2 dimensional array of commands which are in the # form of command path value. # The input can be # - A string with one command # - A string with many commands per line # - An array of strings. def parse_commands(data) context = resource[:context] # Add a trailing / if it is not there if (context.length > 0) context << "/" if context[-1, 1] != "/" end data = data.split($/) if data.is_a?(String) data = data.flatten args = [] data.each do |line| line.strip! next if line.nil? || line.empty? argline = [] sc = StringScanner.new(line) cmd = sc.scan(/\w+|==|!=/) formals = COMMANDS[cmd] fail("Unknown command #{cmd}") unless formals argline << cmd narg = 0 formals.each do |f| sc.skip(/\s+/) narg += 1 if f == :path start = sc.pos nbracket = 0 inSingleTick = false inDoubleTick = false begin sc.skip(/([^\]\[\s\\'"]|\\.)+/) ch = sc.getch nbracket += 1 if ch == "[" nbracket -= 1 if ch == "]" inSingleTick = !inSingleTick if ch == "'" inDoubleTick = !inDoubleTick if ch == "\"" fail("unmatched [") if nbracket < 0 end until ((nbracket == 0 && !inSingleTick && !inDoubleTick && (ch =~ /\s/)) || sc.eos?) len = sc.pos - start len -= 1 unless sc.eos? unless p = sc.string[start, len] fail("missing path argument #{narg} for #{cmd}") end # Rip off any ticks if they are there. p = p[1, (p.size - 2)] if p[0,1] == "'" || p[0,1] == "\"" p.chomp!("/") if p[0,1] != '$' && p[0,1] != "/" argline << context + p else argline << p end elsif f == :string delim = sc.peek(1) if delim == "'" || delim == "\"" sc.getch argline << sc.scan(/([^\\#{delim}]|(\\.))*/) sc.getch else argline << sc.scan(/[^\s]+/) end fail("missing string argument #{narg} for #{cmd}") unless argline[-1] elsif f == :comparator argline << sc.scan(/(==|!=|=~|<|<=|>|>=)/) unless argline[-1] puts sc.rest fail("invalid comparator for command #{cmd}") end elsif f == :int argline << sc.scan(/\d+/).to_i elsif f== :glob argline << sc.rest end end args << argline end args end def open_augeas unless @aug flags = Augeas::NONE flags = Augeas::TYPE_CHECK if resource[:type_check] == :true if resource[:incl] flags |= Augeas::NO_MODL_AUTOLOAD else flags |= Augeas::NO_LOAD end root = resource[:root] load_path = get_load_path(resource) debug("Opening augeas with root #{root}, lens path #{load_path}, flags #{flags}") @aug = Augeas::open(root, load_path,flags) debug("Augeas version #{get_augeas_version} is installed") if versioncmp(get_augeas_version, "0.3.6") >= 0 # Optimize loading if the context is given and it's a simple path, # requires the glob function from Augeas 0.8.2 or up glob_avail = !aug.match("/augeas/version/pathx/functions/glob").empty? 
opt_ctx = resource[:context].match("^/files/[^'\"\\[\\]]+$") if resource[:context] restricted = false if resource[:incl] aug.set("/augeas/load/Xfm/lens", resource[:lens]) aug.set("/augeas/load/Xfm/incl", resource[:incl]) restricted = true elsif glob_avail and opt_ctx restricted = true # Optimize loading if the context is given, requires the glob function # from Augeas 0.8.2 or up ctx_path = resource[:context].sub(/^\/files(.*?)\/?$/, '\1/') load_path = "/augeas/load/*['%s' !~ glob(incl) + regexp('/.*')]" % ctx_path if aug.match(load_path).size < aug.match("/augeas/load/*").size aug.rm(load_path) restricted = true else # This will occur if the context is less specific than any glob debug("Unable to optimize files loaded by context path, no glob matches") end end aug.load print_load_errors(:warning => restricted) end @aug end def close_augeas if @aug @aug.close debug("Closed the augeas connection") @aug = nil end end # Used by the need_to_run? method to process get filters. Returns # true if there is a match, false if otherwise # Assumes a syntax of get /files/path [COMPARATOR] value def process_get(cmd_array) return_value = false #validate and tear apart the command fail ("Invalid command: #{cmd_array.join(" ")}") if cmd_array.length < 4 cmd = cmd_array.shift path = cmd_array.shift comparator = cmd_array.shift arg = cmd_array.join(" ") #check the value in augeas result = @aug.get(path) || '' case comparator when "!=" return_value = (result != arg) when "=~" regex = Regexp.new(arg) return_value = (result =~ regex) else return_value = (result.send(comparator, arg)) end !!return_value end # Used by the need_to_run? method to process match filters. Returns # true if there is a match, false if otherwise def process_match(cmd_array) return_value = false #validate and tear apart the command fail("Invalid command: #{cmd_array.join(" ")}") if cmd_array.length < 3 cmd = cmd_array.shift path = cmd_array.shift # Need to break apart the clause clause_array = parse_commands(cmd_array.shift)[0] verb = clause_array.shift #Get the values from augeas result = @aug.match(path) || [] fail("Error trying to match path '#{path}'") if (result == -1) # Now do the work case verb when "size" fail("Invalid command: #{cmd_array.join(" ")}") if clause_array.length != 2 comparator = clause_array.shift arg = clause_array.shift case comparator when "!=" return_value = !(result.size.send(:==, arg)) else return_value = (result.size.send(comparator, arg)) end when "include" arg = clause_array.shift return_value = result.include?(arg) when "not_include" arg = clause_array.shift return_value = !result.include?(arg) when "==" begin arg = clause_array.shift new_array = eval arg return_value = (result == new_array) rescue fail("Invalid array in command: #{cmd_array.join(" ")}") end when "!=" begin arg = clause_array.shift new_array = eval arg return_value = (result != new_array) rescue fail("Invalid array in command: #{cmd_array.join(" ")}") end end !!return_value end # Generate lens load paths from user given paths and local pluginsync dir def get_load_path(resource) load_path = [] # Permits colon separated strings or arrays if resource[:load_path] load_path = [resource[:load_path]].flatten load_path.map! { |path| path.split(/:/) } load_path.flatten! 
end if File.exists?("#{Puppet[:libdir]}/augeas/lenses") load_path << "#{Puppet[:libdir]}/augeas/lenses" end load_path.join(":") end def get_augeas_version @aug.get("/augeas/version") || "" end def set_augeas_save_mode(mode) @aug.set("/augeas/save", mode) end def print_load_errors(args={}) errors = @aug.match("/augeas//error") unless errors.empty? if args[:warning] warning("Loading failed for one or more files, see debug for /augeas//error output") else debug("Loading failed for one or more files, output from /augeas//error:") end end print_errors(errors) end def print_put_errors errors = @aug.match("/augeas//error[. = 'put_failed']") debug("Put failed on one or more files, output from /augeas//error:") unless errors.empty? print_errors(errors) end def print_errors(errors) errors.each do |errnode| @aug.match("#{errnode}/*").each do |subnode| subvalue = @aug.get(subnode) debug("#{subnode} = #{subvalue}") end end end # Determines if augeas acutally needs to run. def need_to_run? force = resource[:force] return_value = true begin open_augeas filter = resource[:onlyif] unless filter == "" cmd_array = parse_commands(filter)[0] command = cmd_array[0]; begin case command when "get"; return_value = process_get(cmd_array) when "match"; return_value = process_match(cmd_array) end rescue SystemExit,NoMemoryError raise rescue Exception => e fail("Error sending command '#{command}' with params #{cmd_array[1..-1].inspect}/#{e.message}") end end unless force # If we have a verison of augeas which is at least 0.3.6 then we # can make the changes now and see if changes were made. if return_value and versioncmp(get_augeas_version, "0.3.6") >= 0 debug("Will attempt to save and only run if files changed") # Execute in NEWFILE mode so we can show a diff set_augeas_save_mode(SAVE_NEWFILE) do_execute_changes save_result = @aug.save unless save_result print_put_errors fail("Save failed with return code #{save_result}, see debug") end saved_files = @aug.match("/augeas/events/saved") if saved_files.size > 0 root = resource[:root].sub(/^\/$/, "") saved_files.map! {|key| @aug.get(key).sub(/^\/files/, root) } saved_files.uniq.each do |saved_file| if Puppet[:show_diff] notice "\n" + diff(saved_file, saved_file + ".augnew") end File.delete(saved_file + ".augnew") end debug("Files changed, should execute") return_value = true else debug("Skipping because no files were changed") return_value = false end end end ensure if not return_value or resource.noop? or not save_result close_augeas end end return_value end def execute_changes # Workaround Augeas bug where changing the save mode doesn't trigger a # reload of the previously saved file(s) when we call Augeas#load @aug.match("/augeas/events/saved").each do |file| @aug.rm("/augeas#{@aug.get(file)}/mtime") end # Reload augeas, and execute the changes for real set_augeas_save_mode(SAVE_OVERWRITE) if versioncmp(get_augeas_version, "0.3.6") >= 0 @aug.load do_execute_changes unless @aug.save print_put_errors fail("Save failed with return code #{success}, see debug") end :executed ensure close_augeas end # Actually execute the augeas changes. 
def do_execute_changes commands = parse_commands(resource[:changes]) commands.each do |cmd_array| fail("invalid command #{cmd_array.join[" "]}") if cmd_array.length < 2 command = cmd_array[0] cmd_array.shift begin case command when "set" debug("sending command '#{command}' with params #{cmd_array.inspect}") rv = aug.set(cmd_array[0], cmd_array[1]) fail("Error sending command '#{command}' with params #{cmd_array.inspect}") if (!rv) when "setm" debug("sending command '#{command}' with params #{cmd_array.inspect}") rv = aug.setm(cmd_array[0], cmd_array[1], cmd_array[2]) fail("Error sending command '#{command}' with params #{cmd_array.inspect}") if (rv == -1) when "rm", "remove" debug("sending command '#{command}' with params #{cmd_array.inspect}") rv = aug.rm(cmd_array[0]) fail("Error sending command '#{command}' with params #{cmd_array.inspect}") if (rv == -1) when "clear" debug("sending command '#{command}' with params #{cmd_array.inspect}") rv = aug.clear(cmd_array[0]) fail("Error sending command '#{command}' with params #{cmd_array.inspect}") if (!rv) when "insert", "ins" label = cmd_array[0] where = cmd_array[1] path = cmd_array[2] case where when "before"; before = true when "after"; before = false else fail("Invalid value '#{where}' for where param") end debug("sending command '#{command}' with params #{[label, where, path].inspect}") rv = aug.insert(path, label, before) fail("Error sending command '#{command}' with params #{cmd_array.inspect}") if (rv == -1) when "defvar" debug("sending command '#{command}' with params #{cmd_array.inspect}") rv = aug.defvar(cmd_array[0], cmd_array[1]) fail("Error sending command '#{command}' with params #{cmd_array.inspect}") if (!rv) when "defnode" debug("sending command '#{command}' with params #{cmd_array.inspect}") rv = aug.defnode(cmd_array[0], cmd_array[1], cmd_array[2]) fail("Error sending command '#{command}' with params #{cmd_array.inspect}") if (!rv) when "mv", "move" debug("sending command '#{command}' with params #{cmd_array.inspect}") rv = aug.mv(cmd_array[0], cmd_array[1]) fail("Error sending command '#{command}' with params #{cmd_array.inspect}") if (rv == -1) else fail("Command '#{command}' is not supported") end rescue SystemExit,NoMemoryError raise rescue Exception => e fail("Error sending command '#{command}' with params #{cmd_array.inspect}/#{e.message}") end end end end diff --git a/lib/puppet/provider/group/ldap.rb b/lib/puppet/provider/group/ldap.rb index 78333f176..19e9e1b14 100644 --- a/lib/puppet/provider/group/ldap.rb +++ b/lib/puppet/provider/group/ldap.rb @@ -1,45 +1,45 @@ require 'puppet/provider/ldap' Puppet::Type.type(:group).provide :ldap, :parent => Puppet::Provider::Ldap do desc "Group management via LDAP. This provider requires that you have valid values for all of the LDAP-related settings in `puppet.conf`, including `ldapbase`. You will almost definitely need settings for `ldapuser` and `ldappassword` in order for your clients to write to LDAP. Note that this provider will automatically generate a GID for you if you do not specify one, but it is a potentially expensive operation, as it iterates across all existing groups to pick the appropriate next one." - confine :true => Puppet.features.ldap?, :false => (Puppet[:ldapuser] == "") + confine :feature => :ldap, :false => (Puppet[:ldapuser] == "") # We're mapping 'members' here because we want to make it # easy for the ldap user provider to manage groups. 
This # way it can just use the 'update' method in the group manager, # whereas otherwise it would need to replicate that code. manages(:posixGroup).at("ou=Groups").and.maps :name => :cn, :gid => :gidNumber, :members => :memberUid # Find the next gid after the current largest gid. provider = self manager.generates(:gidNumber).with do largest = 500 if existing = provider.manager.search existing.each do |hash| next unless value = hash[:gid] num = value[0].to_i largest = num if num > largest end end largest + 1 end # Convert a group name to an id. def self.name2id(group) return nil unless result = manager.search("cn=#{group}") and result.length > 0 # Only use the first result. group = result[0] gid = group[:gid][0] end end diff --git a/lib/puppet/util/feature.rb b/lib/puppet/util/feature.rb index eb61dabfa..3dccec2fe 100644 --- a/lib/puppet/util/feature.rb +++ b/lib/puppet/util/feature.rb @@ -1,81 +1,83 @@ class Puppet::Util::Feature attr_reader :path # Create a new feature test. You have to pass the feature name, # and it must be unique. You can either provide a block that # will get executed immediately to determine if the feature # is present, or you can pass an option to determine it. # Currently, the only supported option is 'libs' (must be # passed as a symbol), which will make sure that each lib loads # successfully. def add(name, options = {}) method = name.to_s + "?" @results.delete(name) if block_given? begin result = yield rescue Exception => detail warn "Failed to load feature test for #{name}: #{detail}" result = false end @results[name] = result end meta_def(method) do - @results[name] = test(name, options) unless @results.include?(name) + # Positive cache only, except blocks which are executed just once above + final = @results[name] || block_given? + @results[name] = test(name, options) unless final @results[name] end end # Create a new feature collection. def initialize(path) @path = path @results = {} @loader = Puppet::Util::Autoload.new(self, @path) end def load @loader.loadall end def method_missing(method, *args) return super unless method.to_s =~ /\?$/ feature = method.to_s.sub(/\?$/, '') @loader.load(feature) respond_to?(method) && self.send(method) end # Actually test whether the feature is present. We only want to test when # someone asks for the feature, so we don't unnecessarily load # files. def test(name, options) return true unless ary = options[:libs] ary = [ary] unless ary.is_a?(Array) ary.each do |lib| return false unless load_library(lib, name) end # We loaded all of the required libraries true end private def load_library(lib, name) raise ArgumentError, "Libraries must be passed as strings not #{lib.class}" unless lib.is_a?(String) begin require lib rescue SystemExit,NoMemoryError raise rescue Exception Puppet.debug "Failed to load library '#{lib}' for feature '#{name}'" return false end true end end diff --git a/spec/unit/parser/lexer_spec.rb b/spec/unit/parser/lexer_spec.rb index 492fb1199..e375bed39 100755 --- a/spec/unit/parser/lexer_spec.rb +++ b/spec/unit/parser/lexer_spec.rb @@ -1,756 +1,868 @@ #! /usr/bin/env ruby require 'spec_helper' require 'puppet/parser/lexer' # This is a special matcher to match easily lexer output RSpec::Matchers.define :be_like do |*expected| match do |actual| expected.zip(actual).all? { |e,a| !e or a[0] == e or (e.is_a? 
Array and a[0] == e[0] and (a[1] == e[1] or (a[1].is_a?(Hash) and a[1][:value] == e[1]))) } end end __ = nil +def tokens_scanned_from(s) + lexer = Puppet::Parser::Lexer.new + lexer.string = s + lexer.fullscan[0..-2] +end + + describe Puppet::Parser::Lexer do describe "when reading strings" do before { @lexer = Puppet::Parser::Lexer.new } it "should increment the line count for every carriage return in the string" do @lexer.line = 10 @lexer.string = "this\nis\natest'" @lexer.slurpstring("'") @lexer.line.should == 12 end it "should not increment the line count for escapes in the string" do @lexer.line = 10 @lexer.string = "this\\nis\\natest'" @lexer.slurpstring("'") @lexer.line.should == 10 end it "should not think the terminator is escaped, when preceeded by an even number of backslashes" do @lexer.line = 10 @lexer.string = "here\nis\nthe\nstring\\\\'with\nextra\njunk" @lexer.slurpstring("'") @lexer.line.should == 13 end { 'r' => "\r", 'n' => "\n", 't' => "\t", 's' => " " }.each do |esc, expected_result| it "should recognize \\#{esc} sequence" do @lexer.string = "\\#{esc}'" @lexer.slurpstring("'")[0].should == expected_result end end end end describe Puppet::Parser::Lexer::Token do before do @token = Puppet::Parser::Lexer::Token.new(%r{something}, :NAME) end [:regex, :name, :string, :skip, :incr_line, :skip_text, :accumulate].each do |param| it "should have a #{param.to_s} reader" do @token.should be_respond_to(param) end it "should have a #{param.to_s} writer" do @token.should be_respond_to(param.to_s + "=") end end end describe Puppet::Parser::Lexer::Token, "when initializing" do it "should create a regex if the first argument is a string" do Puppet::Parser::Lexer::Token.new("something", :NAME).regex.should == %r{something} end it "should set the string if the first argument is one" do Puppet::Parser::Lexer::Token.new("something", :NAME).string.should == "something" end it "should set the regex if the first argument is one" do Puppet::Parser::Lexer::Token.new(%r{something}, :NAME).regex.should == %r{something} end end describe Puppet::Parser::Lexer::TokenList do before do @list = Puppet::Parser::Lexer::TokenList.new end it "should have a method for retrieving tokens by the name" do token = @list.add_token :name, "whatever" @list[:name].should equal(token) end it "should have a method for retrieving string tokens by the string" do token = @list.add_token :name, "whatever" @list.lookup("whatever").should equal(token) end it "should add tokens to the list when directed" do token = @list.add_token :name, "whatever" @list[:name].should equal(token) end it "should have a method for adding multiple tokens at once" do @list.add_tokens "whatever" => :name, "foo" => :bar @list[:name].should_not be_nil @list[:bar].should_not be_nil end it "should fail to add tokens sharing a name with an existing token" do @list.add_token :name, "whatever" expect { @list.add_token :name, "whatever" }.to raise_error(ArgumentError) end it "should set provided options on tokens being added" do token = @list.add_token :name, "whatever", :skip_text => true token.skip_text.should == true end it "should define any provided blocks as a :convert method" do token = @list.add_token(:name, "whatever") do "foo" end token.convert.should == "foo" end it "should store all string tokens in the :string_tokens list" do one = @list.add_token(:name, "1") @list.string_tokens.should be_include(one) end it "should store all regex tokens in the :regex_tokens list" do one = @list.add_token(:name, %r{one}) @list.regex_tokens.should 
be_include(one) end it "should not store string tokens in the :regex_tokens list" do one = @list.add_token(:name, "1") @list.regex_tokens.should_not be_include(one) end it "should not store regex tokens in the :string_tokens list" do one = @list.add_token(:name, %r{one}) @list.string_tokens.should_not be_include(one) end it "should sort the string tokens inversely by length when asked" do one = @list.add_token(:name, "1") two = @list.add_token(:other, "12") @list.sort_tokens @list.string_tokens.should == [two, one] end end describe Puppet::Parser::Lexer::TOKENS do before do @lexer = Puppet::Parser::Lexer.new end { :LBRACK => '[', :RBRACK => ']', :LBRACE => '{', :RBRACE => '}', :LPAREN => '(', :RPAREN => ')', :EQUALS => '=', :ISEQUAL => '==', :GREATEREQUAL => '>=', :GREATERTHAN => '>', :LESSTHAN => '<', :LESSEQUAL => '<=', :NOTEQUAL => '!=', :NOT => '!', :COMMA => ',', :DOT => '.', :COLON => ':', :AT => '@', :LLCOLLECT => '<<|', :RRCOLLECT => '|>>', :LCOLLECT => '<|', :RCOLLECT => '|>', :SEMIC => ';', :QMARK => '?', :BACKSLASH => '\\', :FARROW => '=>', :PARROW => '+>', :APPENDS => '+=', :PLUS => '+', :MINUS => '-', :DIV => '/', :TIMES => '*', :LSHIFT => '<<', :RSHIFT => '>>', :MATCH => '=~', :NOMATCH => '!~', :IN_EDGE => '->', :OUT_EDGE => '<-', :IN_EDGE_SUB => '~>', :OUT_EDGE_SUB => '<~', }.each do |name, string| it "should have a token named #{name.to_s}" do Puppet::Parser::Lexer::TOKENS[name].should_not be_nil end it "should match '#{string}' for the token #{name.to_s}" do Puppet::Parser::Lexer::TOKENS[name].string.should == string end end { "case" => :CASE, "class" => :CLASS, "default" => :DEFAULT, "define" => :DEFINE, "import" => :IMPORT, "if" => :IF, "elsif" => :ELSIF, "else" => :ELSE, "inherits" => :INHERITS, "node" => :NODE, "and" => :AND, "or" => :OR, "undef" => :UNDEF, "false" => :FALSE, "true" => :TRUE, "in" => :IN, "unless" => :UNLESS, }.each do |string, name| it "should have a keyword named #{name.to_s}" do Puppet::Parser::Lexer::KEYWORDS[name].should_not be_nil end it "should have the keyword for #{name.to_s} set to #{string}" do Puppet::Parser::Lexer::KEYWORDS[name].string.should == string end end # These tokens' strings don't matter, just that the tokens exist. 
[:STRING, :DQPRE, :DQMID, :DQPOST, :BOOLEAN, :NAME, :NUMBER, :COMMENT, :MLCOMMENT, :RETURN, :SQUOTE, :DQUOTE, :VARIABLE].each do |name| it "should have a token named #{name.to_s}" do Puppet::Parser::Lexer::TOKENS[name].should_not be_nil end end end describe Puppet::Parser::Lexer::TOKENS[:CLASSREF] do before { @token = Puppet::Parser::Lexer::TOKENS[:CLASSREF] } it "should match against single upper-case alpha-numeric terms" do @token.regex.should =~ "One" end it "should match against upper-case alpha-numeric terms separated by double colons" do @token.regex.should =~ "One::Two" end it "should match against many upper-case alpha-numeric terms separated by double colons" do @token.regex.should =~ "One::Two::Three::Four::Five" end it "should match against upper-case alpha-numeric terms prefixed by double colons" do @token.regex.should =~ "::One" end end describe Puppet::Parser::Lexer::TOKENS[:NAME] do before { @token = Puppet::Parser::Lexer::TOKENS[:NAME] } it "should match against lower-case alpha-numeric terms" do @token.regex.should =~ "one-two" end it "should return itself and the value if the matched term is not a keyword" do Puppet::Parser::Lexer::KEYWORDS.expects(:lookup).returns(nil) @token.convert(stub("lexer"), "myval").should == [Puppet::Parser::Lexer::TOKENS[:NAME], "myval"] end it "should return the keyword token and the value if the matched term is a keyword" do keyword = stub 'keyword', :name => :testing Puppet::Parser::Lexer::KEYWORDS.expects(:lookup).returns(keyword) @token.convert(stub("lexer"), "myval").should == [keyword, "myval"] end it "should return the BOOLEAN token and 'true' if the matched term is the string 'true'" do keyword = stub 'keyword', :name => :TRUE Puppet::Parser::Lexer::KEYWORDS.expects(:lookup).returns(keyword) @token.convert(stub('lexer'), "true").should == [Puppet::Parser::Lexer::TOKENS[:BOOLEAN], true] end it "should return the BOOLEAN token and 'false' if the matched term is the string 'false'" do keyword = stub 'keyword', :name => :FALSE Puppet::Parser::Lexer::KEYWORDS.expects(:lookup).returns(keyword) @token.convert(stub('lexer'), "false").should == [Puppet::Parser::Lexer::TOKENS[:BOOLEAN], false] end it "should match against lower-case alpha-numeric terms separated by double colons" do @token.regex.should =~ "one::two" end it "should match against many lower-case alpha-numeric terms separated by double colons" do @token.regex.should =~ "one::two::three::four::five" end it "should match against lower-case alpha-numeric terms prefixed by double colons" do @token.regex.should =~ "::one" end it "should match against nested terms starting with numbers" do @token.regex.should =~ "::1one::2two::3three" end end describe Puppet::Parser::Lexer::TOKENS[:NUMBER] do before do @token = Puppet::Parser::Lexer::TOKENS[:NUMBER] @regex = @token.regex end it "should match against numeric terms" do @regex.should =~ "2982383139" end it "should match against float terms" do @regex.should =~ "29823.235" end it "should match against hexadecimal terms" do @regex.should =~ "0xBEEF0023" end it "should match against float with exponent terms" do @regex.should =~ "10e23" end it "should match against float terms with negative exponents" do @regex.should =~ "10e-23" end it "should match against float terms with fractional parts and exponent" do @regex.should =~ "1.234e23" end it "should return the NAME token and the value" do @token.convert(stub("lexer"), "myval").should == [Puppet::Parser::Lexer::TOKENS[:NAME], "myval"] end end describe Puppet::Parser::Lexer::TOKENS[:COMMENT] do 
before { @token = Puppet::Parser::Lexer::TOKENS[:COMMENT] } it "should match against lines starting with '#'" do @token.regex.should =~ "# this is a comment" end it "should be marked to get skipped" do @token.skip?.should be_true end it "should be marked to accumulate" do @token.accumulate?.should be_true end it "'s block should return the comment without the #" do @token.convert(@lexer,"# this is a comment")[1].should == "this is a comment" end end describe Puppet::Parser::Lexer::TOKENS[:MLCOMMENT] do before do @token = Puppet::Parser::Lexer::TOKENS[:MLCOMMENT] @lexer = stub 'lexer', :line => 0 end it "should match against lines enclosed with '/*' and '*/'" do @token.regex.should =~ "/* this is a comment */" end it "should match multiple lines enclosed with '/*' and '*/'" do @token.regex.should =~ """/* this is a comment */""" end it "should increase the lexer current line number by the amount of lines spanned by the comment" do @lexer.expects(:line=).with(2) @token.convert(@lexer, "1\n2\n3") end it "should not greedily match comments" do match = @token.regex.match("/* first */ word /* second */") match[1].should == " first " end it "should be marked to accumulate" do @token.accumulate?.should be_true end it "'s block should return the comment without the comment marks" do @lexer.stubs(:line=).with(0) @token.convert(@lexer,"/* this is a comment */")[1].should == "this is a comment" end end describe Puppet::Parser::Lexer::TOKENS[:RETURN] do before { @token = Puppet::Parser::Lexer::TOKENS[:RETURN] } it "should match against carriage returns" do @token.regex.should =~ "\n" end it "should be marked to initiate text skipping" do @token.skip_text.should be_true end it "should be marked to increment the line" do @token.incr_line.should be_true end end -shared_examples_for "variable names in the lexer" do |prefix| + +shared_examples_for "handling `-` in standard variable names" do |prefix| # Watch out - a regex might match a *prefix* on these, not just the whole # word, so make sure you don't have false positive or negative results based # on that. 
legal = %w{f foo f::b foo::b f::bar foo::bar 3 foo3 3foo} illegal = %w{f- f-o -f f::-o f::o- f::o-o} ["", "::"].each do |global_scope| legal.each do |name| var = prefix + global_scope + name it "should accept #{var.inspect} as a valid variable name" do (subject.regex.match(var) || [])[0].should == var end end illegal.each do |name| var = prefix + global_scope + name - it "should NOT accept #{var.inspect} as a valid variable name" do + it "should NOT accept #{var.inspect} as a valid variable name when `allow_variables_with_dashes` is disabled" do + Puppet[:allow_variables_with_dashes] = false + (subject.regex.match(var) || [])[0].should_not == var + end + + it "should NOT accept #{var.inspect} as a valid variable name even when `allow_variables_with_dashes` is enabled" do + Puppet[:allow_variables_with_dashes] = true (subject.regex.match(var) || [])[0].should_not == var end end end end describe Puppet::Parser::Lexer::TOKENS[:DOLLAR_VAR] do its(:skip_text) { should be_false } its(:incr_line) { should be_false } - it_should_behave_like "variable names in the lexer", '$' + it_should_behave_like "handling `-` in standard variable names", '$' end describe Puppet::Parser::Lexer::TOKENS[:VARIABLE] do its(:skip_text) { should be_false } its(:incr_line) { should be_false } - it_should_behave_like "variable names in the lexer", '' + it_should_behave_like "handling `-` in standard variable names", '' end -def tokens_scanned_from(s) - lexer = Puppet::Parser::Lexer.new - lexer.string = s - lexer.fullscan[0..-2] +describe "the horrible deprecation / compatibility variables with dashes" do + NamesWithDashes = %w{f- f-o -f f::-o f::o- f::o-o} + + { Puppet::Parser::Lexer::TOKENS[:DOLLAR_VAR_WITH_DASH] => '$', + Puppet::Parser::Lexer::TOKENS[:VARIABLE_WITH_DASH] => '' + }.each do |token, prefix| + describe token do + its(:skip_text) { should be_false } + its(:incr_line) { should be_false } + + context "when compatibility is disabled" do + before :each do Puppet[:allow_variables_with_dashes] = false end + Puppet::Parser::Lexer::TOKENS.each do |name, value| + it "should be unacceptable after #{name}" do + token.acceptable?(:after => name).should be_false + end + end + + # Yes, this should still *match*, just not be acceptable.
+ NamesWithDashes.each do |name| + ["", "::"].each do |global_scope| + var = prefix + global_scope + name + it "should match #{var.inspect}" do + subject.regex.match(var).to_a.should == [var] + end + end + end + end + + context "when compatibility is enabled" do + before :each do Puppet[:allow_variables_with_dashes] = true end + it "should be acceptable after DQPRE" do + token.acceptable?(:after => :DQPRE).should be_true + end + + NamesWithDashes.each do |name| + ["", "::"].each do |global_scope| + var = prefix + global_scope + name + it "should match #{var.inspect}" do + subject.regex.match(var).to_a.should == [var] + end + end + end + end + end + end + + context "deprecation warnings" do + before :each do Puppet[:allow_variables_with_dashes] = true end + + it "should match a top level variable" do + Puppet.expects(:deprecation_warning).once + + tokens_scanned_from('$foo-bar').should == [ + [:VARIABLE, { :value => 'foo-bar', :line => 1 }] + ] + end + + it "does not warn about a variable without a dash" do + Puppet.expects(:deprecation_warning).never + + tokens_scanned_from('$c').should == [ + [:VARIABLE, { :value => "c", :line => 1 }] + ] + end + + it "does not warn about referencing a class name that contains a dash" do + Puppet.expects(:deprecation_warning).never + + tokens_scanned_from('foo-bar').should == [ + [:NAME, { :value => "foo-bar", :line => 1 }] + ] + end + + it "warns about reference to variable" do + Puppet.expects(:deprecation_warning).once + + tokens_scanned_from('$::foo-bar::baz-quux').should == [ + [:VARIABLE, { :value => "::foo-bar::baz-quux", :line => 1 }] + ] + end + + it "warns about reference to variable interpolated in a string" do + Puppet.expects(:deprecation_warning).once + + tokens_scanned_from('"$::foo-bar::baz-quux"').should == [ + [:DQPRE, { :value => "", :line => 1 }], + [:VARIABLE, { :value => "::foo-bar::baz-quux", :line => 1 }], + [:DQPOST, { :value => "", :line => 1 }], + ] + end + + it "warns about reference to variable interpolated in a string as an expression" do + Puppet.expects(:deprecation_warning).once + + tokens_scanned_from('"${::foo-bar::baz-quux}"').should == [ + [:DQPRE, { :value => "", :line => 1 }], + [:VARIABLE, { :value => "::foo-bar::baz-quux", :line => 1 }], + [:DQPOST, { :value => "", :line => 1 }], + ] + end + end end describe Puppet::Parser::Lexer,"when lexing strings" do { %q{'single quoted string')} => [[:STRING,'single quoted string']], %q{"double quoted string"} => [[:STRING,'double quoted string']], %q{'single quoted string with an escaped "\\'"'} => [[:STRING,'single quoted string with an escaped "\'"']], %q{'single quoted string with an escaped "\$"'} => [[:STRING,'single quoted string with an escaped "\$"']], %q{'single quoted string with an escaped "\."'} => [[:STRING,'single quoted string with an escaped "\."']], %q{'single quoted string with an escaped "\r\n"'} => [[:STRING,'single quoted string with an escaped "\r\n"']], %q{'single quoted string with an escaped "\n"'} => [[:STRING,'single quoted string with an escaped "\n"']], %q{'single quoted string with an escaped "\\\\"'} => [[:STRING,'single quoted string with an escaped "\\\\"']], %q{"string with an escaped '\\"'"} => [[:STRING,"string with an escaped '\"'"]], %q{"string with an escaped '\\$'"} => [[:STRING,"string with an escaped '$'"]], %Q{"string with a line ending with a backslash: \\\nfoo"} => [[:STRING,"string with a line ending with a backslash: foo"]], %q{"string with $v (but no braces)"} => [[:DQPRE,"string with "],[:VARIABLE,'v'],[:DQPOST,' (but no 
braces)']], %q["string with ${v} in braces"] => [[:DQPRE,"string with "],[:VARIABLE,'v'],[:DQPOST,' in braces']], %q["string with ${qualified::var} in braces"] => [[:DQPRE,"string with "],[:VARIABLE,'qualified::var'],[:DQPOST,' in braces']], %q{"string with $v and $v (but no braces)"} => [[:DQPRE,"string with "],[:VARIABLE,"v"],[:DQMID," and "],[:VARIABLE,"v"],[:DQPOST," (but no braces)"]], %q["string with ${v} and ${v} in braces"] => [[:DQPRE,"string with "],[:VARIABLE,"v"],[:DQMID," and "],[:VARIABLE,"v"],[:DQPOST," in braces"]], %q["string with ${'a nested single quoted string'} inside it."] => [[:DQPRE,"string with "],[:STRING,'a nested single quoted string'],[:DQPOST,' inside it.']], %q["string with ${['an array ',$v2]} in it."] => [[:DQPRE,"string with "],:LBRACK,[:STRING,"an array "],:COMMA,[:VARIABLE,"v2"],:RBRACK,[:DQPOST," in it."]], %q{a simple "scanner" test} => [[:NAME,"a"],[:NAME,"simple"], [:STRING,"scanner"],[:NAME,"test"]], %q{a simple 'single quote scanner' test} => [[:NAME,"a"],[:NAME,"simple"], [:STRING,"single quote scanner"],[:NAME,"test"]], %q{a harder 'a $b \c"'} => [[:NAME,"a"],[:NAME,"harder"], [:STRING,'a $b \c"']], %q{a harder "scanner test"} => [[:NAME,"a"],[:NAME,"harder"], [:STRING,"scanner test"]], %q{a hardest "scanner \"test\""} => [[:NAME,"a"],[:NAME,"hardest"],[:STRING,'scanner "test"']], %Q{a hardestest "scanner \\"test\\"\n"} => [[:NAME,"a"],[:NAME,"hardestest"],[:STRING,%Q{scanner "test"\n}]], %q{function("call")} => [[:NAME,"function"],[:LPAREN,"("],[:STRING,'call'],[:RPAREN,")"]], %q["string with ${(3+5)/4} nested math."] => [[:DQPRE,"string with "],:LPAREN,[:NAME,"3"],:PLUS,[:NAME,"5"],:RPAREN,:DIV,[:NAME,"4"],[:DQPOST," nested math."]], %q["$$$$"] => [[:STRING,"$$$$"]], %q["$variable"] => [[:DQPRE,""],[:VARIABLE,"variable"],[:DQPOST,""]], %q["$var$other"] => [[:DQPRE,""],[:VARIABLE,"var"],[:DQMID,""],[:VARIABLE,"other"],[:DQPOST,""]], %q["foo$bar$"] => [[:DQPRE,"foo"],[:VARIABLE,"bar"],[:DQPOST,"$"]], %q["foo$$bar"] => [[:DQPRE,"foo$"],[:VARIABLE,"bar"],[:DQPOST,""]], %q[""] => [[:STRING,""]], %q["123 456 789 0"] => [[:STRING,"123 456 789 0"]], %q["${123} 456 $0"] => [[:DQPRE,""],[:VARIABLE,"123"],[:DQMID," 456 "],[:VARIABLE,"0"],[:DQPOST,""]], %q["$foo::::bar"] => [[:DQPRE,""],[:VARIABLE,"foo"],[:DQPOST,"::::bar"]] }.each { |src,expected_result| it "should handle #{src} correctly" do tokens_scanned_from(src).should be_like(*expected_result) end } end describe Puppet::Parser::Lexer::TOKENS[:DOLLAR_VAR] do before { @token = Puppet::Parser::Lexer::TOKENS[:DOLLAR_VAR] } it "should match against alpha words prefixed with '$'" do @token.regex.should =~ '$this_var' end it "should return the VARIABLE token and the variable name stripped of the '$'" do @token.convert(stub("lexer"), "$myval").should == [Puppet::Parser::Lexer::TOKENS[:VARIABLE], "myval"] end end describe Puppet::Parser::Lexer::TOKENS[:REGEX] do before { @token = Puppet::Parser::Lexer::TOKENS[:REGEX] } it "should match against any expression enclosed in //" do @token.regex.should =~ '/this is a regex/' end it 'should not match if there is \n in the regex' do @token.regex.should_not =~ "/this is \n a regex/" end describe "when scanning" do it "should not consider escaped slashes to be the end of a regex" do tokens_scanned_from("$x =~ /this \\/ foo/").should be_like(__,__,[:REGEX,%r{this / foo}]) end it "should not lex chained division as a regex" do tokens_scanned_from("$x = $a/$b/$c").collect { |name, data| name }.should_not be_include( :REGEX ) end it "should accept a regular expression 
after NODE" do tokens_scanned_from("node /www.*\.mysite\.org/").should be_like(__,[:REGEX,Regexp.new("www.*\.mysite\.org")]) end it "should accept regular expressions in a CASE" do s = %q{case $variable { "something": {$othervar = 4096 / 2} /regex/: {notice("this notably sucks")} } } tokens_scanned_from(s).should be_like( :CASE,:VARIABLE,:LBRACE,:STRING,:COLON,:LBRACE,:VARIABLE,:EQUALS,:NAME,:DIV,:NAME,:RBRACE,[:REGEX,/regex/],:COLON,:LBRACE,:NAME,:LPAREN,:STRING,:RPAREN,:RBRACE,:RBRACE ) end end it "should return the REGEX token and a Regexp" do @token.convert(stub("lexer"), "/myregex/").should == [Puppet::Parser::Lexer::TOKENS[:REGEX], Regexp.new(/myregex/)] end end describe Puppet::Parser::Lexer, "when lexing comments" do before { @lexer = Puppet::Parser::Lexer.new } it "should accumulate token in munge_token" do token = stub 'token', :skip => true, :accumulate? => true, :incr_line => nil, :skip_text => false token.stubs(:convert).with(@lexer, "# this is a comment").returns([token, " this is a comment"]) @lexer.munge_token(token, "# this is a comment") @lexer.munge_token(token, "# this is a comment") @lexer.getcomment.should == " this is a comment\n this is a comment\n" end it "should add a new comment stack level on LBRACE" do @lexer.string = "{" @lexer.expects(:commentpush) @lexer.fullscan end it "should add a new comment stack level on LPAREN" do @lexer.string = "(" @lexer.expects(:commentpush) @lexer.fullscan end it "should pop the current comment on RPAREN" do @lexer.string = ")" @lexer.expects(:commentpop) @lexer.fullscan end it "should return the current comments on getcomment" do @lexer.string = "# comment" @lexer.fullscan @lexer.getcomment.should == "comment\n" end it "should discard the previous comments on blank line" do @lexer.string = "# 1\n\n# 2" @lexer.fullscan @lexer.getcomment.should == "2\n" end it "should skip whitespace before lexing the next token after a non-token" do tokens_scanned_from("/* 1\n\n */ \ntest").should be_like([:NAME, "test"]) end it "should not return comments seen after the current line" do @lexer.string = "# 1\n\n# 2" @lexer.fullscan @lexer.getcomment(1).should == "" end it "should return a comment seen before the current line" do @lexer.string = "# 1\n# 2" @lexer.fullscan @lexer.getcomment(2).should == "1\n2\n" end end # FIXME: We need to rewrite all of these tests, but I just don't want to take the time right now. 
describe "Puppet::Parser::Lexer in the old tests" do before { @lexer = Puppet::Parser::Lexer.new } it "should do simple lexing" do { %q{\\} => [[:BACKSLASH,"\\"]], %q{simplest scanner test} => [[:NAME,"simplest"],[:NAME,"scanner"],[:NAME,"test"]], %Q{returned scanner test\n} => [[:NAME,"returned"],[:NAME,"scanner"],[:NAME,"test"]] }.each { |source,expected| tokens_scanned_from(source).should be_like(*expected) } end it "should fail usefully" do expect { tokens_scanned_from('^') }.to raise_error(RuntimeError) end it "should fail if the string is not set" do expect { @lexer.fullscan }.to raise_error(Puppet::LexError) end it "should correctly identify keywords" do tokens_scanned_from("case").should be_like([:CASE, "case"]) end it "should correctly parse class references" do %w{Many Different Words A Word}.each { |t| tokens_scanned_from(t).should be_like([:CLASSREF,t])} end # #774 it "should correctly parse namespaced class refernces token" do %w{Foo ::Foo Foo::Bar ::Foo::Bar}.each { |t| tokens_scanned_from(t).should be_like([:CLASSREF, t]) } end it "should correctly parse names" do %w{this is a bunch of names}.each { |t| tokens_scanned_from(t).should be_like([:NAME,t]) } end it "should correctly parse names with numerals" do %w{1name name1 11names names11}.each { |t| tokens_scanned_from(t).should be_like([:NAME,t]) } end it "should correctly parse empty strings" do expect { tokens_scanned_from('$var = ""') }.to_not raise_error end it "should correctly parse virtual resources" do tokens_scanned_from("@type {").should be_like([:AT, "@"], [:NAME, "type"], [:LBRACE, "{"]) end it "should correctly deal with namespaces" do @lexer.string = %{class myclass} @lexer.fullscan @lexer.namespace.should == "myclass" @lexer.namepop @lexer.namespace.should == "" @lexer.string = "class base { class sub { class more" @lexer.fullscan @lexer.namespace.should == "base::sub::more" @lexer.namepop @lexer.namespace.should == "base::sub" end it "should not put class instantiation on the namespace" do @lexer.string = "class base { class sub { class { mode" @lexer.fullscan @lexer.namespace.should == "base::sub" end it "should correctly handle fully qualified names" do @lexer.string = "class base { class sub::more {" @lexer.fullscan @lexer.namespace.should == "base::sub::more" @lexer.namepop @lexer.namespace.should == "base" end it "should correctly lex variables" do ["$variable", "$::variable", "$qualified::variable", "$further::qualified::variable"].each do |string| tokens_scanned_from(string).should be_like([:VARIABLE,string.sub(/^\$/,'')]) end end it "should end variables at `-`" do tokens_scanned_from('$hyphenated-variable'). 
should be_like [:VARIABLE, "hyphenated"], [:MINUS, '-'], [:NAME, 'variable'] end it "should not include whitespace in a variable" do tokens_scanned_from("$foo bar").should_not be_like([:VARIABLE, "foo bar"]) end it "should not include excess colons in a variable" do tokens_scanned_from("$foo::::bar").should_not be_like([:VARIABLE, "foo::::bar"]) end end describe "Puppet::Parser::Lexer in the old tests when lexing example files" do my_fixtures('*.pp') do |file| it "should correctly lex #{file}" do lexer = Puppet::Parser::Lexer.new lexer.file = file expect { lexer.fullscan }.to_not raise_error end end end describe "when trying to lex an non-existent file" do include PuppetSpec::Files it "should return an empty list of tokens" do lexer = Puppet::Parser::Lexer.new lexer.file = nofile = tmpfile('lexer') File.exists?(nofile).should == false lexer.fullscan.should == [[false,false]] end end diff --git a/spec/unit/util/feature_spec.rb b/spec/unit/util/feature_spec.rb index f14393398..d5bcafe83 100755 --- a/spec/unit/util/feature_spec.rb +++ b/spec/unit/util/feature_spec.rb @@ -1,81 +1,92 @@ #! /usr/bin/env ruby require 'spec_helper' require 'puppet/util/feature' describe Puppet::Util::Feature do before do @features = Puppet::Util::Feature.new("features") @features.stubs(:warn) end it "should consider undefined features to be absent" do @features.should_not be_defined_feature end it "should be able to add new features" do @features.add(:myfeature) {} @features.should respond_to(:myfeature?) end it "should call associated code when loading a feature" do $loaded_feature = false @features.add(:myfeature) { $loaded_feature = true} $loaded_feature.should be_true end it "should consider a feature absent when the feature load fails" do @features.add(:failer) { raise "foo" } @features.should_not be_failer end it "should consider a feature to be absent when the feature load returns false" do @features.add(:failer) { false } @features.should_not be_failer end it "should consider a feature to be present when the feature load returns true" do @features.add(:available) { true } @features.should be_available end - it "should cache the results of a feature load" do + it "should cache the results of a feature load via code block" do $loaded_feature = 0 @features.add(:myfeature) { $loaded_feature += 1 } @features.myfeature? @features.myfeature? 
$loaded_feature.should == 1 end it "should invalidate the cache for the feature when loading" do # block defined features are evaluated at load time @features.add(:myfeature) { false } @features.should_not be_myfeature # features with no block have deferred evaluation so an existing cached # value would take precedence @features.add(:myfeature) @features.should be_myfeature end it "should support features with libraries" do lambda { @features.add(:puppet, :libs => %w{puppet}) }.should_not raise_error end it "should consider a feature to be present if all of its libraries are present" do @features.add(:myfeature, :libs => %w{foo bar}) @features.expects(:require).with("foo") @features.expects(:require).with("bar") @features.should be_myfeature end it "should log and consider a feature to be absent if any of its libraries are absent" do @features.add(:myfeature, :libs => %w{foo bar}) @features.expects(:require).with("foo").raises(LoadError) @features.stubs(:require).with("bar") Puppet.expects(:debug) @features.should_not be_myfeature end + + it "should change the feature to be present when its libraries become available" do + @features.add(:myfeature, :libs => %w{foo bar}) + @features.expects(:require).twice().with("foo").raises(LoadError).then.returns(nil) + @features.stubs(:require).with("bar") + + Puppet.expects(:debug) + + @features.should_not be_myfeature + @features.should be_myfeature + end end
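A minimal usage sketch (not part of the patch) of how the new `allow_variables_with_dashes` setting changes lexing of hyphenated variable names. It assumes a Puppet checkout with this patch applied; `tokens_for` is a hypothetical helper modelled on the spec's `tokens_scanned_from`, and the expected output is taken from the lexer specs above.

require 'puppet'
require 'puppet/parser/lexer'

# Hypothetical helper: lex a string and return [token_name, value] pairs,
# dropping the trailing [false, false] end-of-input marker that fullscan yields.
def tokens_for(source)
  lexer = Puppet::Parser::Lexer.new
  lexer.string = source
  lexer.fullscan[0..-2].map { |name, data| [name, data.is_a?(Hash) ? data[:value] : data] }
end

Puppet[:allow_variables_with_dashes] = false  # default: a `-` ends the variable name
p tokens_for('$foo-bar')  # => [[:VARIABLE, "foo"], [:MINUS, "-"], [:NAME, "bar"]]

Puppet[:allow_variables_with_dashes] = true   # compatibility mode: the dash stays in the name
p tokens_for('$foo-bar')  # => [[:VARIABLE, "foo-bar"]]  (and a deprecation warning is logged)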