diff --git a/acceptance/tests/config/puppet_manages_own_configuration_in_robust_manner.rb b/acceptance/tests/config/puppet_manages_own_configuration_in_robust_manner.rb
index dafef1cff..543c1d815 100644
--- a/acceptance/tests/config/puppet_manages_own_configuration_in_robust_manner.rb
+++ b/acceptance/tests/config/puppet_manages_own_configuration_in_robust_manner.rb
@@ -1,82 +1,82 @@
 # User story:
 # A new user has installed puppet either from source or from a gem, which does
 # not put the "puppet" user or group on the system. They run the puppet master,
 # which fails because of the missing user and then correct their actions. They
 # expect that after correcting their actions, puppet will work correctly.
 test_name "Puppet manages its own configuration in a robust manner"

-skip_test "JVM Puppet cannot change its user while running." if @options[:is_jvm_puppet]
+skip_test "JVM Puppet cannot change its user while running." if @options[:is_puppetserver]

 # when owner/group works on windows for settings, this confine should be removed.
 confine :except, :platform => 'windows'

 # when managhome roundtrips for solaris, this confine should be removed
 confine :except, :platform => 'solaris'

 # pe setup includes ownership of external directories such as the passenger
 # document root, which puppet itself knows nothing about
 confine :except, :type => 'pe'

 # same issue for a foss passenger run
 if master.is_using_passenger?
   skip_test 'Cannot test with passenger.'
 end

 if master.use_service_scripts?
   # Beaker defaults to leaving puppet running when using service scripts,
   # Need to shut it down so we can modify user/group and test startup failure
   on(master, puppet('resource', 'service', master['puppetservice'], 'ensure=stopped'))
 end

 step "Clear out yaml directory because of a bug in the indirector/yaml. (See #21145)"
 on master, 'rm -rf $(puppet master --configprint yamldir)'

 original_state = {}
 step "Record original state of system users" do
   hosts.each do |host|
     original_state[host] = {}
     original_state[host][:user] = user = host.execute('puppet config print user')
     original_state[host][:group] = group = host.execute('puppet config print group')
     original_state[host][:ug_resources] = on(host, puppet('resource', 'user', user)).stdout
     original_state[host][:ug_resources] += on(host, puppet('resource', 'group', group)).stdout
     original_state[host][:ug_resources] += "Group['#{group}'] -> User['#{user}']\n"
   end
 end

 teardown do
   # And cleaning up yaml dir again here because we are changing service
   # user and group ids back to the original uid and gid
   on master, 'rm -rf $(puppet master --configprint yamldir)'

   hosts.each do |host|
     apply_manifest_on(host, <<-ORIG)
       #{original_state[host][:ug_resources]}
     ORIG
   end

   with_puppet_running_on(master, {}) do
     agents.each do |agent|
       on agent, puppet('agent', '-t', '--server', master)
     end
   end
 end

 step "Remove system users" do
   hosts.each do |host|
     on host, puppet('resource', 'user', original_state[host][:user], 'ensure=absent')
     on host, puppet('resource', 'group', original_state[host][:group], 'ensure=absent')
   end
 end

 step "Ensure master fails to start when missing system user" do
   on master, puppet('master'), :acceptable_exit_codes => [74] do
     assert_match(/could not change to group "#{original_state[master][:group]}"/, result.output)
     assert_match(/Could not change to user #{original_state[master][:user]}/, result.output)
   end
 end

 step "Ensure master starts when making users after having previously failed startup" do
   with_puppet_running_on(master, :master => { :mkusers => true }) do
     agents.each do |agent|
       on agent, puppet('agent', '-t', '--server', master)
     end
   end
 end
diff --git a/acceptance/tests/environment/cmdline_overrides_environment.rb b/acceptance/tests/environment/cmdline_overrides_environment.rb
index 78a07413b..bbaad76eb 100644
--- a/acceptance/tests/environment/cmdline_overrides_environment.rb
+++ b/acceptance/tests/environment/cmdline_overrides_environment.rb
@@ -1,322 +1,322 @@
 test_name "Commandline modulepath and manifest settings override environment"

-skip_test "CLI-master tests are not applicable" if @options[:is_jvm_puppet]
+skip_test "CLI-master tests are not applicable" if @options[:is_puppetserver]

 testdir = create_tmpdir_for_user master, 'cmdline_and_environment'
 environmentpath = "#{testdir}/environments"
 modulepath = "#{testdir}/modules"
 manifests = "#{testdir}/manifests"
 sitepp = "#{manifests}/site.pp"
 other_manifestdir = "#{testdir}/other_manifests"
 other_sitepp = "#{other_manifestdir}/site.pp"
 other_modulepath = "#{testdir}/some_other_modulepath"
 cmdline_manifest = "#{testdir}/cmdline.pp"

 step "Prepare manifests and modules"
 apply_manifest_on(master, <<-MANIFEST, :catch_failures => true)
 File {
   ensure => directory,
   owner => #{master['user']},
   group => #{master['group']},
   mode => 0750,
 }

 ##############################################
 # A production directory environment
 file {
   "#{testdir}":;
   "#{environmentpath}":;
   "#{environmentpath}/production":;
   "#{environmentpath}/production/manifests":;
   "#{environmentpath}/production/modules":;
   "#{environmentpath}/production/modules/amod":;
   "#{environmentpath}/production/modules/amod/manifests":;
 }
 file { "#{environmentpath}/production/modules/amod/manifests/init.pp":
   ensure => file,
   mode => 0640,
   content => 'class amod {
     notify { "amod from production environment": }
   }'
 }
 file { "#{environmentpath}/production/manifests/production.pp":
   ensure => file,
   mode => 0640,
   content => '
     notify { "in production.pp": }
     include amod
   '
 }

 ##############################################################
 # To be set as default manifests and modulepath in puppet.conf
 file {
   "#{modulepath}":;
   "#{modulepath}/amod/":;
   "#{modulepath}/amod/manifests":;
 }
 file { "#{modulepath}/amod/manifests/init.pp":
   ensure => file,
   mode => 0640,
   content => 'class amod {
     notify { "amod from modulepath": }
   }'
 }
 file { "#{manifests}": }
 file { "#{sitepp}":
   ensure => file,
   mode => 0640,
   content => '
     notify { "in site.pp": }
     include amod
   '
 }
 file { "#{other_manifestdir}": }
 file { "#{other_sitepp}":
   ensure => file,
   mode => 0640,
   content => '
     notify { "in other manifestdir site.pp": }
     include amod
   '
 }

 ################################
 # To be specified on commandline
 file {
   "#{other_modulepath}":;
   "#{other_modulepath}/amod/":;
   "#{other_modulepath}/amod/manifests":;
 }
 file { "#{other_modulepath}/amod/manifests/init.pp":
   ensure => file,
   mode => 0640,
   content => 'class amod {
     notify { "amod from commandline modulepath": }
   }'
 }
 file { "#{cmdline_manifest}":
   ensure => file,
   mode => 0640,
   content => '
     notify { "in cmdline.pp": }
     include amod
   '
 }
 MANIFEST

 def shutdown_puppet_if_running_as_a_service
   if master.use_service_scripts?
     # Beaker defaults to leaving puppet running when using service scripts,
     # Need to shut it down so we can start up with commandline options
     on(master, puppet('resource', 'service', master['puppetservice'], 'ensure=stopped'))
   end
 end

 teardown do
   if master.use_service_scripts?
     # Beaker defaults to leaving puppet running when using service scripts,
     on(master, puppet('resource', 'service', master['puppetservice'], 'ensure=running'))
   end
 end

 # Note: this is the semantics seen with legacy environments if commandline
 # manifest/modulepath are set.
 step "CASE 1: puppet master with --manifest and --modulepath overrides set production directory environment" do
   if master.is_using_passenger?
     step "Skipping for Passenger (PE) setup; since the equivalent of a commandline override would be adding the setting to config.ru, which seems like a very odd thing to do."
   else
     shutdown_puppet_if_running_as_a_service

     master_opts = {
       'master' => {
         'environmentpath' => environmentpath,
         'manifest' => sitepp,
         'modulepath' => modulepath,
       },
       :__service_args__ => {
         :bypass_service_script => true,
       },
     }
     master_opts_with_cmdline = master_opts.merge(:__commandline_args__ => "--manifest=#{cmdline_manifest} --modulepath=#{other_modulepath}")
     with_puppet_running_on master, master_opts_with_cmdline, testdir do
       agents.each do |agent|
         on(agent, puppet("agent -t --server #{master}"), :acceptable_exit_codes => [2] ) do
           assert_match(/in cmdline\.pp/, stdout)
           assert_match(/amod from commandline modulepath/, stdout)
           assert_no_match(/production/, stdout)
         end

         step "CASE 1a: even if environment is specified"
         on(agent, puppet("agent -t --server #{master} --environment production"), :acceptable_exit_codes => [2]) do
           assert_match(/in cmdline\.pp/, stdout)
           assert_match(/amod from commandline modulepath/, stdout)
           assert_no_match(/production/, stdout)
         end
       end
     end

     step "CASE 2: or if you set --manifestdir" do
       master_opts_with_cmdline = master_opts.merge(:__commandline_args__ => "--manifestdir=#{other_manifestdir} --modulepath=#{other_modulepath}")
       step "CASE 2: it is ignored if manifest is set in puppet.conf to something not using $manifestdir"
       with_puppet_running_on master, master_opts_with_cmdline, testdir do
         agents.each do |agent|
           on(agent, puppet("agent -t --server #{master}"), :acceptable_exit_codes => [2]) do
             assert_match(/in production\.pp/, stdout)
             assert_match(/amod from commandline modulepath/, stdout)
           end
         end
       end

       step "CASE 2a: but does pull in the default manifest via manifestdir if manifest is not set"
       master_opts_with_cmdline = master_opts.merge(:__commandline_args__ => "--manifestdir=#{other_manifestdir} --modulepath=#{other_modulepath}")
       master_opts_with_cmdline['master'].delete('manifest')
       with_puppet_running_on master, master_opts_with_cmdline, testdir do
         agents.each do |agent|
           on(agent, puppet("agent -t --server #{master}"), :acceptable_exit_codes => [2]) do
             assert_match(/in other manifestdir site\.pp/, stdout)
             assert_match(/amod from commandline modulepath/, stdout)
             assert_no_match(/production/, stdout)
           end
         end
       end
     end
   end
 end

 step "CASE 3: puppet master with manifest and modulepath set in puppet.conf is overriden by an existing and set production directory environment" do
   master_opts = {
     'master' => {
       'environmentpath' => environmentpath,
       'manifest' => sitepp,
       'modulepath' => modulepath,
     }
   }
   if master.is_pe?
     master_opts['master']['basemodulepath'] = master['sitemoduledir']
   end

   with_puppet_running_on master, master_opts, testdir do
     agents.each do |agent|
       step "CASE 3: this case is unfortunate, but will be irrelevant when we remove legacyenv in 4.0"
       on(agent, puppet("agent -t --server #{master}"), :acceptable_exit_codes => [2] ) do
         assert_match(/in production\.pp/, stdout)
         assert_match(/amod from production environment/, stdout)
       end

       step "CASE 3a: if environment is specified"
       on(agent, puppet("agent -t --server #{master} --environment production"), :acceptable_exit_codes => [2]) do
         assert_match(/in production\.pp/, stdout)
         assert_match(/amod from production environment/, stdout)
       end
     end
   end
 end

 step "CASE 4: puppet master with default manifest, modulepath, environment, environmentpath and an existing '#{environmentpath}/production' directory environment that has not been set" do
   if master.is_using_passenger?
step "Skipping for PE because PE requires most of the existing puppet.conf and /etc/puppetlabs/puppet configuration, and we cannot simply point to a new conf directory." else shutdown_puppet_if_running_as_a_service ssldir = on(master, puppet("master --configprint ssldir")).stdout.chomp master_opts = { :__service_args__ => { :bypass_service_script => true, }, :__commandline_args__ => "--confdir=#{testdir} --ssldir=#{ssldir}" } with_puppet_running_on master, master_opts, testdir do agents.each do |agent| step "CASE 4: #{environmentpath}/production directory environment does not take precedence because default environmentpath is ''" on(agent, puppet("agent -t --server #{master}"), :acceptable_exit_codes => [2] ) do assert_match(/in site\.pp/, stdout) assert_match(/amod from modulepath/, stdout) end on(agent, puppet("agent -t --server #{master} --environment production"), :acceptable_exit_codes => [2]) do assert_match(/in site\.pp/, stdout) assert_match(/amod from modulepath/, stdout) end end end end end step "CASE 5: puppet master with explicit dynamic environment settings and empty environmentpath" do step "CASE 5: Prepare an additional modulepath module" apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) File { ensure => directory, owner => #{master['user']}, group => #{master['group']}, mode => 0750, } # A second module in another modules dir file { "#{other_modulepath}":; "#{other_modulepath}/bmod/":; "#{other_modulepath}/bmod/manifests":; } file { "#{other_modulepath}/bmod/manifests/init.pp": ensure => file, mode => 0640, content => 'class bmod { notify { "bmod from other modulepath": } }' } file { "#{environmentpath}/production/manifests/production.pp": ensure => file, mode => 0640, content => ' notify { "in production.pp": } include amod include bmod ' } MANIFEST master_opts = { 'master' => { 'manifest' => "#{environmentpath}/$environment/manifests", 'modulepath' => "#{environmentpath}/$environment/modules:#{other_modulepath}", } } if master.is_pe? 
     master_opts['master']['modulepath'] << ":#{master['sitemoduledir']}"
   end

   with_puppet_running_on master, master_opts, testdir do
     agents.each do |agent|
       step "CASE 5: pulls in the production environment based on $environment default"
       on(agent, puppet("agent -t --server #{master}"), :acceptable_exit_codes => [2] ) do
         assert_match(/in production\.pp/, stdout)
         assert_match(/amod from production environment/, stdout)
         step "CASE 5: and sees modules located in later elements of the modulepath (which would not be seen by a directory env (PUP-2158)"
         assert_match(/bmod from other modulepath/, stdout)
       end

       step "CASE 5a: pulls in the production environment when explicitly set"
       on(agent, puppet("agent -t --server #{master} --environment production"), :acceptable_exit_codes => [2] ) do
         assert_match(/in production\.pp/, stdout)
         assert_match(/amod from production environment/, stdout)
         step "CASE 5a: and sees modules located in later elements of the modulepath (which would not be seen by a directory env (PUP-2158)"
         assert_match(/bmod from other modulepath/, stdout)
       end
     end
   end
 end
diff --git a/acceptance/tests/external_ca_support/apache_external_root_ca.rb b/acceptance/tests/external_ca_support/apache_external_root_ca.rb
index 39bd194c1..b78d748d9 100644
--- a/acceptance/tests/external_ca_support/apache_external_root_ca.rb
+++ b/acceptance/tests/external_ca_support/apache_external_root_ca.rb
@@ -1,203 +1,203 @@
 begin
   require 'puppet_x/acceptance/external_cert_fixtures'
 rescue LoadError
   $LOAD_PATH.unshift(File.expand_path('../../../lib', __FILE__))
   require 'puppet_x/acceptance/external_cert_fixtures'
 end

 # This test only runs on EL-6 master roles.
 confine :to, :platform => 'el-6'
 confine :except, :type => 'pe'

-skip_test "Test not supported on jvm" if @options[:is_jvm_puppet]
+skip_test "Test not supported on jvm" if @options[:is_puppetserver]

 if master.use_service_scripts?
   # Beaker defaults to leaving puppet running when using service scripts,
   # Need to shut it down so we can start up our apache instance
   on(master, puppet('resource', 'service', master['puppetservice'], 'ensure=stopped'))

   teardown do
     # And ensure that it is up again after everything is done
     on(master, puppet('resource', 'service', master['puppetservice'], 'ensure=running'))
   end
 end

 # Verify that a trivial manifest can be run to completion.
 # Supported Setup: Single, Root CA
 # - Agent and Master SSL cert issued by the Root CA
 # - Revocation disabled on the agent `certificate_revocation = false`
 # - CA disabled on the master `ca = false`
 #
 # SUPPORT NOTES
 #
 # * If the x509 alt names extension is used when issuing SSL server certificates
 #   for the Puppet master, then the client SSL certificate issued by an external
 #   CA must posses the DNS common name in the alternate name field. This is
 #   due to a bug in Ruby. If the CN is not duplicated in the Alt Names, then
 #   the following error will appear on the agent with MRI 1.8.7:
 #
 #   Warning: Server hostname 'master1.example.org' did not match server
 #   certificate; expected one of master1.example.org, DNS:puppet,
 #   DNS:master-ca.example.org
 #
 #   See: https://bugs.ruby-lang.org/issues/6493
 test_name "Puppet agent works with Apache, both configured with externally issued certificates from independent intermediate CA's"

 step "Copy certificates and configuration files to the master..."
 fixture_dir = File.expand_path('../fixtures', __FILE__)
 testdir = master.tmpdir('apache_external_root_ca')
 fixtures = PuppetX::Acceptance::ExternalCertFixtures.new(fixture_dir, testdir)

 # We need this variable in scope.
 disable_and_reenable_selinux = nil

 # Register our cleanup steps early in a teardown so that they will happen even
 # if execution aborts part way.
 teardown do
   step "Cleanup Apache (httpd) and /etc/hosts"
   # Restore /etc/hosts
   on master, "cp -p '#{testdir}/hosts' /etc/hosts"
   # stop the service before moving files around
   on master, "/etc/init.d/httpd stop"
   on master, "mv --force /etc/httpd/conf/httpd.conf{,.external_ca_test}"
   on master, "mv --force /etc/httpd/conf/httpd.conf{.orig,}"

   if disable_and_reenable_selinux
     step "Restore the original state of SELinux"
     on master, "setenforce 1"
   end
 end

 # Read all of the CA certificates.

 # Copy all of the x.509 fixture data over to the master.
 create_remote_file master, "#{testdir}/ca_root.crt", fixtures.root_ca_cert
 create_remote_file master, "#{testdir}/ca_agent.crt", fixtures.agent_ca_cert
 create_remote_file master, "#{testdir}/ca_master.crt", fixtures.master_ca_cert
 create_remote_file master, "#{testdir}/ca_master.crl", fixtures.master_ca_crl
 create_remote_file master, "#{testdir}/ca_master_bundle.crt", "#{fixtures.master_ca_cert}\n#{fixtures.root_ca_cert}\n"
 create_remote_file master, "#{testdir}/ca_agent_bundle.crt", "#{fixtures.agent_ca_cert}\n#{fixtures.root_ca_cert}\n"
 create_remote_file master, "#{testdir}/agent.crt", fixtures.agent_cert
 create_remote_file master, "#{testdir}/agent.key", fixtures.agent_key
 create_remote_file master, "#{testdir}/agent_email.crt", fixtures.agent_email_cert
 create_remote_file master, "#{testdir}/agent_email.key", fixtures.agent_email_key
 create_remote_file master, "#{testdir}/master.crt", fixtures.master_cert
 create_remote_file master, "#{testdir}/master.key", fixtures.master_key
 create_remote_file master, "#{testdir}/master_rogue.crt", fixtures.master_cert_rogue
 create_remote_file master, "#{testdir}/master_rogue.key", fixtures.master_key_rogue

 ##
 # Now create the master and agent puppet.conf
 #
 # We need to create the public directory for Passenger and the modules
 # directory to avoid `Error: Could not evaluate: Could not retrieve information
 # from environment production source(s) puppet://master1.example.org/plugins`
 on master, "mkdir -p #{testdir}/etc/{master/{public,modules/empty/lib},agent}"
 # Backup /etc/hosts
 on master, "cp -p /etc/hosts '#{testdir}/hosts'"

 # Make master1.example.org resolve if it doesn't already.
 on master, "grep -q -x '#{fixtures.host_entry}' /etc/hosts || echo '#{fixtures.host_entry}' >> /etc/hosts"

 create_remote_file master, "#{testdir}/etc/agent/puppet.conf", fixtures.agent_conf
 create_remote_file master, "#{testdir}/etc/agent/puppet.conf.crl", fixtures.agent_conf_crl
 create_remote_file master, "#{testdir}/etc/agent/puppet.conf.email", fixtures.agent_conf_email
 create_remote_file master, "#{testdir}/etc/master/puppet.conf", fixtures.master_conf

 # auth.conf to allow *.example.com access to the rest API
 create_remote_file master, "#{testdir}/etc/master/auth.conf", fixtures.auth_conf

 create_remote_file master, "#{testdir}/etc/master/config.ru", fixtures.config_ru

 step "Set filesystem permissions and ownership for the master"
 # These permissions are required for Passenger to start Puppet as puppet
 on master, "chown -R puppet:puppet #{testdir}/etc/master"

 # These permissions are just for testing, end users should protect their
 # private keys.
on master, "chmod -R a+rX #{testdir}" agent_cmd_prefix = "--confdir #{testdir}/etc/agent --vardir #{testdir}/etc/agent/var" step "Configure EPEL" epel_release_path = "http://mirror.us.leaseweb.net/epel/6/i386/epel-release-6-8.noarch.rpm" on master, "rpm -q epel-release || (yum -y install #{epel_release_path} && yum -y upgrade epel-release)" step "Configure Apache and Passenger" packages = [ 'httpd', 'mod_ssl', 'mod_passenger', 'rubygem-passenger', 'policycoreutils-python' ] packages.each do |pkg| on master, "rpm -q #{pkg} || (yum -y install #{pkg})" end create_remote_file master, "#{testdir}/etc/httpd.conf", fixtures.httpd_conf on master, 'test -f /etc/httpd/conf/httpd.conf.orig || cp -p /etc/httpd/conf/httpd.conf{,.orig}' on master, "cat #{testdir}/etc/httpd.conf > /etc/httpd/conf/httpd.conf" step "Make SELinux and Apache play nicely together..." on master, "sestatus" do if stdout.match(/Current mode:.*enforcing/) disable_and_reenable_selinux = true else disable_and_reenable_selinux = false end end if disable_and_reenable_selinux on master, "setenforce 0" end step "Start the Apache httpd service..." on master, 'service httpd restart' # Move the agent SSL cert and key into place. # The filename must match the configured certname, otherwise Puppet will try # and generate a new certificate and key step "Configure the agent with the externally issued certificates" on master, "mkdir -p #{testdir}/etc/agent/ssl/{public_keys,certs,certificate_requests,private_keys,private}" create_remote_file master, "#{testdir}/etc/agent/ssl/certs/#{fixtures.agent_name}.pem", fixtures.agent_cert create_remote_file master, "#{testdir}/etc/agent/ssl/private_keys/#{fixtures.agent_name}.pem", fixtures.agent_key # Now, try and run the agent on the master against itself. step "Successfully run the puppet agent on the master" on master, puppet_agent("#{agent_cmd_prefix} --test"), :acceptable_exit_codes => (0..255) do assert_no_match /Creating a new SSL key/, stdout assert_no_match /\Wfailed\W/i, stderr assert_no_match /\Wfailed\W/i, stdout assert_no_match /\Werror\W/i, stderr assert_no_match /\Werror\W/i, stdout # Assert the exit code so we get a "Failed test" instead of an "Errored test" assert exit_code == 0 end step "Agent refuses to connect to a rogue master" on master, puppet_agent("#{agent_cmd_prefix} --ssl_client_ca_auth=#{testdir}/ca_master.crt --masterport=8141 --test"), :acceptable_exit_codes => (0..255) do assert_no_match /Creating a new SSL key/, stdout assert_match /certificate verify failed/i, stderr assert_match /The server presented a SSL certificate chain which does not include a CA listed in the ssl_client_ca_auth file/i, stderr assert exit_code == 1 end step "Master accepts client cert with email address in subject" on master, "cp #{testdir}/etc/agent/puppet.conf{,.no_email}" on master, "cp #{testdir}/etc/agent/puppet.conf{.email,}" on master, puppet_agent("#{agent_cmd_prefix} --test"), :acceptable_exit_codes => (0..255) do assert_no_match /\Wfailed\W/i, stdout assert_no_match /\Wfailed\W/i, stderr assert_no_match /\Werror\W/i, stdout assert_no_match /\Werror\W/i, stderr # Assert the exit code so we get a "Failed test" instead of an "Errored test" assert exit_code == 0 end step "Agent refuses to connect to revoked master" on master, "cp #{testdir}/etc/agent/puppet.conf{,.no_crl}" on master, "cp #{testdir}/etc/agent/puppet.conf{.crl,}" revoke_opts = "--hostcrl #{testdir}/ca_master.crl" on master, puppet_agent("#{agent_cmd_prefix} #{revoke_opts} --test"), :acceptable_exit_codes => (0..255) do 
   assert_match /certificate revoked.*?example.org/, stderr
   assert exit_code == 1
 end

 step "Finished testing External Certificates"
diff --git a/acceptance/tests/store_configs/enc_provides_node_when_storeconfigs_enabled.rb b/acceptance/tests/store_configs/enc_provides_node_when_storeconfigs_enabled.rb
index c50cda655..135ebf0c4 100644
--- a/acceptance/tests/store_configs/enc_provides_node_when_storeconfigs_enabled.rb
+++ b/acceptance/tests/store_configs/enc_provides_node_when_storeconfigs_enabled.rb
@@ -1,121 +1,123 @@
 test_name "ENC node information is used when store configs enabled (#16698)"

 confine :to, :platform => ['debian', 'ubuntu']
 confine :except, :platform => 'lucid'

+skip_test "Test not supported on jvm" if @options[:is_puppetserver]
+
 testdir = master.tmpdir('use_enc')

 create_remote_file master, "#{testdir}/enc.rb", <<END
 #!/usr/bin/env ruby
 require 'yaml'
 puts({
   'classes' => [],
   'parameters' => { 'data' => 'data from enc' },
 }.to_yaml)
 END
 on master, "chmod 755 #{testdir}/enc.rb"

 create_remote_file(master, "#{testdir}/site.pp", 'notify { $data: }')

 on master, "chown -R #{master['user']}:#{master['group']} #{testdir}"
 on master, "chmod -R g+rwX #{testdir}"

 create_remote_file master, "#{testdir}/setup.pp", <<END
 $active_record_version = $osfamily ? {
   'Debian' => $lsbmajdistrelease ? {
     5 => '2.2.3',
     default => '3.2.16',
   },
   default => '3.2.16',
 }

 # Trusty doesn't have a rubygems package anymore
 # Not sure which other Debian's might follow suit so
 # restricting this narrowly for now
 #
 if $lsbdistid == "Ubuntu" and $lsbdistrelease == "14.04" {
   package { activerecord:
     ensure => $active_record_version,
     provider => 'gem',
   }
 } else {
   package {
     rubygems:
       ensure => present;

     activerecord:
       ensure => $active_record_version,
       provider => 'gem',
       require => Package[rubygems];
   }
 }

 if $osfamily == "Debian" {
   package {
     # This is the deb sqlite3 package
     sqlite3:
       ensure => present;

     libsqlite3-dev:
       ensure => present,
       require => Package[sqlite3];
   }
 } elsif $osfamily == "RedHat" {
   $sqlite_gem_pkg_name = $operatingsystem ? {
     "Fedora" => "rubygem-sqlite3",
     default => "rubygem-sqlite3-ruby"
   }

   package {
     sqlite:
       ensure => present;

     $sqlite_gem_pkg_name:
       ensure => present,
       require => Package[sqlite]
   }
 } else {
   fail "Unknown OS $osfamily"
 }
 END

 # This is a brute force hack around PUP-1073 because the deb for the core
 # sqlite3 package and the rubygem for the sqlite3 driver are both named
 # 'sqlite3'. So we just run a second puppet apply.
 create_remote_file master, "#{testdir}/setup_sqlite_gem.pp", <<END
 if $osfamily == "Debian" {
   package { 'sqlite3-gem':
     name => 'sqlite3',
     ensure => present,
     provider => 'gem',
   }
 }
 END

 on master, puppet_apply("#{testdir}/setup.pp")
 on master, puppet_apply("#{testdir}/setup_sqlite_gem.pp")

 master_opts = {
   'master' => {
     'node_terminus' => 'exec',
     'external_nodes' => "#{testdir}/enc.rb",
     'storeconfigs' => true,
     'dbadapter' => 'sqlite3',
     'dblocation' => "#{testdir}/store_configs.sqlite3",
     'manifest' => "#{testdir}/site.pp"
   }
 }

 with_puppet_running_on master, master_opts, testdir do
   agents.each do |agent|
     run_agent_on(agent, "--no-daemonize --onetime --server #{master} --verbose")
     assert_match(/data from enc/, stdout)
   end
 end